Merge 46a232219d into release

github-actions[bot] 2022-11-25 04:30:01 +00:00 committed by GitHub
commit add268a76f
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
89 changed files with 4051 additions and 4055 deletions

View file

@ -20,7 +20,7 @@ jobs:
- name: Install libsqlite3-dev
if: contains(matrix.os, 'ubuntu') && !contains(matrix.features, 'bundle-sqlite')
run: sudo apt-get update && sudo apt-get install libsqlite3-dev
- uses: actions/checkout@v1
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
profile: minimal

View file

@ -14,17 +14,18 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout Polaris
uses: actions/checkout@v1
- uses: actions-rs/toolchain@v1
with:
profile: minimal
- name: Install Tarpaulin
run: cargo install cargo-tarpaulin
- name: Run Tests
run: cargo tarpaulin --all-features --ignore-tests --out Xml
- name: Upload Results
uses: codecov/codecov-action@v1
with:
token: ${{ secrets.CODECOV_TOKEN }}
fail_ci_if_error: true
- name: Checkout Polaris
uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
profile: minimal
- name: Run Tarpaulin
uses: actions-rs/tarpaulin@v0.1
with:
args: "--all-features --ignore-tests"
out-type: Xml
timeout: 240
- name: Upload Results
uses: codecov/codecov-action@v2
with:
fail_ci_if_error: true

.github/workflows/deploy-demo.yml (vendored, new file, 19 lines changed)
View file

@ -0,0 +1,19 @@
name: Deploy Demo Server
on:
workflow_dispatch:
release:
types: [released]
jobs:
trigger:
name: Trigger Demo Build
runs-on: ubuntu-latest
steps:
- name: Repository Dispatch
uses: peter-evans/repository-dispatch@v2
with:
token: ${{ secrets.POLARIS_DEMO_ACCESS_TOKEN }}
repository: agersant/polaris-demo
event-type: polaris-release

View file

@ -20,7 +20,7 @@ jobs:
target_branch: release
github_token: ${{ secrets.GITHUB_TOKEN }}
- name: Checkout Release Branch
uses: actions/checkout@master
uses: actions/checkout@v2
with:
ref: release
- name: Update Polaris Version in Cargo.toml
@ -70,7 +70,7 @@ jobs:
steps:
- name: Checkout Polaris
uses: actions/checkout@v1
uses: actions/checkout@v2
with:
ref: release
- name: Install Rust Toolchain
@ -106,7 +106,7 @@ jobs:
steps:
- name: Checkout Polaris
uses: actions/checkout@v1
uses: actions/checkout@v2
with:
ref: release
- name: Make release

.gitignore (vendored, 1 line changed)
View file

@ -10,6 +10,7 @@ TestConfig.toml
# Runtime artifacts
*.sqlite
polaris.log
polaris.pid
/thumbnails
# Release process artifacts (usually runs on CI)

.vscode/settings.json (vendored, new file, 10 lines changed)
View file

@ -0,0 +1,10 @@
{
"files.watcherExclude": {
"**/target/**": true,
"**/test-output/**": true
},
"files.exclude": {
"**/target": true,
"**/test-output": true
}
}

CHANGELOG.md (new file, 340 lines changed)
View file

@ -0,0 +1,340 @@
# Changelog
## Polaris 0.14.0
### General
- Changes are now documented in `CHANGELOG.md` instead of inside individual Github releases
### Server
- API version is now 7.0
- ⚠️ Removed support for authentication via cookies (deprecated in Polaris 0.13.0)
- ⚠️ Removed support for authentication via the `Basic` scheme when using the HTTP `Authorization` header (deprecated in Polaris 0.13.0)
- Fixed a bug where all music sources would be deleted when trying to add sources with duplicate names
- Additional metadata fields are now indexed: lyricist, composer, genre and label (thanks @pmphfm)
- Endpoints returning thumbnail images or audio files no longer use HTTP `content-encoding`
- When indexing files with ID3v2 tags, the "Original Date Released" frame can now be used to populate the year associated with a song
- The `/thumbnail` endpoint now supports an optional parameter for small/large/native image sizing (thanks @Saecki); see the request sketch after this list
- Log files now contain more details about the cause of failed HTTP requests (3xx, 4xx, 5xx)
- Startup failures now generate clearer error messages
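To make the new sizing parameter concrete, here is a minimal request sketch using the `ureq` crate that Polaris already depends on. The endpoint URL, the way the file path is passed and the token handling are illustrative assumptions, not code from this release:

```rust
use std::io::Read;

// Minimal sketch (assumptions, not from this release): fetch a resized
// thumbnail using the new `size` query parameter.
fn fetch_thumbnail(auth_token: &str, virtual_path: &str) -> std::io::Result<Vec<u8>> {
    let response = ureq::get("http://localhost:5050/api/thumbnail") // hypothetical URL
        .query("path", virtual_path) // hypothetical way of passing the file path
        .query("size", "small") // "small" (400x400), "large" (1200x1200) or "native"
        .query("auth_token", auth_token)
        .call();
    let mut bytes = Vec::new();
    response.into_reader().read_to_end(&mut bytes)?; // error handling kept minimal
    Ok(bytes)
}
```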
### Web client
- Volume slider now applies non-linearly
- Artist names are now displayed in the Random Albums and Recent Albums pages
## Polaris 0.13.5
### Server
- Added support for AIFF and WAVE files (thanks @gahag)
### Web Client
- Improved performance when scrolling large playlists
- Fixed display and playback issues when a song was used multiple times in a playlist
- Playlist duration can now display number of days
- Fixed a bug where the playlist panel could have blank space in very tall browser windows
- Major dependencies updates
## Polaris 0.13.4
### Server
Adjustments to logging behavior.
On Linux:
- Running without `-f` emits a log file
- Running with `-f` and no `--log` option does not emit a log file
- Running with `-f` and `--log` option emits a log file
On Windows:
- Running with UI feature (`polaris.exe` in releases) emits a log file
- Running without UI feature (`polaris-cli.exe` in releases) and no `--log` option does not emit a log file
- Running without UI feature (`polaris-cli.exe` in releases) and `--log` option emits a log file
## Polaris 0.13.3
### Server
- Fixed a bug where music that is no longer on disk was still considered in the collection, even after re-indexing
- On Windows, Polaris now creates a log file
- On Linux, Polaris now creates a log file, even when running with the -f option
## Polaris 0.13.2
### Web client
- Fixed a bug where it was not possible to view or edit which users have administrator rights
- Fixed a bug where, in some cases, drag and dropping a specific disc from an album would not queue the entire disc
## Polaris 0.13.1
### Server
- Fixed a bug where the Windows installer would create unusable installations. #122
## Polaris 0.13.0
### API changes
- Bumped API version number to 6.0.
- Added new endpoints to manage users, mount points and settings more granularly.
- Added support for authenticating via bearer tokens generated by the `/auth` endpoint. These tokens can be submitted via the `Bearer` HTTP Authorization scheme, or as a URL parameter (`?auth_token=…`); see the sketch after this list.
- Authentication using cookies or Basic HTTP Authorization headers is deprecated and will be removed in a future revision.
- Authentication cookies no longer expire after 24 hours. The newly added bearer tokens also have no expiration date.
- Last.fm account linking now requires a short-lived auth token obtained from the newly added `lastfm/link_token` endpoint.
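For illustration, an authenticated request with one of these tokens could look like the sketch below, written with the `ureq` crate from the project's own dependency list; the endpoint URL is a placeholder and both variants are assumptions about client usage rather than code from this release:

```rust
// Minimal sketch (assumptions, not from this release): the same bearer token
// can be sent either through the HTTP Authorization header or as a URL parameter.
fn browse_with_header(token: &str) -> ureq::Response {
    ureq::get("http://localhost:5050/api/browse") // hypothetical endpoint
        .set("Authorization", &format!("Bearer {}", token))
        .call()
}

fn browse_with_query_parameter(token: &str) -> ureq::Response {
    ureq::get("http://localhost:5050/api/browse") // hypothetical endpoint
        .query("auth_token", token)
        .call()
}
```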
### Server
- ⚠Breaking change⚠ If you use a config file, the `reindex_every_n_seconds` and `album_art_pattern` fields must now be in a `[settings]` section; see the example after this list.
- ⚠Breaking change⚠ The installation process on Linux has changed a lot. See the README for updated installation instructions. A summary of the changes is available [here](https://github.com/ogarcia/docker-polaris/issues/2).
- Embedded album art is now supported for mp3, flac and m4a files (thanks @Saecki).
- OPUS files can now be indexed and streamed (thanks @zaethan).
- APE files can now be indexed and streamed.
- The collection indexer has been rewritten for better performance. This also fixed an issue where on some machines, the web client would be unusable while indexing (thanks @lnicola for the code reviews).
- Thumbnail generation is now slightly faster, and works with more pixel formats (notably RGBA16).
- Polaris now uses actix-web instead of Rocket. This change fixes numerous performance and stability issues.
- Sqlite is now bundled by default when building Polaris and was removed from the list of prerequisites. This can be controlled with the `bundle-sqlite` feature flag when compiling Polaris.
- The default album art pattern now includes the jpeg extension in addition to jpg.
- Album art patterns are now case insensitive.
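To make the new layout concrete, the sketch below parses a configuration snippet the same way `src/app/config.rs` does in this commit (via `toml::de::from_str`); the struct definitions and values are simplified, illustrative stand-ins for Polaris's own `Config` and `NewSettings` types:

```rust
use serde::Deserialize;

// Simplified stand-ins for the crate's config types; field names follow the
// changelog entry, everything else is illustrative.
#[derive(Deserialize)]
struct Settings {
    album_art_pattern: Option<String>,
    reindex_every_n_seconds: Option<i64>,
}

#[derive(Deserialize)]
struct Config {
    settings: Option<Settings>,
}

fn parse_example() -> Result<Config, toml::de::Error> {
    // These two fields used to sit at the top level of the file and must now
    // be nested under a `[settings]` table.
    let content = r#"
        [settings]
        album_art_pattern = "Folder.(jpeg|jpg|png)"
        reindex_every_n_seconds = 1800
    "#;
    toml::de::from_str(content)
}
```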
### Web client
- ⚠Breaking change⚠ Your current playlist will appear broken after this update. Please clear the current playlist using the trash can icon. Saved playlists are not affected.
- Added a logout button.
- Reworked interface for managing user accounts.
- Added a shuffle button to randomly re-order the content of the current playlist.
- The total duration of the current playlist is now displayed.
- Audio output can now be toggled on/off by clicking the volume icon.
- Individual discs from multi-disc albums can now be dragged into the playlist.
- When browsing to an album, songs are now displayed and queued in filepath order.
- Fixed a bug where albums could not be dragged from the random or recent views.
- Fixed a bug where directories with a # sign in their name could not be browsed to.
## Polaris 0.12.0
### Server
- Library indexing speed is now significantly faster
- When indexing files that have malformed ID3 tags, information preceding the error will no longer be discarded
- Deleted users can no longer make requests using an existing session
- When using a config file, existing users, mount points and DDNS settings are no longer removed before applying the configuration
- When using a config file to create users, blank usernames are now ignored
- Improved architecture and added more unit tests
### API changes
- API version number bumped to 4.0
- The auth endpoint now returns HTTP cookies instead of a JSON response
- Client requests to update Last.fm status no longer return an error if no Last.fm account is associated with the user
- The thumbnail endpoint now supports an option to disable padding to a square image
### Web client
- The web client now uses Vue instead of Riot as its UI framework
- Added support for theming
## Polaris 0.11.0
### Server
- Compatible with current versions of the Rust nightly compiler
- Fixed a rare crash when indexing corrupted mp3 files
- On Linux, Polaris now notifies systemd after starting up
- Release tarball for Linux version now includes a top-level directory
- User sessions no longer break across server restarts (more improvements still to do on this: #36)
- ⚠️ Breaking change: due to improvements in Polaris credentials management, you will have to re-create your users and playlists after upgrading to this version. If you want to preserve your playlists, you can use a program like DB Browser for SQLite to back up your playlists (from db.sqlite within your Polaris installation directory) and restore them after you re-create users with the same names.
### Web client
- Song durations are now listed when available
- Fixed a bug where clicking on breadcrumbs did not always work when the Polaris server is hosted on Windows
- Current track info now shows in browser tab title
- Fixed a semi-rare bug where indexing would not start during initial setup flow
- Improved handling of untagged songs
- Fixed a bug where playlist had padding in Chrome
- Fixed a bug where folder icons did not render on some systems
Thank you to @lnicola for working on most of the server changes!
## Polaris 0.10.0
### Server
- Polaris servers now ship with interactive API documentation, available at http://localhost:5050/swagger
- When using a prefix URL in Polaris config files, a / will no longer be added automatically at the end of the prefix
### Web client
- Automatically bring up player panel when songs are queued
- Fixed a bug where songs were not always correctly sorted by track number in browser panel
- Fixed a bug where some button hitboxes didn't match their visuals
## Polaris 0.9.0
### Server
- Rewrote all endpoints and server setup using Rocket instead of Iron
- Fixed a bug where special characters in URL to collection folders were not handled correctly (bumped API version number)
- Server API is now unit tested
- Fixed a bug where Last.fm integration endpoints did not work
- ⚠️ Compiling Polaris now requires the nightly version of the Rust compiler
### Web client
- Encode special characters in URL to collection folders
## Polaris 0.8.0
### Server
- Added new API endpoints for search
- Added new API endpoints for Last.fm integration
- Thumbnails are now stored as .jpg images instead of .png
- Duration of some audio files is now being indexed
- On Linux when running as a forking process, a .pid file will be written
- Fixed a bug where usernames were inserted in session even after failed authentication
### Web client
- Added search panel
- Added settings tab to link Last.fm account
## Polaris 0.7.1
### Server
- Added support for prefix_url option in configuration files
- Improved performance of thumbnail creation
## Polaris 0.7.0
### Server
- Added support for the Partial-Content HTTP header when serving music; this fixes several streaming/seeking issues when using the web client (especially in Chrome). See the sketch after this list.
- New API endpoints for playlist management
- New command line argument (-p) to run on a custom port (contribution from @jxs)
- New command line argument (-f) to run in foreground on Linux (contribution from @jxs)
- Fixed a bug where tracks were queued out of order
- Updated program icon on Windows
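As a rough sketch of what this enables (an assumption about client usage, not code from this release), a client can now ask for a byte range of a song and receive an HTTP 206 Partial Content response instead of the whole file, which is what makes seeking work:

```rust
// Minimal sketch (assumptions, not from this release): request only the first
// 64 KiB of a song; a server that honours the Range header replies with
// HTTP 206 Partial Content rather than the full file.
fn fetch_first_chunk(song_url: &str) -> ureq::Response {
    let response = ureq::get(song_url)
        .set("Range", "bytes=0-65535")
        .call();
    debug_assert_eq!(response.status(), 206); // Partial Content
    response
}
```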
### Web client
- Added support for playlists
- Added a button to queue the current directory (thanks @jxs)
## Polaris 0.6.0
### Server
- Internal improvements to database management (now using Diesel)
- Configuration settings are now stored in the database, polaris.toml config files are no longer loaded by default
- Added API endpoints to read and write configuration
- User passwords are now encrypted in storage
- Fixed a bug where results of api/browse were not sorted correctly
### Web client
- Settings can now be edited from the web UI
- Collection re-index can now be triggered from the web UI
- Added initial setup configuration flow to help set up first user and mount point
- Visual changes
## Polaris 0.5.1
This is a minor release, pushing quite a bit of internal cleanup into the wild.
### Server
- Removed OpenSSL dependency on Windows
- No longer send an HTTP cookie after authentication
## Polaris 0.5.0
This release adds Linux support and a variety of improvements to the web client.
### Server
- Added Linux support
- Moved location of configuration file on Windows to `%appdata%\Permafrost\Polaris\polaris.toml`
### Web client
- Performance improvements from upgrading RiotJS to 3.4.4 (from 2.6.2)
- Added support for browsing random and recently added albums
- Minor visual changes (colors, whitespace, etc.)
- Updated favicon
- Fixed a bug where songs containing special characters in their title would not play
- Persist playlist and player state across sessions
## Polaris 0.4.0
This release adds new features supporting the development of polaris-android.
### Server
- Added API endpoint to pull recently added albums
- Added support for the Authorization HTTP header (in addition to the existing /auth API endpoint)
## Polaris 0.3.0
This release is an intermediate release addressing issues with the installation process and updating internals.
### General
- Fixed missing OpenSSL DLL in Windows installer (fixes Issue #3)
- Split every file into an individual installer component
### Server
- Added API endpoint to pull random albums
- Upgraded dependencies
- Added unit tests to indexing and metadata decoding
### Web client
- Web interface playlist now displays more tracks (enough to fill a 4k monitor at normal font size)
## Polaris 0.2.0
This release is focused on polish and performance, solidifying the basics that were put together in version 0.1.0. Here are the major changes:
### General
- Polaris now has a project logo
- Windows installer now supports upgrading an existing install (from 0.2.0 to higher versions)
- Added support for multi-disc albums
### Server
- Major performance improvements to /browse and /flatten API requests (up to 1000x faster for large requests)
- Added API endpoint for version number
- Album covers are now served as thumbnails rather than at source size
- Moved configuration file outside of /Program Files
- Added support for Ogg Vorbis, FLAC and APE metadata
- Fixed a bug where most albums didn't show an artist name
- Fixed a bug where uppercase extensions were not recognized
- Upgraded compiler to Rust 1.13
### Web client
- Complete visual overhaul of the Polaris web client
- Performance improvements for handling large playlists in Polaris web client
- Added error messages when playing songs in unsupported formats
## Polaris 0.1.0
This is the very first Polaris release, celebrating the minimum viable product!
Features in this release:
- Server application with Windows Installer
- Support for multiple users
- Support for serving custom music directories
- Support for custom album art pattern matching
- Support for broadcasting IP to YDNS
- Web UI to browse collection, manage playlist and listen to music

Cargo.lock (generated, 2122 lines changed)

File diff suppressed because it is too large.

View file

@ -2,7 +2,7 @@
name = "polaris"
version = "0.13.5"
authors = ["Antoine Gersant <antoine.gersant@lesforges.org>"]
edition = "2018"
edition = "2021"
build = "build.rs"
[features]
@ -11,65 +11,64 @@ bundle-sqlite = ["libsqlite3-sys"]
ui = ["native-windows-gui", "native-windows-derive"]
[dependencies]
actix-files = { version = "0.4" }
actix-web = { version = "3" }
actix-web-httpauth = { version = "0.5.0" }
anyhow = "1.0.35"
ape = "0.3.0"
actix-files = { version = "0.6" }
actix-web = { version = "4" }
actix-web-httpauth = { version = "0.8" }
ape = "0.4.0"
base64 = "0.13"
branca = "0.10.0"
cookie = { version = "0.14", features = ["signed", "key-expansion"] }
branca = "0.10.1"
crossbeam-channel = "0.5"
diesel_migrations = { version = "1.4", features = ["sqlite"] }
diesel_migrations = { version = "2.0", features = ["sqlite"] }
futures-util = { version = "0.3" }
getopts = "0.2.15"
http = "0.2.2"
id3 = "0.6.4"
libsqlite3-sys = { version = "0.18", features = ["bundled", "bundled-windows"], optional = true }
lewton = "0.10.1"
log = "0.4.5"
metaflac = "0.2.3"
mp3-duration = "0.1.9"
mp4ameta = "0.7.1"
num_cpus = "1.13.0"
getopts = "0.2.21"
http = "0.2.8"
id3 = { git = "https://github.com/polyfloyd/rust-id3.git", rev = "f3b5e3a" } # TODO update after 1.5.0 is released
lewton = "0.10.2"
libsqlite3-sys = { version = "0.25", features = ["bundled", "bundled-windows"], optional = true }
log = "0.4.17"
metaflac = "0.2.5"
mp3-duration = "0.1.10"
mp4ameta = "0.11.0"
num_cpus = "1.14.0"
opus_headers = "0.1.2"
percent-encoding = "2.1"
pbkdf2 = "0.6"
rand = "0.7"
rayon = "1.3"
regex = "1.3.9"
pbkdf2 = "0.11"
percent-encoding = "2.2"
rand = "0.8"
rayon = "1.5"
regex = "1.7.0"
rustfm-scrobble = "1.1.1"
serde = { version = "1.0.111", features = ["derive"] }
serde_derive = "1.0.111"
serde_json = "1.0.53"
simplelog = "0.8.0"
thiserror = "1.0.19"
time = "0.2"
serde = { version = "1.0.147", features = ["derive"] }
serde_derive = "1.0.147"
serde_json = "1.0.87"
simplelog = "0.12.0"
thiserror = "1.0.37"
tokio = "1.21"
toml = "0.5"
ureq = "1.5"
url = "2.1"
ureq = "1.5.5"
url = "2.3"
[dependencies.diesel]
version = "1.4.5"
version = "2.0.2"
default_features = false
features = ["libsqlite3-sys", "r2d2", "sqlite"]
[dependencies.image]
version = "0.23.12"
version = "0.24.4"
default_features = false
features = ["bmp", "gif", "jpeg", "png"]
[target.'cfg(windows)'.dependencies]
native-windows-gui = {version = "1.0.7", default-features = false, features = ["cursor", "image-decoder", "message-window", "menu", "tray-notification"], optional = true }
native-windows-derive = {version = "1.0.2", optional = true }
native-windows-gui = {version = "1.0.13", default-features = false, features = ["cursor", "image-decoder", "message-window", "menu", "tray-notification"], optional = true }
native-windows-derive = {version = "1.0.5", optional = true }
[target.'cfg(unix)'.dependencies]
daemonize = "0.4.1"
sd-notify = "0.1.0"
sd-notify = "0.4.1"
[target.'cfg(windows)'.build-dependencies]
winres = "0.1"
[dev-dependencies]
actix-test = "0.1.0"
headers = "0.3"
fs_extra = "1.2.0"

View file

@ -3,36 +3,49 @@
[![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](LICENSE-MIT)
<img src="res/readme/logo.png?raw=true"/>
Polaris is a music streaming application, designed to let you enjoy your music collection from any computer or mobile device. Polaris works by streaming your music directly from your own computer, without uploading it to a third-party. It is free and open-source software, without any kind of premium version. The only requirement is that your computer stays on while it streams music!
Polaris is a music streaming application, designed to let you enjoy your music collection from any computer or mobile device. Polaris works by streaming music directly from your computer (or cloud server), without uploading it to a third-party. It is free and open-source software, without any kind of premium version.
## Try It Out!
Check out the demo over at https://demo.polaris.stream, featuring a selection of Creative Commons Music. The credentials to access this server are:
Username: `demo_user`
Password: `demo_password`
## Features
![Polaris Web UI](res/readme/web_ui.png?raw=true "Polaris Web UI")
- Optimized for large music collections
- Can run on Windows, Linux, BSD, or through Docker
- Listen to your music on the web or using the [Polaris Android](https://github.com/agersant/polaris-android) app
- Easy to setup and configure via the built-in web UI
- Support for `flac`, `mp3`, `mp4`, `mpc`, `ogg` and `opus` files
- Support for album art images
- Support for `flac`, `mp3`, `mp4`, `mpc`, `ogg`, `opus`, `ape`, `wav` and `aiff` files
- Easy to set up and administer, no configuration files needed
- Dark mode and customizable color themes
- Listen to your music on the go with [Polaris Android](https://github.com/agersant/polaris-android)
- [Last.fm](https://www.last.fm) scrobbling
- Color themes
- Restrict access to your music collection with user accounts
## Tutorials
- [Getting Started](docs/SETUP.md)
- [Streaming From Remote Devices](docs/DDNS.md)
## Screenshots
![Polaris Web UI](res/readme/web_ui.png?raw=true "Polaris Web UI")
![Polaris Web UI Dark Mode](res/readme/dark_mode.png?raw=true "Polaris Web UI")
## Documentation
- [Contribute to Polaris](docs/CONTRIBUTING.md)
- [Maintenance Runbooks](docs/MAINTENANCE.md)
### API Documentation
The Polaris server API is documented via [Swagger](https://agersant.github.io/polaris/swagger). Please note that this Swagger page does not point to a live Polaris server so the `Try it out` buttons are not expected to work.
Every installation of Polaris also distributes this documentation, with the ability to use the `Try it out` buttons. To access it, simply open http://localhost:5050/swagger/ in your browser on the machine running Polaris.
Feel free to open Github issues or Pull Requests if clarifications are needed.
The Polaris server API is documented via [Swagger](https://demo.polaris.stream/swagger/). Every installation of Polaris distributes this documentation, with the ability to use the `Try it out` buttons. To access it, simply open http://localhost:5050/swagger/ in your browser on the machine running Polaris.
## Credits & License Information
Music featured in the demo installation:
- [Chris Zabriskie - Abandon Babylon](https://chriszabriskie.bandcamp.com/album/abandon-babylon) [(License)](https://creativecommons.org/licenses/by/3.0/)
- [Chris Zabriskie - Angie's Sunday Service](https://chriszabriskie.bandcamp.com/album/angies-sunday-service) [(License)](https://creativecommons.org/licenses/by/3.0/)
- [glaciære - pool water blue](https://steviasphere.bandcamp.com/album/pool-water-blue) [(License)](https://creativecommons.org/licenses/by/3.0/)
- [glaciære - light ripples](https://steviasphere.bandcamp.com/album/light-ripples) [(License)](https://creativecommons.org/licenses/by/3.0/)
- [Koresma - South](https://koresma.bandcamp.com/album/south) [(License)](https://creativecommons.org/licenses/by-nc-sa/3.0/)
- [Pete Murphy - Essence EP](https://petemurphy.bandcamp.com/album/falling-down-the-fred-astaires-solo-jazz-piano) [(License)](https://creativecommons.org/licenses/by-nc-sa/3.0/)
- [Rameses B - Essence EP](https://ramesesb.bandcamp.com/album/essence-ep) [(License)](https://creativecommons.org/licenses/by-nc-nd/3.0/)

View file

@ -1,14 +1,13 @@
# Maintenance
## How to make a release
- Update CHANGELOG.md to reflect new release
- On Github, go to **Actions**, select the **Make Release** workflow and click **Run workflow**
- Select the branch to deploy (usually `master`)
- Input a user-facing version name (eg: **0.13.0**)
- Click the **Run workflow** button
- After CI completes, find the release on Github and write the changelog
- Move the release from Draft to Published
Note that the Github web UI will separate the release from the corresponding tag until published.
- After CI completes, move the release from Draft to Published
## How to change the database schema

View file

@ -93,10 +93,8 @@
},
"security": [
{
"admin_http_basic": [],
"admin_http_bearer": [],
"admin_query_parameter": [],
"admin_cookie": []
"admin_query_parameter": []
}
]
}
@ -125,10 +123,8 @@
},
"security": [
{
"admin_http_basic": [],
"admin_http_bearer": [],
"admin_query_parameter": [],
"admin_cookie": []
"admin_query_parameter": []
}
]
}
@ -155,10 +151,8 @@
},
"security": [
{
"admin_http_basic": [],
"admin_http_bearer": [],
"admin_query_parameter": [],
"admin_cookie": []
"admin_query_parameter": []
}
]
},
@ -186,10 +180,8 @@
},
"security": [
{
"admin_http_basic": [],
"admin_http_bearer": [],
"admin_query_parameter": [],
"admin_cookie": []
"admin_query_parameter": []
}
]
}
@ -215,10 +207,8 @@
},
"security": [
{
"admin_http_basic": [],
"admin_http_bearer": [],
"admin_query_parameter": [],
"admin_cookie": []
"admin_query_parameter": []
}
]
},
@ -245,10 +235,8 @@
},
"security": [
{
"admin_http_basic": [],
"admin_http_bearer": [],
"admin_query_parameter": [],
"admin_cookie": []
"admin_query_parameter": []
}
]
}
@ -277,10 +265,8 @@
},
"security": [
{
"admin_http_basic": [],
"admin_http_bearer": [],
"admin_query_parameter": [],
"admin_cookie": []
"admin_query_parameter": []
}
]
}
@ -309,10 +295,8 @@
},
"security": [
{
"admin_http_basic": [],
"admin_http_bearer": [],
"admin_query_parameter": [],
"admin_cookie": []
"admin_query_parameter": []
}
]
}
@ -351,10 +335,8 @@
},
"security": [
{
"admin_http_basic": [],
"admin_http_bearer": [],
"admin_query_parameter": [],
"admin_cookie": []
"admin_query_parameter": []
}
]
},
@ -381,10 +363,8 @@
},
"security": [
{
"admin_http_basic": [],
"admin_http_bearer": [],
"admin_query_parameter": [],
"admin_cookie": []
"admin_query_parameter": []
}
]
}
@ -410,10 +390,8 @@
},
"security": [
{
"auth_http_basic": [],
"auth_http_bearer": [],
"auth_query_parameter": [],
"auth_cookie": []
"auth_query_parameter": []
}
]
},
@ -440,10 +418,8 @@
},
"security": [
{
"auth_http_basic": [],
"auth_http_bearer": [],
"auth_query_parameter": [],
"auth_cookie": []
"auth_query_parameter": []
}
]
}
@ -453,7 +429,7 @@
"tags": [
"Users"
],
"summary": "Signs in a user. Response has Set-Cookie headers for the session, username and admin permission of the user.",
"summary": "Signs in a user.",
"operationId": "postAuth",
"requestBody": {
"required": true,
@ -506,10 +482,8 @@
},
"security": [
{
"auth_http_basic": [],
"auth_http_bearer": [],
"auth_query_parameter": [],
"auth_cookie": []
"auth_query_parameter": []
}
]
}
@ -548,10 +522,8 @@
},
"security": [
{
"auth_http_basic": [],
"auth_http_bearer": [],
"auth_query_parameter": [],
"auth_cookie": []
"auth_query_parameter": []
}
]
}
@ -580,10 +552,8 @@
},
"security": [
{
"auth_http_basic": [],
"auth_http_bearer": [],
"auth_query_parameter": [],
"auth_cookie": []
"auth_query_parameter": []
}
]
}
@ -622,10 +592,8 @@
},
"security": [
{
"auth_http_basic": [],
"auth_http_bearer": [],
"auth_query_parameter": [],
"auth_cookie": []
"auth_query_parameter": []
}
]
}
@ -654,10 +622,8 @@
},
"security": [
{
"auth_http_basic": [],
"auth_http_bearer": [],
"auth_query_parameter": [],
"auth_cookie": []
"auth_query_parameter": []
}
]
}
@ -686,10 +652,8 @@
},
"security": [
{
"auth_http_basic": [],
"auth_http_bearer": [],
"auth_query_parameter": [],
"auth_cookie": []
"auth_query_parameter": []
}
]
}
@ -728,10 +692,8 @@
},
"security": [
{
"auth_http_basic": [],
"auth_http_bearer": [],
"auth_query_parameter": [],
"auth_cookie": []
"auth_query_parameter": []
}
]
}
@ -767,10 +729,8 @@
},
"security": [
{
"auth_http_basic": [],
"auth_http_bearer": [],
"auth_query_parameter": [],
"auth_cookie": []
"auth_query_parameter": []
}
]
}
@ -791,6 +751,16 @@
"type": "string"
}
},
{
"name": "size",
"in": "query",
"description": "The maximum size of the thumbnail, either small (400x400), large (1200x1200) or native",
"schema": {
"type": "string",
"enum": ["small", "large", "native"],
"default": "small"
}
},
{
"name": "pad",
"in": "query",
@ -815,10 +785,8 @@
},
"security": [
{
"auth_http_basic": [],
"auth_http_bearer": [],
"auth_query_parameter": [],
"auth_cookie": []
"auth_query_parameter": []
}
]
}
@ -847,10 +815,8 @@
},
"security": [
{
"auth_http_basic": [],
"auth_http_bearer": [],
"auth_query_parameter": [],
"auth_cookie": []
"auth_query_parameter": []
}
]
}
@ -889,10 +855,8 @@
},
"security": [
{
"auth_http_basic": [],
"auth_http_bearer": [],
"auth_query_parameter": [],
"auth_cookie": []
"auth_query_parameter": []
}
]
},
@ -929,10 +893,8 @@
},
"security": [
{
"auth_http_basic": [],
"auth_http_bearer": [],
"auth_query_parameter": [],
"auth_cookie": []
"auth_query_parameter": []
}
]
},
@ -959,10 +921,8 @@
},
"security": [
{
"auth_http_basic": [],
"auth_http_bearer": [],
"auth_query_parameter": [],
"auth_cookie": []
"auth_query_parameter": []
}
]
}
@ -991,10 +951,8 @@
},
"security": [
{
"auth_http_basic": [],
"auth_http_bearer": [],
"auth_query_parameter": [],
"auth_cookie": []
"auth_query_parameter": []
}
]
}
@ -1023,10 +981,8 @@
},
"security": [
{
"auth_http_basic": [],
"auth_http_bearer": [],
"auth_query_parameter": [],
"auth_cookie": []
"auth_query_parameter": []
}
]
}
@ -1052,10 +1008,8 @@
},
"security": [
{
"auth_http_basic": [],
"auth_http_bearer": [],
"auth_query_parameter": [],
"auth_cookie": []
"auth_query_parameter": []
}
]
}
@ -1124,10 +1078,8 @@
},
"security": [
{
"auth_http_basic": [],
"auth_http_bearer": [],
"auth_query_parameter": [],
"auth_cookie": []
"auth_query_parameter": []
}
]
}
@ -1417,6 +1369,22 @@
"duration": {
"type": "integer",
"example": 571
},
"lyricist": {
"type": "string",
"example": "Timo Tolkki"
},
"composer": {
"type": "string",
"example": "Timo Tolkki"
},
"genre": {
"type": "string",
"example": "Genre"
},
"label": {
"type": "string",
"example": "Noise Records"
}
}
},
@ -1464,32 +1432,10 @@
"in": "query",
"name": "auth_token",
"description": "Identical to the auth_query_parameter scheme but only for users recognized as admin by the Polaris server"
},
"auth_http_basic": {
"type": "http",
"scheme": "basic",
"description": "[deprecated]"
},
"admin_http_basic": {
"type": "http",
"scheme": "basic",
"description": "[deprecated] Identical to the auth_http_basic scheme but only for users recognized as admin by the Polaris server"
},
"auth_cookie": {
"type": "apikey",
"in": "cookie",
"name": "session",
"description": "[deprecated] A token obtained via the SET-COOKIE header in a response to a request via the auth_http_basic scheme, or a request to the `auth` endpoint."
},
"admin_cookie": {
"type": "apikey",
"in": "cookie",
"name": "session",
"description": "[deprecated] Identical to the auth_cookie scheme but only for users recognized as admin by the Polaris server"
}
},
"links": {},
"callbacks": {}
},
"security": []
}
}

View file

@ -0,0 +1,20 @@
CREATE TEMPORARY TABLE songs_backup(id, path, parent, track_number, disc_number, title, artist, album_artist, year, album, artwork, duration);
INSERT INTO songs_backup SELECT id, path, parent, track_number, disc_number, title, artist, album_artist, year, album, artwork, duration FROM songs;
DROP TABLE songs;
CREATE TABLE songs (
id INTEGER PRIMARY KEY NOT NULL,
path TEXT NOT NULL,
parent TEXT NOT NULL,
track_number INTEGER,
disc_number INTEGER,
title TEXT,
artist TEXT,
album_artist TEXT,
year INTEGER,
album TEXT,
artwork TEXT,
duration INTEGER,
UNIQUE(path) ON CONFLICT REPLACE
);
INSERT INTO songs SELECT * FROM songs_backup;
DROP TABLE songs_backup;

View file

@ -0,0 +1,4 @@
ALTER TABLE songs ADD COLUMN lyricist TEXT;
ALTER TABLE songs ADD COLUMN composer TEXT;
ALTER TABLE songs ADD COLUMN genre TEXT;
ALTER TABLE songs ADD COLUMN label TEXT;

View file

@ -1,7 +1,7 @@
use std::fs;
use std::path::PathBuf;
use crate::db::DB;
use crate::db::{self, DB};
use crate::paths::Paths;
pub mod config;
@ -17,6 +17,18 @@ pub mod vfs;
#[cfg(test)]
pub mod test;
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error(transparent)]
Config(#[from] config::Error),
#[error(transparent)]
Database(#[from] db::Error),
#[error("Filesystem error for `{0}`: `{1}`")]
Io(PathBuf, std::io::Error),
#[error(transparent)]
Settings(#[from] settings::Error),
}
#[derive(Clone)]
pub struct App {
pub port: u16,
@ -36,12 +48,16 @@ pub struct App {
}
impl App {
pub fn new(port: u16, paths: Paths) -> anyhow::Result<Self> {
pub fn new(port: u16, paths: Paths) -> Result<Self, Error> {
let db = DB::new(&paths.db_file_path)?;
fs::create_dir_all(&paths.web_dir_path)?;
fs::create_dir_all(&paths.swagger_dir_path)?;
fs::create_dir_all(&paths.web_dir_path)
.map_err(|e| Error::Io(paths.web_dir_path.clone(), e))?;
fs::create_dir_all(&paths.swagger_dir_path)
.map_err(|e| Error::Io(paths.swagger_dir_path.clone(), e))?;
let thumbnails_dir_path = paths.cache_dir_path.join("thumbnails");
fs::create_dir_all(&thumbnails_dir_path)
.map_err(|e| Error::Io(thumbnails_dir_path.clone(), e))?;
let vfs_manager = vfs::Manager::new(db.clone());
let settings_manager = settings::Manager::new(db.clone());

src/app/config.rs (new file, 195 lines changed)
View file

@ -0,0 +1,195 @@
use serde::Deserialize;
use std::io::Read;
use std::path::{Path, PathBuf};
use crate::app::{ddns, settings, user, vfs};
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error(transparent)]
Ddns(#[from] ddns::Error),
#[error("Filesystem error for `{0}`: `{1}`")]
Io(PathBuf, std::io::Error),
#[error(transparent)]
Settings(#[from] settings::Error),
#[error(transparent)]
Toml(#[from] toml::de::Error),
#[error(transparent)]
User(#[from] user::Error),
#[error(transparent)]
Vfs(#[from] vfs::Error),
}
#[derive(Default, Deserialize)]
pub struct Config {
pub settings: Option<settings::NewSettings>,
pub mount_dirs: Option<Vec<vfs::MountDir>>,
pub ydns: Option<ddns::Config>,
pub users: Option<Vec<user::NewUser>>,
}
impl Config {
pub fn from_path(path: &Path) -> Result<Config, Error> {
let mut config_file =
std::fs::File::open(path).map_err(|e| Error::Io(path.to_owned(), e))?;
let mut config_file_content = String::new();
config_file
.read_to_string(&mut config_file_content)
.map_err(|e| Error::Io(path.to_owned(), e))?;
let config = toml::de::from_str::<Self>(&config_file_content)?;
Ok(config)
}
}
#[derive(Clone)]
pub struct Manager {
settings_manager: settings::Manager,
user_manager: user::Manager,
vfs_manager: vfs::Manager,
ddns_manager: ddns::Manager,
}
impl Manager {
pub fn new(
settings_manager: settings::Manager,
user_manager: user::Manager,
vfs_manager: vfs::Manager,
ddns_manager: ddns::Manager,
) -> Self {
Self {
settings_manager,
user_manager,
vfs_manager,
ddns_manager,
}
}
pub fn apply(&self, config: &Config) -> Result<(), Error> {
if let Some(new_settings) = &config.settings {
self.settings_manager.amend(new_settings)?;
}
if let Some(mount_dirs) = &config.mount_dirs {
self.vfs_manager.set_mount_dirs(mount_dirs)?;
}
if let Some(ddns_config) = &config.ydns {
self.ddns_manager.set_config(ddns_config)?;
}
if let Some(ref users) = config.users {
let old_users: Vec<user::User> = self.user_manager.list()?;
// Delete users that are not in new list
for old_user in old_users
.iter()
.filter(|old_user| !users.iter().any(|u| u.name == old_user.name))
{
self.user_manager.delete(&old_user.name)?;
}
// Insert new users
for new_user in users
.iter()
.filter(|u| !old_users.iter().any(|old_user| old_user.name == u.name))
{
self.user_manager.create(new_user)?;
}
// Update users
for user in users {
self.user_manager.set_password(&user.name, &user.password)?;
self.user_manager.set_is_admin(&user.name, user.admin)?;
}
}
Ok(())
}
}
#[cfg(test)]
mod test {
use super::*;
use crate::app::test;
use crate::test_name;
#[test]
fn apply_saves_misc_settings() {
let ctx = test::ContextBuilder::new(test_name!()).build();
let new_config = Config {
settings: Some(settings::NewSettings {
album_art_pattern: Some("🖼️\\.jpg".into()),
reindex_every_n_seconds: Some(100),
}),
..Default::default()
};
ctx.config_manager.apply(&new_config).unwrap();
let settings = ctx.settings_manager.read().unwrap();
let new_settings = new_config.settings.unwrap();
assert_eq!(
settings.index_album_art_pattern,
new_settings.album_art_pattern.unwrap()
);
assert_eq!(
settings.index_sleep_duration_seconds,
new_settings.reindex_every_n_seconds.unwrap()
);
}
#[test]
fn apply_saves_mount_points() {
let ctx = test::ContextBuilder::new(test_name!()).build();
let new_config = Config {
mount_dirs: Some(vec![vfs::MountDir {
source: "/home/music".into(),
name: "🎵📁".into(),
}]),
..Default::default()
};
ctx.config_manager.apply(&new_config).unwrap();
let actual_mount_dirs: Vec<vfs::MountDir> = ctx.vfs_manager.mount_dirs().unwrap();
assert_eq!(actual_mount_dirs, new_config.mount_dirs.unwrap());
}
#[test]
fn apply_saves_ddns_settings() {
let ctx = test::ContextBuilder::new(test_name!()).build();
let new_config = Config {
ydns: Some(ddns::Config {
host: "🐸🐸🐸.ydns.eu".into(),
username: "kfr🐸g".into(),
password: "tasty🐞".into(),
}),
..Default::default()
};
ctx.config_manager.apply(&new_config).unwrap();
let actual_ddns = ctx.ddns_manager.config().unwrap();
assert_eq!(actual_ddns, new_config.ydns.unwrap());
}
#[test]
fn apply_can_toggle_admin() {
let ctx = test::ContextBuilder::new(test_name!())
.user("Walter", "Tasty🍖", true)
.build();
assert!(ctx.user_manager.list().unwrap()[0].is_admin());
let new_config = Config {
users: Some(vec![user::NewUser {
name: "Walter".into(),
password: "Tasty🍖".into(),
admin: false,
}]),
..Default::default()
};
ctx.config_manager.apply(&new_config).unwrap();
assert!(!ctx.user_manager.list().unwrap()[0].is_admin());
}
}

View file

@ -1,11 +0,0 @@
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("Unspecified")]
Unspecified,
}
impl From<anyhow::Error> for Error {
fn from(_: anyhow::Error) -> Self {
Error::Unspecified
}
}

View file

@ -1,83 +0,0 @@
use super::*;
use crate::app::{ddns, settings, user, vfs};
#[derive(Clone)]
pub struct Manager {
settings_manager: settings::Manager,
user_manager: user::Manager,
vfs_manager: vfs::Manager,
ddns_manager: ddns::Manager,
}
impl Manager {
pub fn new(
settings_manager: settings::Manager,
user_manager: user::Manager,
vfs_manager: vfs::Manager,
ddns_manager: ddns::Manager,
) -> Self {
Self {
settings_manager,
user_manager,
vfs_manager,
ddns_manager,
}
}
pub fn apply(&self, config: &Config) -> Result<(), Error> {
if let Some(new_settings) = &config.settings {
self.settings_manager
.amend(new_settings)
.map_err(|_| Error::Unspecified)?;
}
if let Some(mount_dirs) = &config.mount_dirs {
self.vfs_manager
.set_mount_dirs(&mount_dirs)
.map_err(|_| Error::Unspecified)?;
}
if let Some(ddns_config) = &config.ydns {
self.ddns_manager
.set_config(&ddns_config)
.map_err(|_| Error::Unspecified)?;
}
if let Some(ref users) = config.users {
let old_users: Vec<user::User> =
self.user_manager.list().map_err(|_| Error::Unspecified)?;
// Delete users that are not in new list
for old_user in old_users
.iter()
.filter(|old_user| !users.iter().any(|u| u.name == old_user.name))
{
self.user_manager
.delete(&old_user.name)
.map_err(|_| Error::Unspecified)?;
}
// Insert new users
for new_user in users
.iter()
.filter(|u| !old_users.iter().any(|old_user| old_user.name == u.name))
{
self.user_manager
.create(new_user)
.map_err(|_| Error::Unspecified)?;
}
// Update users
for user in users {
self.user_manager
.set_password(&user.name, &user.password)
.map_err(|_| Error::Unspecified)?;
self.user_manager
.set_is_admin(&user.name, user.admin)
.map_err(|_| Error::Unspecified)?;
}
}
Ok(())
}
}

View file

@ -1,31 +0,0 @@
use serde::Deserialize;
use std::io::Read;
use std::path;
use crate::app::{ddns, settings, user, vfs};
mod error;
mod manager;
#[cfg(test)]
mod test;
pub use error::*;
pub use manager::*;
#[derive(Default, Deserialize)]
pub struct Config {
pub settings: Option<settings::NewSettings>,
pub mount_dirs: Option<Vec<vfs::MountDir>>,
pub ydns: Option<ddns::Config>,
pub users: Option<Vec<user::NewUser>>,
}
impl Config {
pub fn from_path(path: &path::Path) -> anyhow::Result<Config> {
let mut config_file = std::fs::File::open(path)?;
let mut config_file_content = String::new();
config_file.read_to_string(&mut config_file_content)?;
let config = toml::de::from_str::<Self>(&config_file_content)?;
Ok(config)
}
}

View file

@ -1,83 +0,0 @@
use super::*;
use crate::app::{ddns, settings, test, user, vfs};
use crate::test_name;
#[test]
fn apply_saves_misc_settings() {
let ctx = test::ContextBuilder::new(test_name!()).build();
let new_config = Config {
settings: Some(settings::NewSettings {
album_art_pattern: Some("🖼️\\.jpg".into()),
reindex_every_n_seconds: Some(100),
..Default::default()
}),
..Default::default()
};
ctx.config_manager.apply(&new_config).unwrap();
let settings = ctx.settings_manager.read().unwrap();
let new_settings = new_config.settings.unwrap();
assert_eq!(
settings.album_art_pattern,
new_settings.album_art_pattern.unwrap()
);
assert_eq!(
settings.reindex_every_n_seconds,
new_settings.reindex_every_n_seconds.unwrap()
);
}
#[test]
fn apply_saves_mount_points() {
let ctx = test::ContextBuilder::new(test_name!()).build();
let new_config = Config {
mount_dirs: Some(vec![vfs::MountDir {
source: "/home/music".into(),
name: "🎵📁".into(),
}]),
..Default::default()
};
ctx.config_manager.apply(&new_config).unwrap();
let actual_mount_dirs: Vec<vfs::MountDir> = ctx.vfs_manager.mount_dirs().unwrap();
assert_eq!(actual_mount_dirs, new_config.mount_dirs.unwrap());
}
#[test]
fn apply_saves_ddns_settings() {
let ctx = test::ContextBuilder::new(test_name!()).build();
let new_config = Config {
ydns: Some(ddns::Config {
host: "🐸🐸🐸.ydns.eu".into(),
username: "kfr🐸g".into(),
password: "tasty🐞".into(),
}),
..Default::default()
};
ctx.config_manager.apply(&new_config).unwrap();
let actual_ddns = ctx.ddns_manager.config().unwrap();
assert_eq!(actual_ddns, new_config.ydns.unwrap());
}
#[test]
fn apply_can_toggle_admin() {
let ctx = test::ContextBuilder::new(test_name!())
.user("Walter", "Tasty🍖", true)
.build();
assert!(ctx.user_manager.list().unwrap()[0].is_admin());
let new_config = Config {
users: Some(vec![user::NewUser {
name: "Walter".into(),
password: "Tasty🍖".into(),
admin: false,
}]),
..Default::default()
};
ctx.config_manager.apply(&new_config).unwrap();
assert!(!ctx.user_manager.list().unwrap()[0].is_admin());
}

View file

@ -1,15 +1,31 @@
use anyhow::*;
use diesel::prelude::*;
use log::{error, info};
use serde::{Deserialize, Serialize};
use std::thread;
use std::time;
use ureq;
use super::*;
use crate::db::DB;
use crate::db::{self, ddns_config, DB};
const DDNS_UPDATE_URL: &str = "https://ydns.io/api/v1/update/";
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("DDNS update query failed with HTTP status code `{0}`")]
UpdateQueryFailed(u16),
#[error(transparent)]
DatabaseConnection(#[from] db::Error),
#[error(transparent)]
Database(#[from] diesel::result::Error),
}
#[derive(Clone, Debug, Deserialize, Insertable, PartialEq, Eq, Queryable, Serialize)]
#[diesel(table_name = ddns_config)]
pub struct Config {
pub host: String,
pub username: String,
pub password: String,
}
#[derive(Clone)]
pub struct Manager {
db: DB,
@ -20,7 +36,7 @@ impl Manager {
Self { db }
}
fn update_my_ip(&self) -> Result<()> {
fn update_my_ip(&self) -> Result<(), Error> {
let config = self.config()?;
if config.host.is_empty() || config.username.is_empty() {
info!("Skipping DDNS update because credentials are missing");
@ -32,34 +48,31 @@ impl Manager {
.auth(&config.username, &config.password)
.call();
if !response.ok() {
bail!(
"DDNS update query failed with status code: {}",
response.status()
);
if response.ok() {
Ok(())
} else {
Err(Error::UpdateQueryFailed(response.status()))
}
Ok(())
}
pub fn config(&self) -> Result<Config> {
pub fn config(&self) -> Result<Config, Error> {
use crate::db::ddns_config::dsl::*;
let connection = self.db.connect()?;
let mut connection = self.db.connect()?;
Ok(ddns_config
.select((host, username, password))
.get_result(&connection)?)
.get_result(&mut connection)?)
}
pub fn set_config(&self, new_config: &Config) -> Result<()> {
pub fn set_config(&self, new_config: &Config) -> Result<(), Error> {
use crate::db::ddns_config::dsl::*;
let connection = self.db.connect()?;
let mut connection = self.db.connect()?;
diesel::update(ddns_config)
.set((
host.eq(&new_config.host),
username.eq(&new_config.username),
password.eq(&new_config.password),
))
.execute(&connection)?;
.execute(&mut connection)?;
Ok(())
}

View file

@ -1,11 +0,0 @@
use serde::{Deserialize, Serialize};
use crate::db::ddns_config;
#[derive(Clone, Debug, Deserialize, Insertable, PartialEq, Queryable, Serialize)]
#[table_name = "ddns_config"]
pub struct Config {
pub host: String,
pub username: String,
pub password: String,
}

View file

@ -1,5 +0,0 @@
mod config;
mod manager;
pub use config::Config;
pub use manager::Manager;

View file

@ -1,4 +1,3 @@
use diesel;
use log::error;
use std::sync::{Arc, Condvar, Mutex};
use std::time::Duration;
@ -31,7 +30,12 @@ impl Index {
db,
vfs_manager,
settings_manager,
pending_reindex: Arc::new((Mutex::new(false), Condvar::new())),
pending_reindex: Arc::new((
#[allow(clippy::mutex_atomic)]
Mutex::new(false),
Condvar::new(),
)),
};
let commands_index = index.clone();

View file

@ -1,20 +1,34 @@
use anyhow::*;
use ape;
use id3;
use id3::TagLike;
use lewton::inside_ogg::OggStreamReader;
use log::error;
use metaflac;
use mp3_duration;
use mp4ameta;
use opus_headers;
use regex::Regex;
use std::fs;
use std::path::Path;
use std::path::{Path, PathBuf};
use crate::utils;
use crate::utils::AudioFormat;
#[derive(Debug, Clone, PartialEq)]
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error(transparent)]
Ape(#[from] ape::Error),
#[error(transparent)]
Id3(#[from] id3::Error),
#[error("Filesystem error for `{0}`: `{1}`")]
Io(PathBuf, std::io::Error),
#[error(transparent)]
Metaflac(#[from] metaflac::Error),
#[error(transparent)]
Mp4aMeta(#[from] mp4ameta::Error),
#[error(transparent)]
Opus(#[from] opus_headers::ParseError),
#[error(transparent)]
Vorbis(#[from] lewton::VorbisError),
#[error("Could not find a Vorbis comment within flac file")]
VorbisCommentNotFoundInFlacFile,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SongTags {
pub disc_number: Option<u32>,
pub track_number: Option<u32>,
@ -25,6 +39,10 @@ pub struct SongTags {
pub album: Option<String>,
pub year: Option<i32>,
pub has_artwork: bool,
pub lyricist: Option<String>,
pub composer: Option<String>,
pub genre: Option<String>,
pub label: Option<String>,
}
impl From<id3::Tag> for SongTags {
@ -40,48 +58,72 @@ impl From<id3::Tag> for SongTags {
.year()
.map(|y| y as i32)
.or_else(|| tag.date_released().map(|d| d.year))
.or_else(|| tag.original_date_released().map(|d| d.year))
.or_else(|| tag.date_recorded().map(|d| d.year));
let has_artwork = tag.pictures().count() > 0;
let lyricist = tag.get_text("TEXT");
let composer = tag.get_text("TCOM");
let genre = tag.genre().map(|s| s.to_string());
let label = tag.get_text("TPUB");
SongTags {
disc_number,
track_number,
title,
duration,
artist,
album_artist,
album,
title,
duration,
disc_number,
track_number,
year,
has_artwork,
lyricist,
composer,
genre,
label,
}
}
}
pub fn read(path: &Path) -> Option<SongTags> {
let data = match utils::get_audio_format(path) {
Some(AudioFormat::AIFF) => Some(read_aiff(path)),
Some(AudioFormat::APE) => Some(read_ape(path)),
Some(AudioFormat::FLAC) => Some(read_flac(path)),
Some(AudioFormat::MP3) => Some(read_mp3(path)),
Some(AudioFormat::MP4) => Some(read_mp4(path)),
Some(AudioFormat::MPC) => Some(read_ape(path)),
Some(AudioFormat::OGG) => Some(read_vorbis(path)),
Some(AudioFormat::OPUS) => Some(read_opus(path)),
Some(AudioFormat::WAVE) => Some(read_wave(path)),
None => None,
Some(AudioFormat::AIFF) => read_aiff(path),
Some(AudioFormat::APE) => read_ape(path),
Some(AudioFormat::FLAC) => read_flac(path),
Some(AudioFormat::MP3) => read_mp3(path),
Some(AudioFormat::MP4) => read_mp4(path),
Some(AudioFormat::MPC) => read_ape(path),
Some(AudioFormat::OGG) => read_vorbis(path),
Some(AudioFormat::OPUS) => read_opus(path),
Some(AudioFormat::WAVE) => read_wave(path),
None => return None,
};
match data {
Some(Ok(d)) => Some(d),
Some(Err(e)) => {
Ok(d) => Some(d),
Err(e) => {
error!("Error while reading file metadata for '{:?}': {}", path, e);
None
}
None => None,
}
}
fn read_mp3(path: &Path) -> Result<SongTags> {
let tag = id3::Tag::read_from_path(&path).or_else(|error| {
trait FrameContent {
/// Returns the text value stored in the frame, if any.
/// For example, "TCOM" returns the composer field.
fn get_text(&self, key: &str) -> Option<String>;
}
impl FrameContent for id3::Tag {
fn get_text(&self, key: &str) -> Option<String> {
let frame = self.get(key)?;
match frame.content() {
id3::Content::Text(value) => Some(value.to_string()),
_ => None,
}
}
}
fn read_mp3(path: &Path) -> Result<SongTags, Error> {
let tag = id3::Tag::read_from_path(path).or_else(|error| {
if let Some(tag) = error.partial_tag {
Ok(tag)
} else {
@ -90,7 +132,7 @@ fn read_mp3(path: &Path) -> Result<SongTags> {
})?;
let duration = {
mp3_duration::from_path(&path)
mp3_duration::from_path(path)
.map(|d| d.as_secs() as u32)
.ok()
};
@ -100,8 +142,8 @@ fn read_mp3(path: &Path) -> Result<SongTags> {
Ok(song_tags)
}
fn read_aiff(path: &Path) -> Result<SongTags> {
let tag = id3::Tag::read_from_aiff(&path).or_else(|error| {
fn read_aiff(path: &Path) -> Result<SongTags, Error> {
let tag = id3::Tag::read_from_aiff_path(path).or_else(|error| {
if let Some(tag) = error.partial_tag {
Ok(tag)
} else {
@ -111,8 +153,8 @@ fn read_aiff(path: &Path) -> Result<SongTags> {
Ok(tag.into())
}
fn read_wave(path: &Path) -> Result<SongTags> {
let tag = id3::Tag::read_from_wav(&path).or_else(|error| {
fn read_wave(path: &Path) -> Result<SongTags, Error> {
let tag = id3::Tag::read_from_wav_path(path).or_else(|error| {
if let Some(tag) = error.partial_tag {
Ok(tag)
} else {
@ -150,8 +192,8 @@ fn read_ape_x_of_y(item: &ape::Item) -> Option<u32> {
}
}
fn read_ape(path: &Path) -> Result<SongTags> {
let tag = ape::read(path)?;
fn read_ape(path: &Path) -> Result<SongTags, Error> {
let tag = ape::read_from_path(path)?;
let artist = tag.item("Artist").and_then(read_ape_string);
let album = tag.item("Album").and_then(read_ape_string);
let album_artist = tag.item("Album artist").and_then(read_ape_string);
@ -159,6 +201,10 @@ fn read_ape(path: &Path) -> Result<SongTags> {
let year = tag.item("Year").and_then(read_ape_i32);
let disc_number = tag.item("Disc").and_then(read_ape_x_of_y);
let track_number = tag.item("Track").and_then(read_ape_x_of_y);
let lyricist = tag.item("LYRICIST").and_then(read_ape_string);
let composer = tag.item("COMPOSER").and_then(read_ape_string);
let genre = tag.item("GENRE").and_then(read_ape_string);
let label = tag.item("PUBLISHER").and_then(read_ape_string);
Ok(SongTags {
artist,
album_artist,
@ -169,11 +215,15 @@ fn read_ape(path: &Path) -> Result<SongTags> {
track_number,
year,
has_artwork: false,
lyricist,
composer,
genre,
label,
})
}
fn read_vorbis(path: &Path) -> Result<SongTags> {
let file = fs::File::open(path)?;
fn read_vorbis(path: &Path) -> Result<SongTags, Error> {
let file = fs::File::open(path).map_err(|e| Error::Io(path.to_owned(), e))?;
let source = OggStreamReader::new(file)?;
let mut tags = SongTags {
@ -186,6 +236,10 @@ fn read_vorbis(path: &Path) -> Result<SongTags> {
track_number: None,
year: None,
has_artwork: false,
lyricist: None,
composer: None,
genre: None,
label: None,
};
for (key, value) in source.comment_hdr.comment_list {
@ -198,6 +252,10 @@ fn read_vorbis(path: &Path) -> Result<SongTags> {
"TRACKNUMBER" => tags.track_number = value.parse::<u32>().ok(),
"DISCNUMBER" => tags.disc_number = value.parse::<u32>().ok(),
"DATE" => tags.year = value.parse::<i32>().ok(),
"LYRICIST" => tags.lyricist = Some(value),
"COMPOSER" => tags.composer = Some(value),
"GENRE" => tags.genre = Some(value),
"PUBLISHER" => tags.label = Some(value),
_ => (),
}
}
@ -206,7 +264,7 @@ fn read_vorbis(path: &Path) -> Result<SongTags> {
Ok(tags)
}
fn read_opus(path: &Path) -> Result<SongTags> {
fn read_opus(path: &Path) -> Result<SongTags, Error> {
let headers = opus_headers::parse_from_path(path)?;
let mut tags = SongTags {
@ -219,6 +277,10 @@ fn read_opus(path: &Path) -> Result<SongTags> {
track_number: None,
year: None,
has_artwork: false,
lyricist: None,
composer: None,
genre: None,
label: None,
};
for (key, value) in headers.comments.user_comments {
@ -231,6 +293,10 @@ fn read_opus(path: &Path) -> Result<SongTags> {
"TRACKNUMBER" => tags.track_number = value.parse::<u32>().ok(),
"DISCNUMBER" => tags.disc_number = value.parse::<u32>().ok(),
"DATE" => tags.year = value.parse::<i32>().ok(),
"LYRICIST" => tags.lyricist = Some(value),
"COMPOSER" => tags.composer = Some(value),
"GENRE" => tags.genre = Some(value),
"PUBLISHER" => tags.label = Some(value),
_ => (),
}
}
@ -239,11 +305,11 @@ fn read_opus(path: &Path) -> Result<SongTags> {
Ok(tags)
}
fn read_flac(path: &Path) -> Result<SongTags> {
fn read_flac(path: &Path) -> Result<SongTags, Error> {
let tag = metaflac::Tag::read_from_path(path)?;
let vorbis = tag
.vorbis_comments()
.ok_or(anyhow!("Missing Vorbis comments"))?;
.ok_or(Error::VorbisCommentNotFoundInFlacFile)?;
let disc_number = vorbis
.get("DISCNUMBER")
.and_then(|d| d[0].parse::<u32>().ok());
@ -267,22 +333,31 @@ fn read_flac(path: &Path) -> Result<SongTags> {
track_number: vorbis.track(),
year,
has_artwork,
lyricist: vorbis.get("LYRICIST").map(|v| v[0].clone()),
composer: vorbis.get("COMPOSER").map(|v| v[0].clone()),
genre: vorbis.get("GENRE").map(|v| v[0].clone()),
label: vorbis.get("PUBLISHER").map(|v| v[0].clone()),
})
}
fn read_mp4(path: &Path) -> Result<SongTags> {
fn read_mp4(path: &Path) -> Result<SongTags, Error> {
let mut tag = mp4ameta::Tag::read_from_path(path)?;
let label_ident = mp4ameta::FreeformIdent::new("com.apple.iTunes", "Label");
Ok(SongTags {
artist: tag.take_artist(),
album_artist: tag.take_album_artist(),
album: tag.take_album(),
title: tag.take_title(),
duration: tag.duration().map(|v| v as u32),
duration: tag.duration().map(|v| v.as_secs() as u32),
disc_number: tag.disc_number().map(|d| d as u32),
track_number: tag.track_number().map(|d| d as u32),
year: tag.year().and_then(|v| v.parse::<i32>().ok()),
has_artwork: tag.artwork().is_some(),
lyricist: tag.take_lyricist(),
composer: tag.take_composer(),
genre: tag.take_genre(),
label: tag.take_strings_of(&label_ident).next(),
})
}
@ -298,6 +373,10 @@ fn reads_file_metadata() {
duration: None,
year: Some(2016),
has_artwork: false,
lyricist: Some("TEST LYRICIST".into()),
composer: Some("TEST COMPOSER".into()),
genre: Some("TEST GENRE".into()),
label: Some("TEST LABEL".into()),
};
let flac_sample_tag = SongTags {
duration: Some(0),

View file

@ -1,31 +1,26 @@
use anyhow::*;
use diesel;
use diesel::dsl::sql;
use diesel::prelude::*;
use diesel::sql_types;
use std::path::Path;
use std::path::{Path, PathBuf};
use super::*;
use crate::db::{directories, songs};
use crate::db::{self, directories, songs};
#[derive(thiserror::Error, Debug)]
pub enum QueryError {
#[error("VFS path not found")]
VFSPathNotFound,
#[error("Unspecified")]
Unspecified,
#[error(transparent)]
Database(#[from] diesel::result::Error),
#[error(transparent)]
DatabaseConnection(#[from] db::Error),
#[error("Song was not found: `{0}`")]
SongNotFound(PathBuf),
#[error(transparent)]
Vfs(#[from] vfs::Error),
}
impl From<anyhow::Error> for QueryError {
fn from(_: anyhow::Error) -> Self {
QueryError::Unspecified
}
}
no_arg_sql_function!(
random,
sql_types::Integer,
"Represents the SQL RANDOM() function"
sql_function!(
#[aggregate]
fn random() -> Integer;
);
impl Index {
@ -35,30 +30,26 @@ impl Index {
{
let mut output = Vec::new();
let vfs = self.vfs_manager.get_vfs()?;
let connection = self.db.connect()?;
let mut connection = self.db.connect()?;
if virtual_path.as_ref().components().count() == 0 {
// Browse top-level
let real_directories: Vec<Directory> = directories::table
.filter(directories::parent.is_null())
.load(&connection)
.map_err(anyhow::Error::new)?;
.load(&mut connection)?;
let virtual_directories = real_directories
.into_iter()
.filter_map(|d| d.virtualize(&vfs));
output.extend(virtual_directories.map(CollectionFile::Directory));
} else {
// Browse sub-directory
let real_path = vfs
.virtual_to_real(virtual_path)
.map_err(|_| QueryError::VFSPathNotFound)?;
let real_path = vfs.virtual_to_real(virtual_path)?;
let real_path_string = real_path.as_path().to_string_lossy().into_owned();
let real_directories: Vec<Directory> = directories::table
.filter(directories::parent.eq(&real_path_string))
.order(sql::<sql_types::Bool>("path COLLATE NOCASE ASC"))
.load(&connection)
.map_err(anyhow::Error::new)?;
.load(&mut connection)?;
let virtual_directories = real_directories
.into_iter()
.filter_map(|d| d.virtualize(&vfs));
@ -67,8 +58,7 @@ impl Index {
let real_songs: Vec<Song> = songs::table
.filter(songs::parent.eq(&real_path_string))
.order(sql::<sql_types::Bool>("path COLLATE NOCASE ASC"))
.load(&connection)
.map_err(anyhow::Error::new)?;
.load(&mut connection)?;
let virtual_songs = real_songs.into_iter().filter_map(|s| s.virtualize(&vfs));
output.extend(virtual_songs.map(CollectionFile::Song));
}
@ -82,66 +72,60 @@ impl Index {
{
use self::songs::dsl::*;
let vfs = self.vfs_manager.get_vfs()?;
let connection = self.db.connect()?;
let mut connection = self.db.connect()?;
let real_songs: Vec<Song> = if virtual_path.as_ref().parent() != None {
let real_path = vfs
.virtual_to_real(virtual_path)
.map_err(|_| QueryError::VFSPathNotFound)?;
let real_songs: Vec<Song> = if virtual_path.as_ref().parent().is_some() {
let real_path = vfs.virtual_to_real(virtual_path)?;
let song_path_filter = {
let mut path_buf = real_path.clone();
let mut path_buf = real_path;
path_buf.push("%");
path_buf.as_path().to_string_lossy().into_owned()
};
songs
.filter(path.like(&song_path_filter))
.order(path)
.load(&connection)
.map_err(anyhow::Error::new)?
.load(&mut connection)?
} else {
songs
.order(path)
.load(&connection)
.map_err(anyhow::Error::new)?
songs.order(path).load(&mut connection)?
};
let virtual_songs = real_songs.into_iter().filter_map(|s| s.virtualize(&vfs));
Ok(virtual_songs.collect::<Vec<_>>())
}
pub fn get_random_albums(&self, count: i64) -> Result<Vec<Directory>> {
pub fn get_random_albums(&self, count: i64) -> Result<Vec<Directory>, QueryError> {
use self::directories::dsl::*;
let vfs = self.vfs_manager.get_vfs()?;
let connection = self.db.connect()?;
let mut connection = self.db.connect()?;
let real_directories: Vec<Directory> = directories
.filter(album.is_not_null())
.limit(count)
.order(random)
.load(&connection)?;
.order(random())
.load(&mut connection)?;
let virtual_directories = real_directories
.into_iter()
.filter_map(|d| d.virtualize(&vfs));
Ok(virtual_directories.collect::<Vec<_>>())
}
pub fn get_recent_albums(&self, count: i64) -> Result<Vec<Directory>> {
pub fn get_recent_albums(&self, count: i64) -> Result<Vec<Directory>, QueryError> {
use self::directories::dsl::*;
let vfs = self.vfs_manager.get_vfs()?;
let connection = self.db.connect()?;
let mut connection = self.db.connect()?;
let real_directories: Vec<Directory> = directories
.filter(album.is_not_null())
.order(date_added.desc())
.limit(count)
.load(&connection)?;
.load(&mut connection)?;
let virtual_directories = real_directories
.into_iter()
.filter_map(|d| d.virtualize(&vfs));
Ok(virtual_directories.collect::<Vec<_>>())
}
pub fn search(&self, query: &str) -> Result<Vec<CollectionFile>> {
pub fn search(&self, query: &str) -> Result<Vec<CollectionFile>, QueryError> {
let vfs = self.vfs_manager.get_vfs()?;
let connection = self.db.connect()?;
let mut connection = self.db.connect()?;
let like_test = format!("%{}%", query);
let mut output = Vec::new();
@ -151,7 +135,7 @@ impl Index {
let real_directories: Vec<Directory> = directories
.filter(path.like(&like_test))
.filter(parent.not_like(&like_test))
.load(&connection)?;
.load(&mut connection)?;
let virtual_directories = real_directories
.into_iter()
@ -172,7 +156,7 @@ impl Index {
.or(album_artist.like(&like_test)),
)
.filter(parent.not_like(&like_test))
.load(&connection)?;
.load(&mut connection)?;
let virtual_songs = real_songs.into_iter().filter_map(|d| d.virtualize(&vfs));
@ -182,9 +166,9 @@ impl Index {
Ok(output)
}
pub fn get_song(&self, virtual_path: &Path) -> Result<Song> {
pub fn get_song(&self, virtual_path: &Path) -> Result<Song, QueryError> {
let vfs = self.vfs_manager.get_vfs()?;
let connection = self.db.connect()?;
let mut connection = self.db.connect()?;
let real_path = vfs.virtual_to_real(virtual_path)?;
let real_path_string = real_path.as_path().to_string_lossy();
@ -192,11 +176,11 @@ impl Index {
use self::songs::dsl::*;
let real_song: Song = songs
.filter(path.eq(real_path_string))
.get_result(&connection)?;
.get_result(&mut connection)?;
match real_song.virtualize(&vfs) {
Some(s) => Ok(s),
_ => bail!("Missing VFS mapping"),
None => Err(QueryError::SongNotFound(real_path)),
}
}
}
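
For reference, the recurring change in this file is the diesel 2.x requirement that running a query take exclusive access to the connection, which is why every `let connection` / `load(&connection)` pair above becomes `let mut connection` / `load(&mut connection)`. A minimal sketch of the new call shape, using an illustrative two-column `songs` table rather than the real schema from src/db:

use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;

// Illustrative schema only, so the 1.x -> 2.x borrowing change can be shown in isolation.
diesel::table! {
    songs (id) {
        id -> Integer,
        path -> Text,
    }
}

fn load_song_paths(connection: &mut SqliteConnection) -> diesel::QueryResult<Vec<String>> {
    // diesel 1.x accepted `load(&connection)`; diesel 2.x requires `&mut`,
    // hence the `&mut connection` pattern used throughout this commit.
    songs::table.select(songs::path).load(connection)
}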

View file

@ -18,9 +18,9 @@ fn update_adds_new_content() {
ctx.index.update().unwrap();
ctx.index.update().unwrap(); // Validates that subsequent updates don't run into conflicts
let connection = ctx.db.connect().unwrap();
let all_directories: Vec<Directory> = directories::table.load(&connection).unwrap();
let all_songs: Vec<Song> = songs::table.load(&connection).unwrap();
let mut connection = ctx.db.connect().unwrap();
let all_directories: Vec<Directory> = directories::table.load(&mut connection).unwrap();
let all_songs: Vec<Song> = songs::table.load(&mut connection).unwrap();
assert_eq!(all_directories.len(), 6);
assert_eq!(all_songs.len(), 13);
}
@ -47,9 +47,9 @@ fn update_removes_missing_content() {
ctx.index.update().unwrap();
{
let connection = ctx.db.connect().unwrap();
let all_directories: Vec<Directory> = directories::table.load(&connection).unwrap();
let all_songs: Vec<Song> = songs::table.load(&connection).unwrap();
let mut connection = ctx.db.connect().unwrap();
let all_directories: Vec<Directory> = directories::table.load(&mut connection).unwrap();
let all_songs: Vec<Song> = songs::table.load(&mut connection).unwrap();
assert_eq!(all_directories.len(), 6);
assert_eq!(all_songs.len(), 13);
}
@ -58,9 +58,9 @@ fn update_removes_missing_content() {
std::fs::remove_dir_all(&khemmis_directory).unwrap();
ctx.index.update().unwrap();
{
let connection = ctx.db.connect().unwrap();
let all_directories: Vec<Directory> = directories::table.load(&connection).unwrap();
let all_songs: Vec<Song> = songs::table.load(&connection).unwrap();
let mut connection = ctx.db.connect().unwrap();
let all_directories: Vec<Directory> = directories::table.load(&mut connection).unwrap();
let all_songs: Vec<Song> = songs::table.load(&mut connection).unwrap();
assert_eq!(all_directories.len(), 4);
assert_eq!(all_songs.len(), 8);
}
@ -211,15 +211,12 @@ fn album_art_pattern_is_case_insensitive() {
.mount(TEST_MOUNT_NAME, "test-data/small-collection")
.build();
let patterns = vec!["folder", "FOLDER"]
.iter()
.map(|s| s.to_string())
.collect::<Vec<_>>();
let patterns = vec!["folder", "FOLDER"];
for pattern in patterns.into_iter() {
ctx.settings_manager
.amend(&settings::NewSettings {
album_art_pattern: Some(pattern),
album_art_pattern: Some(pattern.to_owned()),
..Default::default()
})
.unwrap();

View file

@ -4,14 +4,14 @@ use std::path::Path;
use crate::app::vfs::VFS;
use crate::db::songs;
#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum CollectionFile {
Directory(Directory),
Song(Song),
}
#[derive(Debug, PartialEq, Queryable, QueryableByName, Serialize, Deserialize)]
#[table_name = "songs"]
#[derive(Debug, PartialEq, Eq, Queryable, QueryableByName, Serialize, Deserialize)]
#[diesel(table_name = songs)]
pub struct Song {
#[serde(skip_serializing, skip_deserializing)]
id: i32,
@ -27,6 +27,10 @@ pub struct Song {
pub album: Option<String>,
pub artwork: Option<String>,
pub duration: Option<i32>,
pub lyricist: Option<String>,
pub composer: Option<String>,
pub genre: Option<String>,
pub label: Option<String>,
}
impl Song {
@ -45,7 +49,7 @@ impl Song {
}
}
#[derive(Debug, PartialEq, Queryable, Serialize, Deserialize)]
#[derive(Debug, PartialEq, Eq, Queryable, Serialize, Deserialize)]
pub struct Directory {
#[serde(skip_serializing, skip_deserializing)]
id: i32,

View file

@ -1,4 +1,3 @@
use anyhow::*;
use log::{error, info};
use std::time;
@ -7,18 +6,33 @@ mod collector;
mod inserter;
mod traverser;
use super::*;
use crate::app::index::Index;
use crate::app::vfs;
use crate::db;
use cleaner::Cleaner;
use collector::Collector;
use inserter::Inserter;
use traverser::Traverser;
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error(transparent)]
IndexClean(#[from] cleaner::Error),
#[error(transparent)]
Database(#[from] diesel::result::Error),
#[error(transparent)]
DatabaseConnection(#[from] db::Error),
#[error(transparent)]
Vfs(#[from] vfs::Error),
}
impl Index {
pub fn update(&self) -> Result<()> {
pub fn update(&self) -> Result<(), Error> {
let start = time::Instant::now();
info!("Beginning library index update");
let album_art_pattern = self.settings_manager.get_index_album_art_pattern()?;
let album_art_pattern = self.settings_manager.get_index_album_art_pattern().ok();
let cleaner = Cleaner::new(self.db.clone(), self.vfs_manager.clone());
cleaner.clean()?;

View file

@ -1,14 +1,24 @@
use anyhow::*;
use diesel;
use diesel::prelude::*;
use rayon::prelude::*;
use std::path::Path;
use crate::app::vfs;
use crate::db::{directories, songs, DB};
use crate::db::{self, directories, songs, DB};
const INDEX_BUILDING_CLEAN_BUFFER_SIZE: usize = 500; // Deletions in each transaction
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error(transparent)]
Database(#[from] diesel::result::Error),
#[error(transparent)]
DatabaseConnection(#[from] db::Error),
#[error(transparent)]
ThreadPoolBuilder(#[from] rayon::ThreadPoolBuildError),
#[error(transparent)]
Vfs(#[from] vfs::Error),
}
pub struct Cleaner {
db: DB,
vfs_manager: vfs::Manager,
@ -19,19 +29,19 @@ impl Cleaner {
Self { db, vfs_manager }
}
pub fn clean(&self) -> Result<()> {
pub fn clean(&self) -> Result<(), Error> {
let vfs = self.vfs_manager.get_vfs()?;
let all_directories: Vec<String> = {
let connection = self.db.connect()?;
let mut connection = self.db.connect()?;
directories::table
.select(directories::path)
.load(&connection)?
.load(&mut connection)?
};
let all_songs: Vec<String> = {
let connection = self.db.connect()?;
songs::table.select(songs::path).load(&connection)?
let mut connection = self.db.connect()?;
songs::table.select(songs::path).load(&mut connection)?
};
let list_missing_directories = || {
@ -59,14 +69,14 @@ impl Cleaner {
thread_pool.join(list_missing_directories, list_missing_songs);
{
let connection = self.db.connect()?;
let mut connection = self.db.connect()?;
for chunk in missing_directories[..].chunks(INDEX_BUILDING_CLEAN_BUFFER_SIZE) {
diesel::delete(directories::table.filter(directories::path.eq_any(chunk)))
.execute(&connection)?;
.execute(&mut connection)?;
}
for chunk in missing_songs[..].chunks(INDEX_BUILDING_CLEAN_BUFFER_SIZE) {
diesel::delete(songs::table.filter(songs::path.eq_any(chunk)))
.execute(&connection)?;
.execute(&mut connection)?;
}
}

View file

@ -7,14 +7,14 @@ use super::*;
pub struct Collector {
receiver: Receiver<traverser::Directory>,
sender: Sender<inserter::Item>,
album_art_pattern: Regex,
album_art_pattern: Option<Regex>,
}
impl Collector {
pub fn new(
receiver: Receiver<traverser::Directory>,
sender: Sender<inserter::Item>,
album_art_pattern: Regex,
album_art_pattern: Option<Regex>,
) -> Self {
Self {
receiver,
@ -24,11 +24,8 @@ impl Collector {
}
pub fn collect(&self) {
loop {
match self.receiver.recv() {
Ok(directory) => self.collect_directory(directory),
Err(_) => break,
}
while let Ok(directory) = self.receiver.recv() {
self.collect_directory(directory);
}
}
@ -88,6 +85,10 @@ impl Collector {
album: tags.album,
year: tags.year,
artwork: artwork_path,
lyricist: tags.lyricist,
composer: tags.composer,
genre: tags.genre,
label: tags.label,
})) {
error!("Error while sending song from collector: {}", e);
}
@ -122,8 +123,11 @@ impl Collector {
let regex_artwork = directory.other_files.iter().find_map(|path| {
let matches = path
.file_name()
.and_then(|n| n.to_str())
.map(|n| self.album_art_pattern.is_match(n))
.and_then(|name| name.to_str())
.map(|name| match &self.album_art_pattern {
Some(pattern) => pattern.is_match(name),
None => false,
})
.unwrap_or(false);
if matches {
Some(path.to_string_lossy().to_string())

View file

@ -1,6 +1,4 @@
use anyhow::*;
use crossbeam_channel::Receiver;
use diesel;
use diesel::prelude::*;
use log::error;
@ -9,7 +7,7 @@ use crate::db::{directories, songs, DB};
const INDEX_BUILDING_INSERT_BUFFER_SIZE: usize = 1000; // Insertions in each transaction
#[derive(Debug, Insertable)]
#[table_name = "songs"]
#[diesel(table_name = songs)]
pub struct Song {
pub path: String,
pub parent: String,
@ -22,10 +20,14 @@ pub struct Song {
pub album: Option<String>,
pub artwork: Option<String>,
pub duration: Option<i32>,
pub lyricist: Option<String>,
pub composer: Option<String>,
pub genre: Option<String>,
pub label: Option<String>,
}
#[derive(Debug, Insertable)]
#[table_name = "directories"]
#[diesel(table_name = directories)]
pub struct Directory {
pub path: String,
pub parent: Option<String>,
@ -53,19 +55,16 @@ impl Inserter {
let new_directories = Vec::with_capacity(INDEX_BUILDING_INSERT_BUFFER_SIZE);
let new_songs = Vec::with_capacity(INDEX_BUILDING_INSERT_BUFFER_SIZE);
Self {
db,
receiver,
new_directories,
new_songs,
db,
}
}
pub fn insert(&mut self) {
loop {
match self.receiver.recv() {
Ok(item) => self.insert_item(item),
Err(_) => break,
}
while let Ok(item) = self.receiver.recv() {
self.insert_item(item);
}
}
@ -87,34 +86,26 @@ impl Inserter {
}
fn flush_directories(&mut self) {
if self
.db
.connect()
.and_then(|connection| {
diesel::insert_into(directories::table)
.values(&self.new_directories)
.execute(&*connection) // TODO https://github.com/diesel-rs/diesel/issues/1822
.map_err(Error::new)
})
.is_err()
{
let res = self.db.connect().ok().and_then(|mut connection| {
diesel::insert_into(directories::table)
.values(&self.new_directories)
.execute(&mut *connection) // TODO https://github.com/diesel-rs/diesel/issues/1822
.ok()
});
if res.is_none() {
error!("Could not insert new directories in database");
}
self.new_directories.clear();
}
fn flush_songs(&mut self) {
if self
.db
.connect()
.and_then(|connection| {
diesel::insert_into(songs::table)
.values(&self.new_songs)
.execute(&*connection) // TODO https://github.com/diesel-rs/diesel/issues/1822
.map_err(Error::new)
})
.is_err()
{
let res = self.db.connect().ok().and_then(|mut connection| {
diesel::insert_into(songs::table)
.values(&self.new_songs)
.execute(&mut *connection) // TODO https://github.com/diesel-rs/diesel/issues/1822
.ok()
});
if res.is_none() {
error!("Could not insert new songs in database");
}
self.new_songs.clear();
@ -123,10 +114,10 @@ impl Inserter {
impl Drop for Inserter {
fn drop(&mut self) {
if self.new_directories.len() > 0 {
if !self.new_directories.is_empty() {
self.flush_directories();
}
if self.new_songs.len() > 0 {
if !self.new_songs.is_empty() {
self.flush_songs();
}
}

View file

@ -49,7 +49,7 @@ impl Traverser {
let num_threads = std::env::var_os(key)
.map(|v| v.to_string_lossy().to_string())
.and_then(|v| usize::from_str(&v).ok())
.unwrap_or(min(num_cpus::get(), 4));
.unwrap_or_else(|| min(num_cpus::get(), 4));
info!("Browsing collection using {} threads", num_threads);
let mut threads = Vec::new();
@ -107,14 +107,12 @@ impl Worker {
if self.is_all_work_done() {
return None;
}
if let Ok(w) = self
.work_item_receiver
.recv_timeout(Duration::from_millis(100))
{
if let Ok(w) = self
.work_item_receiver
.recv_timeout(Duration::from_millis(100))
{
return Some(w);
}
};
return Some(w);
}
}
}
@ -167,12 +165,10 @@ impl Worker {
if path.is_dir() {
sub_directories.push(path);
} else if let Some(metadata) = metadata::read(&path) {
songs.push(Song { path, metadata });
} else {
if let Some(metadata) = metadata::read(&path) {
songs.push(Song { path, metadata });
} else {
other_files.push(path);
}
other_files.push(path);
}
}
@ -180,7 +176,7 @@ impl Worker {
self.emit_directory(Directory {
path: work_item.path.to_owned(),
parent: work_item.parent.map(|p| p.to_owned()),
parent: work_item.parent,
songs,
other_files,
created,
@ -195,7 +191,7 @@ impl Worker {
}
fn get_date_created(path: &Path) -> Option<i32> {
if let Ok(t) = fs::metadata(path).and_then(|m| m.created().or(m.modified())) {
if let Ok(t) = fs::metadata(path).and_then(|m| m.created().or_else(|_| m.modified())) {
t.duration_since(std::time::UNIX_EPOCH)
.map(|d| d.as_secs() as i32)
.ok()

View file

@ -1,43 +1,27 @@
use anyhow::*;
use rustfm_scrobble::{Scrobble, Scrobbler};
use serde::Deserialize;
use std::path::Path;
use user::AuthToken;
use crate::app::{index::Index, user};
use crate::app::{
index::{Index, QueryError},
user,
};
const LASTFM_API_KEY: &str = "02b96c939a2b451c31dfd67add1f696e";
const LASTFM_API_SECRET: &str = "0f25a80ceef4b470b5cb97d99d4b3420";
#[derive(Debug, Deserialize)]
struct AuthResponseSessionName {
#[serde(rename = "$value")]
pub body: String,
}
#[derive(Debug, Deserialize)]
struct AuthResponseSessionKey {
#[serde(rename = "$value")]
pub body: String,
}
#[derive(Debug, Deserialize)]
struct AuthResponseSessionSubscriber {
#[serde(rename = "$value")]
pub body: i32,
}
#[derive(Debug, Deserialize)]
struct AuthResponseSession {
pub name: AuthResponseSessionName,
pub key: AuthResponseSessionKey,
pub subscriber: AuthResponseSessionSubscriber,
}
#[derive(Debug, Deserialize)]
struct AuthResponse {
pub status: String,
pub session: AuthResponseSession,
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("Failed to authenticate with last.fm")]
ScrobblerAuthentication(rustfm_scrobble::ScrobblerError),
#[error("Failed to emit last.fm scrobble")]
Scrobble(rustfm_scrobble::ScrobblerError),
#[error("Failed to emit last.fm now playing update")]
NowPlaying(rustfm_scrobble::ScrobblerError),
#[error(transparent)]
Query(#[from] QueryError),
#[error(transparent)]
User(#[from] user::Error),
}
#[derive(Clone)]
@ -54,44 +38,50 @@ impl Manager {
}
}
pub fn generate_link_token(&self, username: &str) -> Result<AuthToken> {
pub fn generate_link_token(&self, username: &str) -> Result<AuthToken, Error> {
self.user_manager
.generate_lastfm_link_token(username)
.map_err(|e| e.into())
}
pub fn link(&self, username: &str, lastfm_token: &str) -> Result<()> {
let mut scrobbler = Scrobbler::new(LASTFM_API_KEY.into(), LASTFM_API_SECRET.into());
let auth_response = scrobbler.authenticate_with_token(lastfm_token)?;
pub fn link(&self, username: &str, lastfm_token: &str) -> Result<(), Error> {
let mut scrobbler = Scrobbler::new(LASTFM_API_KEY, LASTFM_API_SECRET);
let auth_response = scrobbler
.authenticate_with_token(lastfm_token)
.map_err(Error::ScrobblerAuthentication)?;
self.user_manager
.lastfm_link(username, &auth_response.name, &auth_response.key)
.map_err(|e| e.into())
}
pub fn unlink(&self, username: &str) -> Result<()> {
self.user_manager.lastfm_unlink(username)
pub fn unlink(&self, username: &str) -> Result<(), Error> {
self.user_manager
.lastfm_unlink(username)
.map_err(|e| e.into())
}
pub fn scrobble(&self, username: &str, track: &Path) -> Result<()> {
let mut scrobbler = Scrobbler::new(LASTFM_API_KEY.into(), LASTFM_API_SECRET.into());
pub fn scrobble(&self, username: &str, track: &Path) -> Result<(), Error> {
let mut scrobbler = Scrobbler::new(LASTFM_API_KEY, LASTFM_API_SECRET);
let scrobble = self.scrobble_from_path(track)?;
let auth_token = self.user_manager.get_lastfm_session_key(username)?;
scrobbler.authenticate_with_session_key(&auth_token);
scrobbler.scrobble(&scrobble)?;
scrobbler.scrobble(&scrobble).map_err(Error::Scrobble)?;
Ok(())
}
pub fn now_playing(&self, username: &str, track: &Path) -> Result<()> {
let mut scrobbler = Scrobbler::new(LASTFM_API_KEY.into(), LASTFM_API_SECRET.into());
pub fn now_playing(&self, username: &str, track: &Path) -> Result<(), Error> {
let mut scrobbler = Scrobbler::new(LASTFM_API_KEY, LASTFM_API_SECRET);
let scrobble = self.scrobble_from_path(track)?;
let auth_token = self.user_manager.get_lastfm_session_key(username)?;
scrobbler.authenticate_with_session_key(&auth_token);
scrobbler.now_playing(&scrobble)?;
scrobbler
.now_playing(&scrobble)
.map_err(Error::NowPlaying)?;
Ok(())
}
fn scrobble_from_path(&self, track: &Path) -> Result<Scrobble> {
fn scrobble_from_path(&self, track: &Path) -> Result<Scrobble, Error> {
let song = self.index.get_song(track)?;
Ok(Scrobble::new(
song.artist.as_deref().unwrap_or(""),

View file

@ -1,3 +0,0 @@
mod manager;
pub use manager::*;

367
src/app/playlist.rs Normal file
View file

@ -0,0 +1,367 @@
use core::clone::Clone;
use diesel::prelude::*;
use diesel::sql_types;
use diesel::BelongingToDsl;
use std::path::Path;
use crate::app::index::Song;
use crate::app::vfs;
use crate::db::{self, playlist_songs, playlists, users, DB};
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error(transparent)]
Database(#[from] diesel::result::Error),
#[error(transparent)]
DatabaseConnection(#[from] db::Error),
#[error("User not found")]
UserNotFound,
#[error("Playlist not found")]
PlaylistNotFound,
#[error(transparent)]
Vfs(#[from] vfs::Error),
}
#[derive(Clone)]
pub struct Manager {
db: DB,
vfs_manager: vfs::Manager,
}
impl Manager {
pub fn new(db: DB, vfs_manager: vfs::Manager) -> Self {
Self { db, vfs_manager }
}
pub fn list_playlists(&self, owner: &str) -> Result<Vec<String>, Error> {
let mut connection = self.db.connect()?;
let user: User = {
use self::users::dsl::*;
users
.filter(name.eq(owner))
.select((id,))
.first(&mut connection)
.optional()?
.ok_or(Error::UserNotFound)?
};
{
use self::playlists::dsl::*;
let found_playlists: Vec<String> = Playlist::belonging_to(&user)
.select(name)
.load(&mut connection)?;
Ok(found_playlists)
}
}
pub fn save_playlist(
&self,
playlist_name: &str,
owner: &str,
content: &[String],
) -> Result<(), Error> {
let new_playlist: NewPlaylist;
let playlist: Playlist;
let vfs = self.vfs_manager.get_vfs()?;
{
let mut connection = self.db.connect()?;
// Find owner
let user: User = {
use self::users::dsl::*;
users
.filter(name.eq(owner))
.select((id,))
.first(&mut connection)
.optional()?
.ok_or(Error::UserNotFound)?
};
// Create playlist
new_playlist = NewPlaylist {
name: playlist_name.into(),
owner: user.id,
};
diesel::insert_into(playlists::table)
.values(&new_playlist)
.execute(&mut connection)?;
playlist = {
use self::playlists::dsl::*;
playlists
.select((id, owner))
.filter(name.eq(playlist_name).and(owner.eq(user.id)))
.get_result(&mut connection)?
}
}
let mut new_songs: Vec<NewPlaylistSong> = Vec::new();
new_songs.reserve(content.len());
for (i, path) in content.iter().enumerate() {
let virtual_path = Path::new(&path);
if let Some(real_path) = vfs
.virtual_to_real(virtual_path)
.ok()
.and_then(|p| p.to_str().map(|s| s.to_owned()))
{
new_songs.push(NewPlaylistSong {
playlist: playlist.id,
path: real_path,
ordering: i as i32,
});
}
}
{
let mut connection = self.db.connect()?;
connection.transaction::<_, diesel::result::Error, _>(|connection| {
// Delete old content (if any)
let old_songs = PlaylistSong::belonging_to(&playlist);
diesel::delete(old_songs).execute(connection)?;
// Insert content
diesel::insert_into(playlist_songs::table)
.values(&new_songs)
.execute(&mut *connection)?; // TODO https://github.com/diesel-rs/diesel/issues/1822
Ok(())
})?;
}
Ok(())
}
pub fn read_playlist(&self, playlist_name: &str, owner: &str) -> Result<Vec<Song>, Error> {
let vfs = self.vfs_manager.get_vfs()?;
let songs: Vec<Song>;
{
let mut connection = self.db.connect()?;
// Find owner
let user: User = {
use self::users::dsl::*;
users
.filter(name.eq(owner))
.select((id,))
.first(&mut connection)
.optional()?
.ok_or(Error::UserNotFound)?
};
// Find playlist
let playlist: Playlist = {
use self::playlists::dsl::*;
playlists
.select((id, owner))
.filter(name.eq(playlist_name).and(owner.eq(user.id)))
.get_result(&mut connection)
.optional()?
.ok_or(Error::PlaylistNotFound)?
};
// Select songs. Not using Diesel because we need to LEFT JOIN using a custom column
let query = diesel::sql_query(
r#"
SELECT s.id, s.path, s.parent, s.track_number, s.disc_number, s.title, s.artist, s.album_artist, s.year, s.album, s.artwork, s.duration, s.lyricist, s.composer, s.genre, s.label
FROM playlist_songs ps
LEFT JOIN songs s ON ps.path = s.path
WHERE ps.playlist = ?
ORDER BY ps.ordering
"#,
);
let query = query.bind::<sql_types::Integer, _>(playlist.id);
songs = query.get_results(&mut connection)?;
}
// Map real path to virtual paths
let virtual_songs = songs
.into_iter()
.filter_map(|s| s.virtualize(&vfs))
.collect();
Ok(virtual_songs)
}
pub fn delete_playlist(&self, playlist_name: &str, owner: &str) -> Result<(), Error> {
let mut connection = self.db.connect()?;
let user: User = {
use self::users::dsl::*;
users
.filter(name.eq(owner))
.select((id,))
.first(&mut connection)
.optional()?
.ok_or(Error::UserNotFound)?
};
{
use self::playlists::dsl::*;
let q = Playlist::belonging_to(&user).filter(name.eq(playlist_name));
match diesel::delete(q).execute(&mut connection)? {
0 => Err(Error::PlaylistNotFound),
_ => Ok(()),
}
}
}
}
#[derive(Identifiable, Queryable, Associations)]
#[diesel(belongs_to(User, foreign_key = owner))]
struct Playlist {
id: i32,
owner: i32,
}
#[derive(Identifiable, Queryable, Associations)]
#[diesel(belongs_to(Playlist, foreign_key = playlist))]
struct PlaylistSong {
id: i32,
playlist: i32,
}
#[derive(Insertable)]
#[diesel(table_name = playlists)]
struct NewPlaylist {
name: String,
owner: i32,
}
#[derive(Insertable)]
#[diesel(table_name = playlist_songs)]
struct NewPlaylistSong {
playlist: i32,
path: String,
ordering: i32,
}
#[derive(Identifiable, Queryable)]
struct User {
id: i32,
}
#[cfg(test)]
mod test {
use std::path::{Path, PathBuf};
use crate::app::test;
use crate::test_name;
const TEST_USER: &str = "test_user";
const TEST_PASSWORD: &str = "password";
const TEST_PLAYLIST_NAME: &str = "Chill & Grill";
const TEST_MOUNT_NAME: &str = "root";
#[test]
fn save_playlist_golden_path() {
let ctx = test::ContextBuilder::new(test_name!())
.user(TEST_USER, TEST_PASSWORD, false)
.build();
ctx.playlist_manager
.save_playlist(TEST_PLAYLIST_NAME, TEST_USER, &Vec::new())
.unwrap();
let found_playlists = ctx.playlist_manager.list_playlists(TEST_USER).unwrap();
assert_eq!(found_playlists.len(), 1);
assert_eq!(found_playlists[0], TEST_PLAYLIST_NAME);
}
#[test]
fn save_playlist_is_idempotent() {
let ctx = test::ContextBuilder::new(test_name!())
.user(TEST_USER, TEST_PASSWORD, false)
.mount(TEST_MOUNT_NAME, "test-data/small-collection")
.build();
ctx.index.update().unwrap();
let playlist_content: Vec<String> = ctx
.index
.flatten(Path::new(TEST_MOUNT_NAME))
.unwrap()
.into_iter()
.map(|s| s.path)
.collect();
assert_eq!(playlist_content.len(), 13);
ctx.playlist_manager
.save_playlist(TEST_PLAYLIST_NAME, TEST_USER, &playlist_content)
.unwrap();
ctx.playlist_manager
.save_playlist(TEST_PLAYLIST_NAME, TEST_USER, &playlist_content)
.unwrap();
let songs = ctx
.playlist_manager
.read_playlist(TEST_PLAYLIST_NAME, TEST_USER)
.unwrap();
assert_eq!(songs.len(), 13);
}
#[test]
fn delete_playlist_golden_path() {
let ctx = test::ContextBuilder::new(test_name!())
.user(TEST_USER, TEST_PASSWORD, false)
.build();
let playlist_content = Vec::new();
ctx.playlist_manager
.save_playlist(TEST_PLAYLIST_NAME, TEST_USER, &playlist_content)
.unwrap();
ctx.playlist_manager
.delete_playlist(TEST_PLAYLIST_NAME, TEST_USER)
.unwrap();
let found_playlists = ctx.playlist_manager.list_playlists(TEST_USER).unwrap();
assert_eq!(found_playlists.len(), 0);
}
#[test]
fn read_playlist_golden_path() {
let ctx = test::ContextBuilder::new(test_name!())
.user(TEST_USER, TEST_PASSWORD, false)
.mount(TEST_MOUNT_NAME, "test-data/small-collection")
.build();
ctx.index.update().unwrap();
let playlist_content: Vec<String> = ctx
.index
.flatten(Path::new(TEST_MOUNT_NAME))
.unwrap()
.into_iter()
.map(|s| s.path)
.collect();
assert_eq!(playlist_content.len(), 13);
ctx.playlist_manager
.save_playlist(TEST_PLAYLIST_NAME, TEST_USER, &playlist_content)
.unwrap();
let songs = ctx
.playlist_manager
.read_playlist(TEST_PLAYLIST_NAME, TEST_USER)
.unwrap();
assert_eq!(songs.len(), 13);
assert_eq!(songs[0].title, Some("Above The Water".to_owned()));
let first_song_path: PathBuf = [
TEST_MOUNT_NAME,
"Khemmis",
"Hunted",
"01 - Above The Water.mp3",
]
.iter()
.collect();
assert_eq!(songs[0].path, first_song_path.to_str().unwrap());
}
}
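
The transaction inside `save_playlist` above also reflects the diesel 2.x API change: the closure now receives the connection as an argument instead of capturing the outer one (which is what the old code below still does). A minimal sketch of the new shape, with an illustrative `playlist_songs` schema and a delete-only body:

use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;

// Illustrative schema only; the real playlist tables live in src/db.
diesel::table! {
    playlist_songs (id) {
        id -> Integer,
        playlist -> Integer,
    }
}

fn clear_playlist(connection: &mut SqliteConnection, playlist_id: i32) -> diesel::QueryResult<()> {
    // diesel 1.x: `connection.transaction(|| { ... })`, re-borrowing the outer
    // connection. diesel 2.x hands the closure a `&mut` connection instead.
    connection.transaction::<_, diesel::result::Error, _>(|connection| {
        diesel::delete(playlist_songs::table.filter(playlist_songs::playlist.eq(playlist_id)))
            .execute(connection)?;
        Ok(())
    })
}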

View file

@ -1,15 +0,0 @@
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("User not found")]
UserNotFound,
#[error("Playlist not found")]
PlaylistNotFound,
#[error("Unspecified")]
Unspecified,
}
impl From<anyhow::Error> for Error {
fn from(_: anyhow::Error) -> Self {
Error::Unspecified
}
}

View file

@ -1,247 +0,0 @@
use anyhow::Result;
use core::clone::Clone;
use diesel;
use diesel::prelude::*;
use diesel::sql_types;
use diesel::BelongingToDsl;
use std::path::Path;
use super::*;
use crate::app::index::Song;
use crate::app::vfs;
use crate::db::{playlist_songs, playlists, users, DB};
#[derive(Clone)]
pub struct Manager {
db: DB,
vfs_manager: vfs::Manager,
}
impl Manager {
pub fn new(db: DB, vfs_manager: vfs::Manager) -> Self {
Self { db, vfs_manager }
}
pub fn list_playlists(&self, owner: &str) -> Result<Vec<String>, Error> {
let connection = self.db.connect()?;
let user: User = {
use self::users::dsl::*;
users
.filter(name.eq(owner))
.select((id,))
.first(&connection)
.optional()
.map_err(anyhow::Error::new)?
.ok_or(Error::UserNotFound)?
};
{
use self::playlists::dsl::*;
let found_playlists: Vec<String> = Playlist::belonging_to(&user)
.select(name)
.load(&connection)
.map_err(anyhow::Error::new)?;
Ok(found_playlists)
}
}
pub fn save_playlist(
&self,
playlist_name: &str,
owner: &str,
content: &[String],
) -> Result<(), Error> {
let new_playlist: NewPlaylist;
let playlist: Playlist;
let vfs = self.vfs_manager.get_vfs()?;
{
let connection = self.db.connect()?;
// Find owner
let user: User = {
use self::users::dsl::*;
users
.filter(name.eq(owner))
.select((id,))
.first(&connection)
.optional()
.map_err(anyhow::Error::new)?
.ok_or(Error::UserNotFound)?
};
// Create playlist
new_playlist = NewPlaylist {
name: playlist_name.into(),
owner: user.id,
};
diesel::insert_into(playlists::table)
.values(&new_playlist)
.execute(&connection)
.map_err(anyhow::Error::new)?;
playlist = {
use self::playlists::dsl::*;
playlists
.select((id, owner))
.filter(name.eq(playlist_name).and(owner.eq(user.id)))
.get_result(&connection)
.map_err(anyhow::Error::new)?
}
}
let mut new_songs: Vec<NewPlaylistSong> = Vec::new();
new_songs.reserve(content.len());
for (i, path) in content.iter().enumerate() {
let virtual_path = Path::new(&path);
if let Some(real_path) = vfs
.virtual_to_real(virtual_path)
.ok()
.and_then(|p| p.to_str().map(|s| s.to_owned()))
{
new_songs.push(NewPlaylistSong {
playlist: playlist.id,
path: real_path,
ordering: i as i32,
});
}
}
{
let connection = self.db.connect()?;
connection
.transaction::<_, diesel::result::Error, _>(|| {
// Delete old content (if any)
let old_songs = PlaylistSong::belonging_to(&playlist);
diesel::delete(old_songs).execute(&connection)?;
// Insert content
diesel::insert_into(playlist_songs::table)
.values(&new_songs)
.execute(&*connection)?; // TODO https://github.com/diesel-rs/diesel/issues/1822
Ok(())
})
.map_err(anyhow::Error::new)?;
}
Ok(())
}
pub fn read_playlist(&self, playlist_name: &str, owner: &str) -> Result<Vec<Song>, Error> {
let vfs = self.vfs_manager.get_vfs()?;
let songs: Vec<Song>;
{
let connection = self.db.connect()?;
// Find owner
let user: User = {
use self::users::dsl::*;
users
.filter(name.eq(owner))
.select((id,))
.first(&connection)
.optional()
.map_err(anyhow::Error::new)?
.ok_or(Error::UserNotFound)?
};
// Find playlist
let playlist: Playlist = {
use self::playlists::dsl::*;
playlists
.select((id, owner))
.filter(name.eq(playlist_name).and(owner.eq(user.id)))
.get_result(&connection)
.optional()
.map_err(anyhow::Error::new)?
.ok_or(Error::PlaylistNotFound)?
};
// Select songs. Not using Diesel because we need to LEFT JOIN using a custom column
let query = diesel::sql_query(
r#"
SELECT s.id, s.path, s.parent, s.track_number, s.disc_number, s.title, s.artist, s.album_artist, s.year, s.album, s.artwork, s.duration
FROM playlist_songs ps
LEFT JOIN songs s ON ps.path = s.path
WHERE ps.playlist = ?
ORDER BY ps.ordering
"#,
);
let query = query.clone().bind::<sql_types::Integer, _>(playlist.id);
songs = query.get_results(&connection).map_err(anyhow::Error::new)?;
}
// Map real path to virtual paths
let virtual_songs = songs
.into_iter()
.filter_map(|s| s.virtualize(&vfs))
.collect();
Ok(virtual_songs)
}
pub fn delete_playlist(&self, playlist_name: &str, owner: &str) -> Result<(), Error> {
let connection = self.db.connect()?;
let user: User = {
use self::users::dsl::*;
users
.filter(name.eq(owner))
.select((id,))
.first(&connection)
.optional()
.map_err(anyhow::Error::new)?
.ok_or(Error::UserNotFound)?
};
{
use self::playlists::dsl::*;
let q = Playlist::belonging_to(&user).filter(name.eq(playlist_name));
match diesel::delete(q)
.execute(&connection)
.map_err(anyhow::Error::new)?
{
0 => Err(Error::PlaylistNotFound),
_ => Ok(()),
}
}
}
}
#[derive(Identifiable, Queryable, Associations)]
#[belongs_to(User, foreign_key = "owner")]
struct Playlist {
id: i32,
owner: i32,
}
#[derive(Identifiable, Queryable, Associations)]
#[belongs_to(Playlist, foreign_key = "playlist")]
struct PlaylistSong {
id: i32,
playlist: i32,
}
#[derive(Insertable)]
#[table_name = "playlists"]
struct NewPlaylist {
name: String,
owner: i32,
}
#[derive(Insertable)]
#[table_name = "playlist_songs"]
struct NewPlaylistSong {
playlist: i32,
path: String,
ordering: i32,
}
#[derive(Identifiable, Queryable)]
struct User {
id: i32,
}

View file

@ -1,7 +0,0 @@
mod error;
mod manager;
#[cfg(test)]
mod test;
pub use error::*;
pub use manager::*;

View file

@ -1,118 +0,0 @@
use std::path::{Path, PathBuf};
use crate::app::test;
use crate::test_name;
const TEST_USER: &str = "test_user";
const TEST_PASSWORD: &str = "password";
const TEST_PLAYLIST_NAME: &str = "Chill & Grill";
const TEST_MOUNT_NAME: &str = "root";
#[test]
fn save_playlist_golden_path() {
let ctx = test::ContextBuilder::new(test_name!())
.user(TEST_USER, TEST_PASSWORD, false)
.build();
ctx.playlist_manager
.save_playlist(TEST_PLAYLIST_NAME, TEST_USER, &Vec::new())
.unwrap();
let found_playlists = ctx.playlist_manager.list_playlists(TEST_USER).unwrap();
assert_eq!(found_playlists.len(), 1);
assert_eq!(found_playlists[0], TEST_PLAYLIST_NAME);
}
#[test]
fn save_playlist_is_idempotent() {
let ctx = test::ContextBuilder::new(test_name!())
.user(TEST_USER, TEST_PASSWORD, false)
.mount(TEST_MOUNT_NAME, "test-data/small-collection")
.build();
ctx.index.update().unwrap();
let playlist_content: Vec<String> = ctx
.index
.flatten(Path::new(TEST_MOUNT_NAME))
.unwrap()
.into_iter()
.map(|s| s.path)
.collect();
assert_eq!(playlist_content.len(), 13);
ctx.playlist_manager
.save_playlist(TEST_PLAYLIST_NAME, TEST_USER, &playlist_content)
.unwrap();
ctx.playlist_manager
.save_playlist(TEST_PLAYLIST_NAME, TEST_USER, &playlist_content)
.unwrap();
let songs = ctx
.playlist_manager
.read_playlist(TEST_PLAYLIST_NAME, TEST_USER)
.unwrap();
assert_eq!(songs.len(), 13);
}
#[test]
fn delete_playlist_golden_path() {
let ctx = test::ContextBuilder::new(test_name!())
.user(TEST_USER, TEST_PASSWORD, false)
.build();
let playlist_content = Vec::new();
ctx.playlist_manager
.save_playlist(TEST_PLAYLIST_NAME, TEST_USER, &playlist_content)
.unwrap();
ctx.playlist_manager
.delete_playlist(TEST_PLAYLIST_NAME, TEST_USER)
.unwrap();
let found_playlists = ctx.playlist_manager.list_playlists(TEST_USER).unwrap();
assert_eq!(found_playlists.len(), 0);
}
#[test]
fn read_playlist_golden_path() {
let ctx = test::ContextBuilder::new(test_name!())
.user(TEST_USER, TEST_PASSWORD, false)
.mount(TEST_MOUNT_NAME, "test-data/small-collection")
.build();
ctx.index.update().unwrap();
let playlist_content: Vec<String> = ctx
.index
.flatten(Path::new(TEST_MOUNT_NAME))
.unwrap()
.into_iter()
.map(|s| s.path)
.collect();
assert_eq!(playlist_content.len(), 13);
ctx.playlist_manager
.save_playlist(TEST_PLAYLIST_NAME, TEST_USER, &playlist_content)
.unwrap();
let songs = ctx
.playlist_manager
.read_playlist(TEST_PLAYLIST_NAME, TEST_USER)
.unwrap();
assert_eq!(songs.len(), 13);
assert_eq!(songs[0].title, Some("Above The Water".to_owned()));
let first_song_path: PathBuf = [
TEST_MOUNT_NAME,
"Khemmis",
"Hunted",
"01 - Above The Water.mp3",
]
.iter()
.collect();
assert_eq!(songs[0].path, first_song_path.to_str().unwrap());
}

114
src/app/settings.rs Normal file
View file

@ -0,0 +1,114 @@
use diesel::prelude::*;
use regex::Regex;
use serde::Deserialize;
use std::convert::TryInto;
use std::time::Duration;
use crate::db::{self, misc_settings, DB};
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("Auth secret does not have the expected format")]
AuthenticationSecretInvalid,
#[error("Missing auth secret")]
AuthenticationSecretNotFound,
#[error(transparent)]
DatabaseConnection(#[from] db::Error),
#[error("Missing settings")]
MiscSettingsNotFound,
#[error("Index album art pattern is not a valid regex")]
IndexAlbumArtPatternInvalid,
#[error(transparent)]
Database(#[from] diesel::result::Error),
}
#[derive(Clone, Default)]
pub struct AuthSecret {
pub key: [u8; 32],
}
#[derive(Debug, Queryable)]
pub struct Settings {
pub index_sleep_duration_seconds: i32,
pub index_album_art_pattern: String,
}
#[derive(Debug, Default, Deserialize)]
pub struct NewSettings {
pub reindex_every_n_seconds: Option<i32>,
pub album_art_pattern: Option<String>,
}
#[derive(Clone)]
pub struct Manager {
pub db: DB,
}
impl Manager {
pub fn new(db: DB) -> Self {
Self { db }
}
pub fn get_auth_secret(&self) -> Result<AuthSecret, Error> {
use self::misc_settings::dsl::*;
let mut connection = self.db.connect()?;
let secret: Vec<u8> = misc_settings
.select(auth_secret)
.get_result(&mut connection)
.map_err(|e| match e {
diesel::result::Error::NotFound => Error::AuthenticationSecretNotFound,
e => e.into(),
})?;
secret
.try_into()
.map_err(|_| Error::AuthenticationSecretInvalid)
.map(|key| AuthSecret { key })
}
pub fn get_index_sleep_duration(&self) -> Result<Duration, Error> {
let settings = self.read()?;
Ok(Duration::from_secs(
settings.index_sleep_duration_seconds as u64,
))
}
pub fn get_index_album_art_pattern(&self) -> Result<Regex, Error> {
let settings = self.read()?;
let regex = Regex::new(&format!("(?i){}", &settings.index_album_art_pattern))
.map_err(|_| Error::IndexAlbumArtPatternInvalid)?;
Ok(regex)
}
pub fn read(&self) -> Result<Settings, Error> {
use self::misc_settings::dsl::*;
let mut connection = self.db.connect()?;
let settings: Settings = misc_settings
.select((index_sleep_duration_seconds, index_album_art_pattern))
.get_result(&mut connection)
.map_err(|e| match e {
diesel::result::Error::NotFound => Error::MiscSettingsNotFound,
e => e.into(),
})?;
Ok(settings)
}
pub fn amend(&self, new_settings: &NewSettings) -> Result<(), Error> {
let mut connection = self.db.connect()?;
if let Some(sleep_duration) = new_settings.reindex_every_n_seconds {
diesel::update(misc_settings::table)
.set(misc_settings::index_sleep_duration_seconds.eq(sleep_duration as i32))
.execute(&mut connection)?;
}
if let Some(ref album_art_pattern) = new_settings.album_art_pattern {
diesel::update(misc_settings::table)
.set(misc_settings::index_album_art_pattern.eq(album_art_pattern))
.execute(&mut connection)?;
}
Ok(())
}
}
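
`get_index_album_art_pattern` wraps the stored pattern with `(?i)` so that matching is case-insensitive, which is what the `album_art_pattern_is_case_insensitive` test earlier in this commit relies on. A small self-contained check, with illustrative artwork file names:

use regex::Regex;

#[test]
fn album_art_pattern_matches_any_case() {
    // Same construction as Manager::get_index_album_art_pattern: the stored
    // pattern is prefixed with the case-insensitive flag before compiling.
    let stored_pattern = "folder";
    let regex = Regex::new(&format!("(?i){}", stored_pattern)).unwrap();

    // Both spellings of an illustrative artwork file name match.
    assert!(regex.is_match("Folder.jpg"));
    assert!(regex.is_match("FOLDER.png"));
}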

View file

@ -1,21 +0,0 @@
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("Missing auth secret")]
AuthSecretNotFound,
#[error("Auth secret does not have the expected format")]
InvalidAuthSecret,
#[error("Missing index sleep duration")]
IndexSleepDurationNotFound,
#[error("Missing index album art pattern")]
IndexAlbumArtPatternNotFound,
#[error("Index album art pattern is not a valid regex")]
IndexAlbumArtPatternInvalid,
#[error("Unspecified")]
Unspecified,
}
impl From<anyhow::Error> for Error {
fn from(_: anyhow::Error) -> Self {
Error::Unspecified
}
}

View file

@ -1,97 +0,0 @@
use diesel;
use diesel::prelude::*;
use regex::Regex;
use std::convert::TryInto;
use std::time::Duration;
use super::*;
use crate::db::{misc_settings, DB};
#[derive(Clone)]
pub struct Manager {
pub db: DB,
}
impl Manager {
pub fn new(db: DB) -> Self {
Self { db }
}
pub fn get_auth_secret(&self) -> Result<AuthSecret, Error> {
use self::misc_settings::dsl::*;
let connection = self.db.connect()?;
let secret: Vec<u8> = misc_settings
.select(auth_secret)
.get_result(&connection)
.map_err(|e| match e {
diesel::result::Error::NotFound => Error::AuthSecretNotFound,
_ => Error::Unspecified,
})?;
secret
.try_into()
.map_err(|_| Error::InvalidAuthSecret)
.map(|key| AuthSecret { key })
}
pub fn get_index_sleep_duration(&self) -> Result<Duration, Error> {
use self::misc_settings::dsl::*;
let connection = self.db.connect()?;
misc_settings
.select(index_sleep_duration_seconds)
.get_result(&connection)
.map_err(|e| match e {
diesel::result::Error::NotFound => Error::IndexSleepDurationNotFound,
_ => Error::Unspecified,
})
.map(|s: i32| Duration::from_secs(s as u64))
}
pub fn get_index_album_art_pattern(&self) -> Result<Regex, Error> {
use self::misc_settings::dsl::*;
let connection = self.db.connect()?;
misc_settings
.select(index_album_art_pattern)
.get_result(&connection)
.map_err(|e| match e {
diesel::result::Error::NotFound => Error::IndexAlbumArtPatternNotFound,
_ => Error::Unspecified,
})
.and_then(|s: String| {
Regex::new(&format!("(?i){}", &s)).map_err(|_| Error::IndexAlbumArtPatternInvalid)
})
}
pub fn read(&self) -> Result<Settings, Error> {
let connection = self.db.connect()?;
let misc: MiscSettings = misc_settings::table
.get_result(&connection)
.map_err(|_| Error::Unspecified)?;
Ok(Settings {
auth_secret: misc.auth_secret,
album_art_pattern: misc.index_album_art_pattern,
reindex_every_n_seconds: misc.index_sleep_duration_seconds,
})
}
pub fn amend(&self, new_settings: &NewSettings) -> Result<(), Error> {
let connection = self.db.connect()?;
if let Some(sleep_duration) = new_settings.reindex_every_n_seconds {
diesel::update(misc_settings::table)
.set(misc_settings::index_sleep_duration_seconds.eq(sleep_duration as i32))
.execute(&connection)
.map_err(|_| Error::Unspecified)?;
}
if let Some(ref album_art_pattern) = new_settings.album_art_pattern {
diesel::update(misc_settings::table)
.set(misc_settings::index_album_art_pattern.eq(album_art_pattern))
.execute(&connection)
.map_err(|_| Error::Unspecified)?;
}
Ok(())
}
}

View file

@ -1,33 +0,0 @@
use serde::Deserialize;
mod error;
mod manager;
pub use error::*;
pub use manager::*;
#[derive(Clone, Default)]
pub struct AuthSecret {
pub key: [u8; 32],
}
#[derive(Debug, Queryable)]
struct MiscSettings {
id: i32,
auth_secret: Vec<u8>,
index_sleep_duration_seconds: i32,
index_album_art_pattern: String,
}
#[derive(Debug)]
pub struct Settings {
auth_secret: Vec<u8>,
pub reindex_every_n_seconds: i32,
pub album_art_pattern: String,
}
#[derive(Debug, Default, Deserialize)]
pub struct NewSettings {
pub reindex_every_n_seconds: Option<i32>,
pub album_art_pattern: Option<String>,
}

276
src/app/thumbnail.rs Normal file
View file

@ -0,0 +1,276 @@
use image::{DynamicImage, GenericImage, GenericImageView, ImageBuffer, ImageOutputFormat};
use std::cmp;
use std::collections::hash_map::DefaultHasher;
use std::fs::{self, File};
use std::hash::{Hash, Hasher};
use std::path::{Path, PathBuf};
use crate::utils::{get_audio_format, AudioFormat};
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("No embedded artwork was found in `{0}`")]
EmbeddedArtworkNotFound(PathBuf),
#[error("Could not read thumbnail from ID3 tag in `{0}`:\n\n{1}")]
Id3(PathBuf, id3::Error),
#[error("Could not read thumbnail image in `{0}`:\n\n{1}")]
Image(PathBuf, image::error::ImageError),
#[error("Filesystem error for `{0}`: `{1}`")]
Io(PathBuf, std::io::Error),
#[error("Could not read thumbnail from flac file in `{0}`:\n\n{1}")]
Metaflac(PathBuf, metaflac::Error),
#[error("Could not read thumbnail from mp4 file in `{0}`:\n\n{1}")]
Mp4aMeta(PathBuf, mp4ameta::Error),
#[error("This file format is not supported: {0}")]
UnsupportedFormat(&'static str),
}
#[derive(Debug, Hash)]
pub struct Options {
pub max_dimension: Option<u32>,
pub resize_if_almost_square: bool,
pub pad_to_square: bool,
}
impl Default for Options {
fn default() -> Self {
Self {
max_dimension: Some(400),
resize_if_almost_square: true,
pad_to_square: true,
}
}
}
#[derive(Clone)]
pub struct Manager {
thumbnails_dir_path: PathBuf,
}
impl Manager {
pub fn new(thumbnails_dir_path: PathBuf) -> Self {
Self {
thumbnails_dir_path,
}
}
pub fn get_thumbnail(
&self,
image_path: &Path,
thumbnailoptions: &Options,
) -> Result<PathBuf, Error> {
match self.retrieve_thumbnail(image_path, thumbnailoptions) {
Some(path) => Ok(path),
None => self.create_thumbnail(image_path, thumbnailoptions),
}
}
fn get_thumbnail_path(&self, image_path: &Path, thumbnailoptions: &Options) -> PathBuf {
let hash = Manager::hash(image_path, thumbnailoptions);
let mut thumbnail_path = self.thumbnails_dir_path.clone();
thumbnail_path.push(format!("{}.jpg", hash));
thumbnail_path
}
fn retrieve_thumbnail(&self, image_path: &Path, thumbnailoptions: &Options) -> Option<PathBuf> {
let path = self.get_thumbnail_path(image_path, thumbnailoptions);
if path.exists() {
Some(path)
} else {
None
}
}
fn create_thumbnail(
&self,
image_path: &Path,
thumbnailoptions: &Options,
) -> Result<PathBuf, Error> {
let thumbnail = generate_thumbnail(image_path, thumbnailoptions)?;
let quality = 80;
fs::create_dir_all(&self.thumbnails_dir_path)
.map_err(|e| Error::Io(self.thumbnails_dir_path.clone(), e))?;
let path = self.get_thumbnail_path(image_path, thumbnailoptions);
let mut out_file =
File::create(&path).map_err(|e| Error::Io(self.thumbnails_dir_path.clone(), e))?;
thumbnail
.write_to(&mut out_file, ImageOutputFormat::Jpeg(quality))
.map_err(|e| Error::Image(image_path.to_owned(), e))?;
Ok(path)
}
fn hash(path: &Path, thumbnailoptions: &Options) -> u64 {
let mut hasher = DefaultHasher::new();
path.hash(&mut hasher);
thumbnailoptions.hash(&mut hasher);
hasher.finish()
}
}
fn generate_thumbnail(image_path: &Path, options: &Options) -> Result<DynamicImage, Error> {
let source_image = DynamicImage::ImageRgb8(read(image_path)?.into_rgb8());
let (source_width, source_height) = source_image.dimensions();
let largest_dimension = cmp::max(source_width, source_height);
let out_dimension = cmp::min(
options.max_dimension.unwrap_or(largest_dimension),
largest_dimension,
);
let source_aspect_ratio: f32 = source_width as f32 / source_height as f32;
let is_almost_square = source_aspect_ratio > 0.8 && source_aspect_ratio < 1.2;
let mut final_image;
if is_almost_square && options.resize_if_almost_square {
final_image = source_image.thumbnail_exact(out_dimension, out_dimension);
} else if options.pad_to_square {
let scaled_image = source_image.thumbnail(out_dimension, out_dimension);
let (scaled_width, scaled_height) = scaled_image.dimensions();
let background = image::Rgb([255, 255_u8, 255_u8]);
final_image = DynamicImage::ImageRgb8(ImageBuffer::from_pixel(
out_dimension,
out_dimension,
background,
));
final_image
.copy_from(
&scaled_image,
(out_dimension - scaled_width) / 2,
(out_dimension - scaled_height) / 2,
)
.map_err(|e| Error::Image(image_path.to_owned(), e))?;
} else {
final_image = source_image.thumbnail(out_dimension, out_dimension);
}
Ok(final_image)
}
fn read(image_path: &Path) -> Result<DynamicImage, Error> {
match get_audio_format(image_path) {
Some(AudioFormat::AIFF) => read_aiff(image_path),
Some(AudioFormat::APE) => read_ape(image_path),
Some(AudioFormat::FLAC) => read_flac(image_path),
Some(AudioFormat::MP3) => read_mp3(image_path),
Some(AudioFormat::MP4) => read_mp4(image_path),
Some(AudioFormat::MPC) => read_ape(image_path),
Some(AudioFormat::OGG) => read_vorbis(image_path),
Some(AudioFormat::OPUS) => read_opus(image_path),
Some(AudioFormat::WAVE) => read_wave(image_path),
None => image::open(image_path).map_err(|e| Error::Image(image_path.to_owned(), e)),
}
}
fn read_ape(_: &Path) -> Result<DynamicImage, Error> {
Err(Error::UnsupportedFormat("ape"))
}
fn read_flac(path: &Path) -> Result<DynamicImage, Error> {
let tag =
metaflac::Tag::read_from_path(path).map_err(|e| Error::Metaflac(path.to_owned(), e))?;
if let Some(p) = tag.pictures().next() {
return image::load_from_memory(&p.data).map_err(|e| Error::Image(path.to_owned(), e));
}
Err(Error::EmbeddedArtworkNotFound(path.to_owned()))
}
fn read_mp3(path: &Path) -> Result<DynamicImage, Error> {
let tag = id3::Tag::read_from_path(path).map_err(|e| Error::Id3(path.to_owned(), e))?;
read_id3(path, &tag)
}
fn read_aiff(path: &Path) -> Result<DynamicImage, Error> {
let tag = id3::Tag::read_from_aiff_path(path).map_err(|e| Error::Id3(path.to_owned(), e))?;
read_id3(path, &tag)
}
fn read_wave(path: &Path) -> Result<DynamicImage, Error> {
let tag = id3::Tag::read_from_wav_path(path).map_err(|e| Error::Id3(path.to_owned(), e))?;
read_id3(path, &tag)
}
fn read_id3(path: &Path, tag: &id3::Tag) -> Result<DynamicImage, Error> {
tag.pictures()
.next()
.ok_or_else(|| Error::EmbeddedArtworkNotFound(path.to_owned()))
.and_then(|d| {
image::load_from_memory(&d.data).map_err(|e| Error::Image(path.to_owned(), e))
})
}
fn read_mp4(path: &Path) -> Result<DynamicImage, Error> {
let tag =
mp4ameta::Tag::read_from_path(path).map_err(|e| Error::Mp4aMeta(path.to_owned(), e))?;
tag.artwork()
.ok_or_else(|| Error::EmbeddedArtworkNotFound(path.to_owned()))
.and_then(|d| image::load_from_memory(d.data).map_err(|e| Error::Image(path.to_owned(), e)))
}
fn read_vorbis(_: &Path) -> Result<DynamicImage, Error> {
Err(Error::UnsupportedFormat("vorbis"))
}
fn read_opus(_: &Path) -> Result<DynamicImage, Error> {
Err(Error::UnsupportedFormat("opus"))
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn can_read_artwork_data() {
let ext_img = image::open("test-data/artwork/Folder.png")
.unwrap()
.to_rgb8();
let embedded_img = image::open("test-data/artwork/Embedded.png")
.unwrap()
.to_rgb8();
let folder_img = read(Path::new("test-data/artwork/Folder.png"))
.unwrap()
.to_rgb8();
assert_eq!(folder_img, ext_img);
let aiff_img = read(Path::new("test-data/artwork/sample.aif"))
.unwrap()
.to_rgb8();
assert_eq!(aiff_img, embedded_img);
let ape_img = read(Path::new("test-data/artwork/sample.ape"))
.map(|d| d.to_rgb8())
.ok();
assert_eq!(ape_img, None);
let flac_img = read(Path::new("test-data/artwork/sample.flac"))
.unwrap()
.to_rgb8();
assert_eq!(flac_img, embedded_img);
let mp3_img = read(Path::new("test-data/artwork/sample.mp3"))
.unwrap()
.to_rgb8();
assert_eq!(mp3_img, embedded_img);
let m4a_img = read(Path::new("test-data/artwork/sample.m4a"))
.unwrap()
.to_rgb8();
assert_eq!(m4a_img, embedded_img);
let ogg_img = read(Path::new("test-data/artwork/sample.ogg"))
.map(|d| d.to_rgb8())
.ok();
assert_eq!(ogg_img, None);
let opus_img = read(Path::new("test-data/artwork/sample.opus"))
.map(|d| d.to_rgb8())
.ok();
assert_eq!(opus_img, None);
let wave_img = read(Path::new("test-data/artwork/sample.wav"))
.unwrap()
.to_rgb8();
assert_eq!(wave_img, embedded_img);
}
}
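
In `generate_thumbnail`, the pad-to-square branch centres the scaled image on a square canvas via the two offsets passed to `copy_from`. A worked example of that arithmetic, assuming an 800x400 source and the default `max_dimension` of 400:

#[test]
fn pad_to_square_offsets_are_centred() {
    // Assumed source dimensions; not taken from the test data in this repository.
    let (source_width, source_height) = (800u32, 400u32);
    let max_dimension = 400u32;

    let largest_dimension = source_width.max(source_height); // 800
    let out_dimension = max_dimension.min(largest_dimension); // 400

    // `DynamicImage::thumbnail` preserves aspect ratio, so the scaled copy is 400x200.
    let (scaled_width, scaled_height) = (400u32, 200u32);

    // The scaled image is centred on the 400x400 white background canvas.
    let x_offset = (out_dimension - scaled_width) / 2;
    let y_offset = (out_dimension - scaled_height) / 2;
    assert_eq!((x_offset, y_offset), (0, 100));
}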

View file

@ -1,39 +0,0 @@
use anyhow::*;
use image::{DynamicImage, GenericImage, GenericImageView, ImageBuffer};
use std::cmp;
use std::path::*;
use crate::app::thumbnail::{read, Options};
pub fn generate_thumbnail(image_path: &Path, options: &Options) -> Result<DynamicImage> {
let source_image = DynamicImage::ImageRgb8(read(image_path)?.into_rgb8());
let (source_width, source_height) = source_image.dimensions();
let largest_dimension = cmp::max(source_width, source_height);
let out_dimension = cmp::min(options.max_dimension, largest_dimension);
let source_aspect_ratio: f32 = source_width as f32 / source_height as f32;
let is_almost_square = source_aspect_ratio > 0.8 && source_aspect_ratio < 1.2;
let mut final_image;
if is_almost_square && options.resize_if_almost_square {
final_image = source_image.thumbnail_exact(out_dimension, out_dimension);
} else if options.pad_to_square {
let scaled_image = source_image.thumbnail(out_dimension, out_dimension);
let (scaled_width, scaled_height) = scaled_image.dimensions();
let background = image::Rgb([255, 255 as u8, 255 as u8]);
final_image = DynamicImage::ImageRgb8(ImageBuffer::from_pixel(
out_dimension,
out_dimension,
background,
));
final_image.copy_from(
&scaled_image,
(out_dimension - scaled_width) / 2,
(out_dimension - scaled_height) / 2,
)?;
} else {
final_image = source_image.thumbnail(out_dimension, out_dimension);
}
Ok(final_image)
}

View file

@ -1,62 +0,0 @@
use anyhow::*;
use image::ImageOutputFormat;
use std::collections::hash_map::DefaultHasher;
use std::fs::{self, File};
use std::hash::{Hash, Hasher};
use std::path::{Path, PathBuf};
use crate::app::thumbnail::*;
#[derive(Clone)]
pub struct Manager {
thumbnails_dir_path: PathBuf,
}
impl Manager {
pub fn new(thumbnails_dir_path: PathBuf) -> Self {
Self {
thumbnails_dir_path,
}
}
pub fn get_thumbnail(&self, image_path: &Path, thumbnailoptions: &Options) -> Result<PathBuf> {
match self.retrieve_thumbnail(image_path, thumbnailoptions) {
Some(path) => Ok(path),
None => self.create_thumbnail(image_path, thumbnailoptions),
}
}
fn get_thumbnail_path(&self, image_path: &Path, thumbnailoptions: &Options) -> PathBuf {
let hash = Manager::hash(image_path, thumbnailoptions);
let mut thumbnail_path = self.thumbnails_dir_path.clone();
thumbnail_path.push(format!("{}.jpg", hash.to_string()));
thumbnail_path
}
fn retrieve_thumbnail(&self, image_path: &Path, thumbnailoptions: &Options) -> Option<PathBuf> {
let path = self.get_thumbnail_path(image_path, thumbnailoptions);
if path.exists() {
Some(path)
} else {
None
}
}
fn create_thumbnail(&self, image_path: &Path, thumbnailoptions: &Options) -> Result<PathBuf> {
let thumbnail = generate_thumbnail(image_path, thumbnailoptions)?;
let quality = 80;
fs::create_dir_all(&self.thumbnails_dir_path)?;
let path = self.get_thumbnail_path(image_path, thumbnailoptions);
let mut out_file = File::create(&path)?;
thumbnail.write_to(&mut out_file, ImageOutputFormat::Jpeg(quality))?;
Ok(path)
}
fn hash(path: &Path, thumbnailoptions: &Options) -> u64 {
let mut hasher = DefaultHasher::new();
path.hash(&mut hasher);
thumbnailoptions.hash(&mut hasher);
hasher.finish()
}
}

View file

@ -1,9 +0,0 @@
mod generate;
mod manager;
mod options;
mod read;
pub use generate::*;
pub use manager::*;
pub use options::*;
pub use read::*;

View file

@ -1,16 +0,0 @@
#[derive(Debug, Hash)]
pub struct Options {
pub max_dimension: u32,
pub resize_if_almost_square: bool,
pub pad_to_square: bool,
}
impl Default for Options {
fn default() -> Self {
Self {
max_dimension: 400,
resize_if_almost_square: true,
pad_to_square: true,
}
}
}

View file

@ -1,148 +0,0 @@
use anyhow::*;
use image::DynamicImage;
use std::path::Path;
use crate::utils;
use crate::utils::AudioFormat;
pub fn read(image_path: &Path) -> Result<DynamicImage> {
match utils::get_audio_format(image_path) {
Some(AudioFormat::AIFF) => read_aiff(image_path),
Some(AudioFormat::APE) => read_ape(image_path),
Some(AudioFormat::FLAC) => read_flac(image_path),
Some(AudioFormat::MP3) => read_mp3(image_path),
Some(AudioFormat::MP4) => read_mp4(image_path),
Some(AudioFormat::MPC) => read_ape(image_path),
Some(AudioFormat::OGG) => read_vorbis(image_path),
Some(AudioFormat::OPUS) => read_opus(image_path),
Some(AudioFormat::WAVE) => read_wave(image_path),
None => Ok(image::open(image_path)?),
}
}
fn read_ape(_: &Path) -> Result<DynamicImage> {
Err(crate::Error::msg(
"Embedded images are not supported in APE files",
))
}
fn read_flac(path: &Path) -> Result<DynamicImage> {
let tag = metaflac::Tag::read_from_path(path)?;
if let Some(p) = tag.pictures().next() {
return Ok(image::load_from_memory(&p.data)?);
}
Err(crate::Error::msg(format!(
"Embedded flac artwork not found for file: {}",
path.display()
)))
}
fn read_mp3(path: &Path) -> Result<DynamicImage> {
let tag = id3::Tag::read_from_path(path)?;
read_id3(&path, &tag)
}
fn read_aiff(path: &Path) -> Result<DynamicImage> {
let tag = id3::Tag::read_from_aiff(path)?;
read_id3(&path, &tag)
}
fn read_wave(path: &Path) -> Result<DynamicImage> {
let tag = id3::Tag::read_from_wav(path)?;
read_id3(&path, &tag)
}
fn read_id3(path: &Path, tag: &id3::Tag) -> Result<DynamicImage> {
if let Some(p) = tag.pictures().next() {
return Ok(image::load_from_memory(&p.data)?);
}
Err(crate::Error::msg(format!(
"Embedded id3 artwork not found for file: {}",
path.display()
)))
}
fn read_mp4(path: &Path) -> Result<DynamicImage> {
let tag = mp4ameta::Tag::read_from_path(path)?;
match tag.artwork().and_then(|d| d.image_data()) {
Some(v) => Ok(image::load_from_memory(v)?),
_ => Err(crate::Error::msg(format!(
"Embedded mp4 artwork not found for file: {}",
path.display()
))),
}
}
fn read_vorbis(_: &Path) -> Result<DynamicImage> {
Err(crate::Error::msg(
"Embedded images are not supported in Vorbis files",
))
}
fn read_opus(_: &Path) -> Result<DynamicImage> {
Err(crate::Error::msg(
"Embedded images are not supported in Opus files",
))
}
#[test]
fn can_read_artwork_data() {
let ext_img = image::open("test-data/artwork/Folder.png")
.unwrap()
.to_rgb8();
let embedded_img = image::open("test-data/artwork/Embedded.png")
.unwrap()
.to_rgb8();
let folder_img = read(Path::new("test-data/artwork/Folder.png"))
.unwrap()
.to_rgb8();
assert_eq!(folder_img, ext_img);
let aiff_img = read(Path::new("test-data/artwork/sample.aif"))
.unwrap()
.to_rgb8();
assert_eq!(aiff_img, embedded_img);
let ape_img = read(Path::new("test-data/artwork/sample.ape"))
.map(|d| d.to_rgb8())
.ok();
assert_eq!(ape_img, None);
let flac_img = read(Path::new("test-data/artwork/sample.flac"))
.unwrap()
.to_rgb8();
assert_eq!(flac_img, embedded_img);
let mp3_img = read(Path::new("test-data/artwork/sample.mp3"))
.unwrap()
.to_rgb8();
assert_eq!(mp3_img, embedded_img);
let m4a_img = read(Path::new("test-data/artwork/sample.m4a"))
.unwrap()
.to_rgb8();
assert_eq!(m4a_img, embedded_img);
let ogg_img = read(Path::new("test-data/artwork/sample.ogg"))
.map(|d| d.to_rgb8())
.ok();
assert_eq!(ogg_img, None);
let opus_img = read(Path::new("test-data/artwork/sample.opus"))
.map(|d| d.to_rgb8())
.ok();
assert_eq!(opus_img, None);
let wave_img = read(Path::new("test-data/artwork/sample.wav"))
.unwrap()
.to_rgb8();
assert_eq!(wave_img, embedded_img);
}

537
src/app/user.rs Normal file
View file

@@ -0,0 +1,537 @@
use diesel::prelude::*;
use pbkdf2::password_hash::{PasswordHash, PasswordHasher, PasswordVerifier, SaltString};
use pbkdf2::Pbkdf2;
use rand::rngs::OsRng;
use serde::{Deserialize, Serialize};
use std::time::{SystemTime, UNIX_EPOCH};
use crate::app::settings::AuthSecret;
use crate::db::{self, users, DB};
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error(transparent)]
Database(#[from] diesel::result::Error),
#[error(transparent)]
DatabaseConnection(#[from] db::Error),
#[error("Cannot use empty username")]
EmptyUsername,
#[error("Cannot use empty password")]
EmptyPassword,
#[error("Username does not exist")]
IncorrectUsername,
#[error("Password does not match username")]
IncorrectPassword,
#[error("Invalid auth token")]
InvalidAuthToken,
#[error("Incorrect authorization scope")]
IncorrectAuthorizationScope,
#[error("Last.fm session key is missing")]
MissingLastFMSessionKey,
#[error("Failed to hash password")]
PasswordHashing,
#[error("Failed to encode authorization token")]
AuthorizationTokenEncoding,
#[error("Failed to encode Branca token")]
BrancaTokenEncoding,
}
#[derive(Debug, Insertable, Queryable)]
#[diesel(table_name = users)]
pub struct User {
pub name: String,
pub password_hash: String,
pub admin: i32,
}
impl User {
pub fn is_admin(&self) -> bool {
self.admin != 0
}
}
#[derive(Debug, Deserialize)]
pub struct NewUser {
pub name: String,
pub password: String,
pub admin: bool,
}
#[derive(Debug)]
pub struct AuthToken(pub String);
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
pub enum AuthorizationScope {
PolarisAuth,
LastFMLink,
}
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
pub struct Authorization {
pub username: String,
pub scope: AuthorizationScope,
}
#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
pub struct Preferences {
pub lastfm_username: Option<String>,
pub web_theme_base: Option<String>,
pub web_theme_accent: Option<String>,
}
#[derive(Clone)]
pub struct Manager {
db: DB,
auth_secret: AuthSecret,
}
impl Manager {
pub fn new(db: DB, auth_secret: AuthSecret) -> Self {
Self { db, auth_secret }
}
pub fn create(&self, new_user: &NewUser) -> Result<(), Error> {
if new_user.name.is_empty() {
return Err(Error::EmptyUsername);
}
let password_hash = hash_password(&new_user.password)?;
let mut connection = self.db.connect()?;
let new_user = User {
name: new_user.name.to_owned(),
password_hash,
admin: new_user.admin as i32,
};
diesel::insert_into(users::table)
.values(&new_user)
.execute(&mut connection)?;
Ok(())
}
pub fn delete(&self, username: &str) -> Result<(), Error> {
use crate::db::users::dsl::*;
let mut connection = self.db.connect()?;
diesel::delete(users.filter(name.eq(username))).execute(&mut connection)?;
Ok(())
}
pub fn set_password(&self, username: &str, password: &str) -> Result<(), Error> {
let hash = hash_password(password)?;
let mut connection = self.db.connect()?;
use crate::db::users::dsl::*;
diesel::update(users.filter(name.eq(username)))
.set(password_hash.eq(hash))
.execute(&mut connection)?;
Ok(())
}
pub fn set_is_admin(&self, username: &str, is_admin: bool) -> Result<(), Error> {
use crate::db::users::dsl::*;
let mut connection = self.db.connect()?;
diesel::update(users.filter(name.eq(username)))
.set(admin.eq(is_admin as i32))
.execute(&mut connection)?;
Ok(())
}
pub fn login(&self, username: &str, password: &str) -> Result<AuthToken, Error> {
use crate::db::users::dsl::*;
let mut connection = self.db.connect()?;
match users
.select(password_hash)
.filter(name.eq(username))
.get_result(&mut connection)
{
Err(diesel::result::Error::NotFound) => Err(Error::IncorrectUsername),
Ok(hash) => {
let hash: String = hash;
if verify_password(&hash, password) {
let authorization = Authorization {
username: username.to_owned(),
scope: AuthorizationScope::PolarisAuth,
};
self.generate_auth_token(&authorization)
} else {
Err(Error::IncorrectPassword)
}
}
Err(e) => Err(e.into()),
}
}
pub fn authenticate(
&self,
auth_token: &AuthToken,
scope: AuthorizationScope,
) -> Result<Authorization, Error> {
let authorization = self.decode_auth_token(auth_token, scope)?;
if self.exists(&authorization.username)? {
Ok(authorization)
} else {
Err(Error::IncorrectUsername)
}
}
fn decode_auth_token(
&self,
auth_token: &AuthToken,
scope: AuthorizationScope,
) -> Result<Authorization, Error> {
let AuthToken(data) = auth_token;
let ttl = match scope {
AuthorizationScope::PolarisAuth => 0, // permanent
AuthorizationScope::LastFMLink => 10 * 60, // 10 minutes
};
let authorization = branca::decode(data, &self.auth_secret.key, ttl)
.map_err(|_| Error::InvalidAuthToken)?;
let authorization: Authorization =
serde_json::from_slice(&authorization[..]).map_err(|_| Error::InvalidAuthToken)?;
if authorization.scope != scope {
return Err(Error::IncorrectAuthorizationScope);
}
Ok(authorization)
}
fn generate_auth_token(&self, authorization: &Authorization) -> Result<AuthToken, Error> {
let serialized_authorization =
serde_json::to_string(&authorization).or(Err(Error::AuthorizationTokenEncoding))?;
branca::encode(
serialized_authorization.as_bytes(),
&self.auth_secret.key,
SystemTime::now()
.duration_since(UNIX_EPOCH)
.unwrap_or_default()
.as_secs() as u32,
)
.or(Err(Error::BrancaTokenEncoding))
.map(AuthToken)
}
pub fn count(&self) -> Result<i64, Error> {
use crate::db::users::dsl::*;
let mut connection = self.db.connect()?;
let count = users.count().get_result(&mut connection)?;
Ok(count)
}
pub fn list(&self) -> Result<Vec<User>, Error> {
use crate::db::users::dsl::*;
let mut connection = self.db.connect()?;
let listed_users = users
.select((name, password_hash, admin))
.get_results(&mut connection)?;
Ok(listed_users)
}
pub fn exists(&self, username: &str) -> Result<bool, Error> {
use crate::db::users::dsl::*;
let mut connection = self.db.connect()?;
let results: Vec<String> = users
.select(name)
.filter(name.eq(username))
.get_results(&mut connection)?;
Ok(!results.is_empty())
}
pub fn is_admin(&self, username: &str) -> Result<bool, Error> {
use crate::db::users::dsl::*;
let mut connection = self.db.connect()?;
let is_admin: i32 = users
.filter(name.eq(username))
.select(admin)
.get_result(&mut connection)?;
Ok(is_admin != 0)
}
pub fn read_preferences(&self, username: &str) -> Result<Preferences, Error> {
use crate::db::users::dsl::*;
let mut connection = self.db.connect()?;
let (theme_base, theme_accent, read_lastfm_username) = users
.select((web_theme_base, web_theme_accent, lastfm_username))
.filter(name.eq(username))
.get_result(&mut connection)?;
Ok(Preferences {
web_theme_base: theme_base,
web_theme_accent: theme_accent,
lastfm_username: read_lastfm_username,
})
}
pub fn write_preferences(
&self,
username: &str,
preferences: &Preferences,
) -> Result<(), Error> {
use crate::db::users::dsl::*;
let mut connection = self.db.connect()?;
diesel::update(users.filter(name.eq(username)))
.set((
web_theme_base.eq(&preferences.web_theme_base),
web_theme_accent.eq(&preferences.web_theme_accent),
))
.execute(&mut connection)?;
Ok(())
}
pub fn lastfm_link(
&self,
username: &str,
lastfm_login: &str,
session_key: &str,
) -> Result<(), Error> {
use crate::db::users::dsl::*;
let mut connection = self.db.connect()?;
diesel::update(users.filter(name.eq(username)))
.set((
lastfm_username.eq(lastfm_login),
lastfm_session_key.eq(session_key),
))
.execute(&mut connection)?;
Ok(())
}
pub fn generate_lastfm_link_token(&self, username: &str) -> Result<AuthToken, Error> {
self.generate_auth_token(&Authorization {
username: username.to_owned(),
scope: AuthorizationScope::LastFMLink,
})
}
pub fn get_lastfm_session_key(&self, username: &str) -> Result<String, Error> {
use crate::db::users::dsl::*;
let mut connection = self.db.connect()?;
let token: Option<String> = users
.filter(name.eq(username))
.select(lastfm_session_key)
.get_result(&mut connection)?;
token.ok_or(Error::MissingLastFMSessionKey)
}
pub fn is_lastfm_linked(&self, username: &str) -> bool {
self.get_lastfm_session_key(username).is_ok()
}
pub fn lastfm_unlink(&self, username: &str) -> Result<(), Error> {
use crate::db::users::dsl::*;
let mut connection = self.db.connect()?;
let null: Option<String> = None;
diesel::update(users.filter(name.eq(username)))
.set((lastfm_session_key.eq(&null), lastfm_username.eq(&null)))
.execute(&mut connection)?;
Ok(())
}
}
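Aside: a minimal sketch (not part of the diff) of the Branca round-trip that `generate_auth_token` and `decode_auth_token` rely on. The 32-byte key and the JSON payload below are placeholders; Polaris derives the key from its stored `AuthSecret` and serializes an `Authorization` struct.

```rust
use std::time::{SystemTime, UNIX_EPOCH};

fn main() {
    let key = [0u8; 32]; // placeholder only; never use a constant key in practice
    let timestamp = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .unwrap_or_default()
        .as_secs() as u32;
    let token =
        branca::encode(br#"{"username":"Walter","scope":"PolarisAuth"}"#, &key, timestamp).unwrap();
    // A ttl of 0 accepts tokens forever; 600 rejects tokens older than 10 minutes.
    let payload = branca::decode(&token, &key, 10 * 60).unwrap();
    assert!(!payload.is_empty());
}
```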
fn hash_password(password: &str) -> Result<String, Error> {
if password.is_empty() {
return Err(Error::EmptyPassword);
}
let salt = SaltString::generate(&mut OsRng);
match Pbkdf2.hash_password(password.as_bytes(), &salt) {
Ok(h) => Ok(h.to_string()),
Err(_) => Err(Error::PasswordHashing),
}
}
fn verify_password(password_hash: &str, attempted_password: &str) -> bool {
match PasswordHash::new(password_hash) {
Ok(h) => Pbkdf2
.verify_password(attempted_password.as_bytes(), &h)
.is_ok(),
Err(_) => false,
}
}
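A quick usage sketch of the two helpers above, assuming the surrounding module is in scope:

```rust
fn main() {
    let hash = hash_password("super_secret!").unwrap();
    assert!(verify_password(&hash, "super_secret!"));
    assert!(!verify_password(&hash, "not the password"));
    assert!(matches!(hash_password(""), Err(Error::EmptyPassword)));
}
```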
#[cfg(test)]
mod test {
use super::*;
use crate::app::test;
use crate::test_name;
const TEST_USERNAME: &str = "Walter";
const TEST_PASSWORD: &str = "super_secret!";
#[test]
fn create_delete_user_golden_path() {
let ctx = test::ContextBuilder::new(test_name!()).build();
let new_user = NewUser {
name: TEST_USERNAME.to_owned(),
password: TEST_PASSWORD.to_owned(),
admin: false,
};
ctx.user_manager.create(&new_user).unwrap();
assert_eq!(ctx.user_manager.list().unwrap().len(), 1);
ctx.user_manager.delete(&new_user.name).unwrap();
assert_eq!(ctx.user_manager.list().unwrap().len(), 0);
}
#[test]
fn cannot_create_user_with_blank_username() {
let ctx = test::ContextBuilder::new(test_name!()).build();
let new_user = NewUser {
name: "".to_owned(),
password: TEST_PASSWORD.to_owned(),
admin: false,
};
assert!(matches!(
ctx.user_manager.create(&new_user).unwrap_err(),
Error::EmptyUsername
));
}
#[test]
fn cannot_create_user_with_blank_password() {
let ctx = test::ContextBuilder::new(test_name!()).build();
let new_user = NewUser {
name: TEST_USERNAME.to_owned(),
password: "".to_owned(),
admin: false,
};
assert!(matches!(
ctx.user_manager.create(&new_user).unwrap_err(),
Error::EmptyPassword
));
}
#[test]
fn cannot_create_duplicate_user() {
let ctx = test::ContextBuilder::new(test_name!()).build();
let new_user = NewUser {
name: TEST_USERNAME.to_owned(),
password: TEST_PASSWORD.to_owned(),
admin: false,
};
ctx.user_manager.create(&new_user).unwrap();
ctx.user_manager.create(&new_user).unwrap_err();
}
#[test]
fn can_read_write_preferences() {
let ctx = test::ContextBuilder::new(test_name!()).build();
let new_preferences = Preferences {
web_theme_base: Some("very-dark-theme".to_owned()),
web_theme_accent: Some("#FF0000".to_owned()),
lastfm_username: None,
};
let new_user = NewUser {
name: TEST_USERNAME.to_owned(),
password: TEST_PASSWORD.to_owned(),
admin: false,
};
ctx.user_manager.create(&new_user).unwrap();
ctx.user_manager
.write_preferences(TEST_USERNAME, &new_preferences)
.unwrap();
let read_preferences = ctx.user_manager.read_preferences("Walter").unwrap();
assert_eq!(new_preferences, read_preferences);
}
#[test]
fn login_rejects_bad_password() {
let ctx = test::ContextBuilder::new(test_name!()).build();
let new_user = NewUser {
name: TEST_USERNAME.to_owned(),
password: TEST_PASSWORD.to_owned(),
admin: false,
};
ctx.user_manager.create(&new_user).unwrap();
assert!(matches!(
ctx.user_manager
.login(TEST_USERNAME, "not the password")
.unwrap_err(),
Error::IncorrectPassword
));
}
#[test]
fn login_golden_path() {
let ctx = test::ContextBuilder::new(test_name!()).build();
let new_user = NewUser {
name: TEST_USERNAME.to_owned(),
password: TEST_PASSWORD.to_owned(),
admin: false,
};
ctx.user_manager.create(&new_user).unwrap();
assert!(ctx.user_manager.login(TEST_USERNAME, TEST_PASSWORD).is_ok())
}
#[test]
fn authenticate_rejects_bad_token() {
let ctx = test::ContextBuilder::new(test_name!()).build();
let new_user = NewUser {
name: TEST_USERNAME.to_owned(),
password: TEST_PASSWORD.to_owned(),
admin: false,
};
ctx.user_manager.create(&new_user).unwrap();
let fake_token = AuthToken("fake token".to_owned());
assert!(ctx
.user_manager
.authenticate(&fake_token, AuthorizationScope::PolarisAuth)
.is_err())
}
#[test]
fn authenticate_golden_path() {
let ctx = test::ContextBuilder::new(test_name!()).build();
let new_user = NewUser {
name: TEST_USERNAME.to_owned(),
password: TEST_PASSWORD.to_owned(),
admin: false,
};
ctx.user_manager.create(&new_user).unwrap();
let token = ctx
.user_manager
.login(TEST_USERNAME, TEST_PASSWORD)
.unwrap();
let authorization = ctx
.user_manager
.authenticate(&token, AuthorizationScope::PolarisAuth)
.unwrap();
assert_eq!(
authorization,
Authorization {
username: TEST_USERNAME.to_owned(),
scope: AuthorizationScope::PolarisAuth,
}
)
}
#[test]
fn authenticate_validates_scope() {
let ctx = test::ContextBuilder::new(test_name!()).build();
let new_user = NewUser {
name: TEST_USERNAME.to_owned(),
password: TEST_PASSWORD.to_owned(),
admin: false,
};
ctx.user_manager.create(&new_user).unwrap();
let token = ctx
.user_manager
.generate_lastfm_link_token(TEST_USERNAME)
.unwrap();
let authorization = ctx
.user_manager
.authenticate(&token, AuthorizationScope::PolarisAuth);
assert!(matches!(
authorization.unwrap_err(),
Error::IncorrectAuthorizationScope
));
}
}

View file

@@ -1,23 +0,0 @@
#[derive(thiserror::Error, Debug, PartialEq, Eq)]
pub enum Error {
#[error("Cannot use empty username")]
EmptyUsername,
#[error("Cannot use empty password")]
EmptyPassword,
#[error("Username does not exist")]
IncorrectUsername,
#[error("Password does not match username")]
IncorrectPassword,
#[error("Invalid auth token")]
InvalidAuthToken,
#[error("Incorrect authorization scope")]
IncorrectAuthorizationScope,
#[error("Unspecified")]
Unspecified,
}
impl From<anyhow::Error> for Error {
fn from(_: anyhow::Error) -> Self {
Error::Unspecified
}
}

View file

@@ -1,247 +0,0 @@
use anyhow::anyhow;
use diesel;
use diesel::prelude::*;
use std::time::{SystemTime, UNIX_EPOCH};
use super::*;
use crate::app::settings::AuthSecret;
use crate::db::DB;
const HASH_ITERATIONS: u32 = 10000;
#[derive(Clone)]
pub struct Manager {
// TODO make this private and move preferences methods in this file
pub db: DB,
auth_secret: AuthSecret,
}
impl Manager {
pub fn new(db: DB, auth_secret: AuthSecret) -> Self {
Self { db, auth_secret }
}
pub fn create(&self, new_user: &NewUser) -> Result<(), Error> {
if new_user.name.is_empty() {
return Err(Error::EmptyUsername);
}
let password_hash = hash_password(&new_user.password)?;
let connection = self.db.connect()?;
let new_user = User {
name: new_user.name.to_owned(),
password_hash,
admin: new_user.admin as i32,
};
diesel::insert_into(users::table)
.values(&new_user)
.execute(&connection)
.map_err(|_| Error::Unspecified)?;
Ok(())
}
pub fn delete(&self, username: &str) -> Result<(), Error> {
use crate::db::users::dsl::*;
let connection = self.db.connect()?;
diesel::delete(users.filter(name.eq(username)))
.execute(&connection)
.map_err(|_| Error::Unspecified)?;
Ok(())
}
pub fn set_password(&self, username: &str, password: &str) -> Result<(), Error> {
let hash = hash_password(password)?;
let connection = self.db.connect()?;
use crate::db::users::dsl::*;
diesel::update(users.filter(name.eq(username)))
.set(password_hash.eq(hash))
.execute(&connection)
.map_err(|_| Error::Unspecified)?;
Ok(())
}
pub fn set_is_admin(&self, username: &str, is_admin: bool) -> Result<(), Error> {
use crate::db::users::dsl::*;
let connection = self.db.connect()?;
diesel::update(users.filter(name.eq(username)))
.set(admin.eq(is_admin as i32))
.execute(&connection)
.map_err(|_| Error::Unspecified)?;
Ok(())
}
pub fn login(&self, username: &str, password: &str) -> Result<AuthToken, Error> {
use crate::db::users::dsl::*;
let connection = self.db.connect()?;
match users
.select(password_hash)
.filter(name.eq(username))
.get_result(&connection)
{
Err(diesel::result::Error::NotFound) => Err(Error::IncorrectUsername),
Ok(hash) => {
let hash: String = hash;
if verify_password(&hash, password) {
let authorization = Authorization {
username: username.to_owned(),
scope: AuthorizationScope::PolarisAuth,
};
self.generate_auth_token(&authorization)
} else {
Err(Error::IncorrectPassword)
}
}
Err(_) => Err(Error::Unspecified),
}
}
pub fn authenticate(
&self,
auth_token: &AuthToken,
scope: AuthorizationScope,
) -> Result<Authorization, Error> {
let authorization = self.decode_auth_token(auth_token, scope)?;
if self.exists(&authorization.username)? {
Ok(authorization)
} else {
Err(Error::IncorrectUsername)
}
}
fn decode_auth_token(
&self,
auth_token: &AuthToken,
scope: AuthorizationScope,
) -> Result<Authorization, Error> {
let AuthToken(data) = auth_token;
let ttl = match scope {
AuthorizationScope::PolarisAuth => 0, // permanent
AuthorizationScope::LastFMLink => 10 * 60, // 10 minutes
};
let authorization = branca::decode(data, &self.auth_secret.key, ttl)
.map_err(|_| Error::InvalidAuthToken)?;
let authorization: Authorization =
serde_json::from_slice(&authorization[..]).map_err(|_| Error::InvalidAuthToken)?;
if authorization.scope != scope {
return Err(Error::IncorrectAuthorizationScope);
}
Ok(authorization)
}
fn generate_auth_token(&self, authorization: &Authorization) -> Result<AuthToken, Error> {
let serialized_authorization =
serde_json::to_string(&authorization).map_err(|_| Error::Unspecified)?;
branca::encode(
serialized_authorization.as_bytes(),
&self.auth_secret.key,
SystemTime::now()
.duration_since(UNIX_EPOCH)
.map_err(|_| Error::Unspecified)?
.as_secs() as u32,
)
.map_err(|_| Error::Unspecified)
.map(AuthToken)
}
pub fn count(&self) -> anyhow::Result<i64> {
use crate::db::users::dsl::*;
let connection = self.db.connect()?;
let count = users.count().get_result(&connection)?;
Ok(count)
}
pub fn list(&self) -> Result<Vec<User>, Error> {
use crate::db::users::dsl::*;
let connection = self.db.connect()?;
users
.select((name, password_hash, admin))
.get_results(&connection)
.map_err(|_| Error::Unspecified)
}
pub fn exists(&self, username: &str) -> Result<bool, Error> {
use crate::db::users::dsl::*;
let connection = self.db.connect()?;
let results: Vec<String> = users
.select(name)
.filter(name.eq(username))
.get_results(&connection)
.map_err(|_| Error::Unspecified)?;
Ok(results.len() > 0)
}
pub fn is_admin(&self, username: &str) -> Result<bool, Error> {
use crate::db::users::dsl::*;
let connection = self.db.connect()?;
let is_admin: i32 = users
.filter(name.eq(username))
.select(admin)
.get_result(&connection)
.map_err(|_| Error::Unspecified)?;
Ok(is_admin != 0)
}
pub fn lastfm_link(
&self,
username: &str,
lastfm_login: &str,
session_key: &str,
) -> Result<(), Error> {
use crate::db::users::dsl::*;
let connection = self.db.connect()?;
diesel::update(users.filter(name.eq(username)))
.set((
lastfm_username.eq(lastfm_login),
lastfm_session_key.eq(session_key),
))
.execute(&connection)
.map_err(|_| Error::Unspecified)?;
Ok(())
}
pub fn generate_lastfm_link_token(&self, username: &str) -> Result<AuthToken, Error> {
self.generate_auth_token(&Authorization {
username: username.to_owned(),
scope: AuthorizationScope::LastFMLink,
})
}
pub fn get_lastfm_session_key(&self, username: &str) -> anyhow::Result<String> {
use crate::db::users::dsl::*;
let connection = self.db.connect()?;
let token = users
.filter(name.eq(username))
.select(lastfm_session_key)
.get_result(&connection)?;
match token {
Some(t) => Ok(t),
_ => Err(anyhow!("Missing LastFM credentials")),
}
}
pub fn is_lastfm_linked(&self, username: &str) -> bool {
self.get_lastfm_session_key(username).is_ok()
}
pub fn lastfm_unlink(&self, username: &str) -> anyhow::Result<()> {
use crate::db::users::dsl::*;
let connection = self.db.connect()?;
let null: Option<String> = None;
diesel::update(users.filter(name.eq(username)))
.set((lastfm_session_key.eq(&null), lastfm_username.eq(&null)))
.execute(&connection)?;
Ok(())
}
}
fn hash_password(password: &str) -> Result<String, Error> {
if password.is_empty() {
return Err(Error::EmptyPassword);
}
pbkdf2::pbkdf2_simple(password, HASH_ITERATIONS).map_err(|_| Error::Unspecified)
}
fn verify_password(password_hash: &str, attempted_password: &str) -> bool {
pbkdf2::pbkdf2_check(attempted_password, password_hash).is_ok()
}

View file

@@ -1,49 +0,0 @@
use serde::{Deserialize, Serialize};
use crate::db::users;
mod error;
mod manager;
mod preferences;
#[cfg(test)]
mod test;
pub use error::*;
pub use manager::*;
pub use preferences::*;
#[derive(Debug, Insertable, Queryable)]
#[table_name = "users"]
pub struct User {
pub name: String,
pub password_hash: String,
pub admin: i32,
}
impl User {
pub fn is_admin(&self) -> bool {
self.admin != 0
}
}
#[derive(Debug, Deserialize)]
pub struct NewUser {
pub name: String,
pub password: String,
pub admin: bool,
}
#[derive(Debug)]
pub struct AuthToken(pub String);
#[derive(Debug, PartialEq, Deserialize, Serialize)]
pub enum AuthorizationScope {
PolarisAuth,
LastFMLink,
}
#[derive(Debug, PartialEq, Deserialize, Serialize)]
pub struct Authorization {
pub username: String,
pub scope: AuthorizationScope,
}

View file

@@ -1,41 +0,0 @@
use anyhow::Result;
use diesel;
use diesel::prelude::*;
use serde::{Deserialize, Serialize};
use super::*;
#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)]
pub struct Preferences {
pub lastfm_username: Option<String>,
pub web_theme_base: Option<String>,
pub web_theme_accent: Option<String>,
}
impl Manager {
pub fn read_preferences(&self, username: &str) -> Result<Preferences> {
use self::users::dsl::*;
let connection = self.db.connect()?;
let (theme_base, theme_accent, read_lastfm_username) = users
.select((web_theme_base, web_theme_accent, lastfm_username))
.filter(name.eq(username))
.get_result(&connection)?;
Ok(Preferences {
web_theme_base: theme_base,
web_theme_accent: theme_accent,
lastfm_username: read_lastfm_username,
})
}
pub fn write_preferences(&self, username: &str, preferences: &Preferences) -> Result<()> {
use crate::db::users::dsl::*;
let connection = self.db.connect()?;
diesel::update(users.filter(name.eq(username)))
.set((
web_theme_base.eq(&preferences.web_theme_base),
web_theme_accent.eq(&preferences.web_theme_accent),
))
.execute(&connection)?;
Ok(())
}
}

View file

@@ -1,189 +0,0 @@
use super::*;
use crate::app::test;
use crate::test_name;
const TEST_USERNAME: &str = "Walter";
const TEST_PASSWORD: &str = "super_secret!";
#[test]
fn create_delete_user_golden_path() {
let ctx = test::ContextBuilder::new(test_name!()).build();
let new_user = NewUser {
name: TEST_USERNAME.to_owned(),
password: TEST_PASSWORD.to_owned(),
admin: false,
};
ctx.user_manager.create(&new_user).unwrap();
assert_eq!(ctx.user_manager.list().unwrap().len(), 1);
ctx.user_manager.delete(&new_user.name).unwrap();
assert_eq!(ctx.user_manager.list().unwrap().len(), 0);
}
#[test]
fn cannot_create_user_with_blank_username() {
let ctx = test::ContextBuilder::new(test_name!()).build();
let new_user = NewUser {
name: "".to_owned(),
password: TEST_PASSWORD.to_owned(),
admin: false,
};
assert_eq!(
ctx.user_manager.create(&new_user).unwrap_err(),
Error::EmptyUsername
);
}
#[test]
fn cannot_create_user_with_blank_password() {
let ctx = test::ContextBuilder::new(test_name!()).build();
let new_user = NewUser {
name: TEST_USERNAME.to_owned(),
password: "".to_owned(),
admin: false,
};
assert_eq!(
ctx.user_manager.create(&new_user).unwrap_err(),
Error::EmptyPassword
);
}
#[test]
fn cannot_create_duplicate_user() {
let ctx = test::ContextBuilder::new(test_name!()).build();
let new_user = NewUser {
name: TEST_USERNAME.to_owned(),
password: TEST_PASSWORD.to_owned(),
admin: false,
};
ctx.user_manager.create(&new_user).unwrap();
ctx.user_manager.create(&new_user).unwrap_err();
}
#[test]
fn can_read_write_preferences() {
let ctx = test::ContextBuilder::new(test_name!()).build();
let new_preferences = Preferences {
web_theme_base: Some("very-dark-theme".to_owned()),
web_theme_accent: Some("#FF0000".to_owned()),
lastfm_username: None,
};
let new_user = NewUser {
name: TEST_USERNAME.to_owned(),
password: TEST_PASSWORD.to_owned(),
admin: false,
};
ctx.user_manager.create(&new_user).unwrap();
ctx.user_manager
.write_preferences(TEST_USERNAME, &new_preferences)
.unwrap();
let read_preferences = ctx.user_manager.read_preferences("Walter").unwrap();
assert_eq!(new_preferences, read_preferences);
}
#[test]
fn login_rejects_bad_password() {
let ctx = test::ContextBuilder::new(test_name!()).build();
let new_user = NewUser {
name: TEST_USERNAME.to_owned(),
password: TEST_PASSWORD.to_owned(),
admin: false,
};
ctx.user_manager.create(&new_user).unwrap();
assert_eq!(
ctx.user_manager
.login(TEST_USERNAME, "not the password")
.unwrap_err(),
Error::IncorrectPassword
)
}
#[test]
fn login_golden_path() {
let ctx = test::ContextBuilder::new(test_name!()).build();
let new_user = NewUser {
name: TEST_USERNAME.to_owned(),
password: TEST_PASSWORD.to_owned(),
admin: false,
};
ctx.user_manager.create(&new_user).unwrap();
assert!(ctx.user_manager.login(TEST_USERNAME, TEST_PASSWORD).is_ok())
}
#[test]
fn authenticate_rejects_bad_token() {
let ctx = test::ContextBuilder::new(test_name!()).build();
let new_user = NewUser {
name: TEST_USERNAME.to_owned(),
password: TEST_PASSWORD.to_owned(),
admin: false,
};
ctx.user_manager.create(&new_user).unwrap();
let fake_token = AuthToken("fake token".to_owned());
assert!(ctx
.user_manager
.authenticate(&fake_token, AuthorizationScope::PolarisAuth)
.is_err())
}
#[test]
fn authenticate_golden_path() {
let ctx = test::ContextBuilder::new(test_name!()).build();
let new_user = NewUser {
name: TEST_USERNAME.to_owned(),
password: TEST_PASSWORD.to_owned(),
admin: false,
};
ctx.user_manager.create(&new_user).unwrap();
let token = ctx
.user_manager
.login(TEST_USERNAME, TEST_PASSWORD)
.unwrap();
let authorization = ctx
.user_manager
.authenticate(&token, AuthorizationScope::PolarisAuth)
.unwrap();
assert_eq!(
authorization,
Authorization {
username: TEST_USERNAME.to_owned(),
scope: AuthorizationScope::PolarisAuth,
}
)
}
#[test]
fn authenticate_validates_scope() {
let ctx = test::ContextBuilder::new(test_name!()).build();
let new_user = NewUser {
name: TEST_USERNAME.to_owned(),
password: TEST_PASSWORD.to_owned(),
admin: false,
};
ctx.user_manager.create(&new_user).unwrap();
let token = ctx
.user_manager
.generate_lastfm_link_token(TEST_USERNAME)
.unwrap();
let authorization = ctx
.user_manager
.authenticate(&token, AuthorizationScope::PolarisAuth);
assert_eq!(
authorization.unwrap_err(),
Error::IncorrectAuthorizationScope
)
}

208
src/app/vfs.rs Normal file
View file

@@ -0,0 +1,208 @@
use core::ops::Deref;
use diesel::prelude::*;
use regex::Regex;
use serde::{Deserialize, Serialize};
use std::path::{self, Path, PathBuf};
use crate::db::{self, mount_points, DB};
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("The following real path could not be mapped to a virtual path: `{0}`")]
CouldNotMapToVirtualPath(PathBuf),
#[error("The following virtual path could not be mapped to a real path: `{0}`")]
CouldNotMapToRealPath(PathBuf),
#[error(transparent)]
DatabaseConnection(#[from] db::Error),
#[error(transparent)]
Database(#[from] diesel::result::Error),
}
#[derive(Clone, Debug, Deserialize, Insertable, PartialEq, Eq, Queryable, Serialize)]
#[diesel(table_name = mount_points)]
pub struct MountDir {
pub source: String,
pub name: String,
}
#[derive(Clone, Debug, Deserialize, PartialEq, Eq, Serialize)]
pub struct Mount {
pub source: PathBuf,
pub name: String,
}
impl From<MountDir> for Mount {
fn from(m: MountDir) -> Self {
let separator_regex = Regex::new(r"\\|/").unwrap();
let mut correct_separator = String::new();
correct_separator.push(path::MAIN_SEPARATOR);
let path_string = separator_regex.replace_all(&m.source, correct_separator.as_str());
let source = PathBuf::from(path_string.deref());
Self {
name: m.name,
source,
}
}
}
#[allow(clippy::upper_case_acronyms)]
pub struct VFS {
mounts: Vec<Mount>,
}
impl VFS {
pub fn new(mounts: Vec<Mount>) -> VFS {
VFS { mounts }
}
pub fn real_to_virtual<P: AsRef<Path>>(&self, real_path: P) -> Result<PathBuf, Error> {
for mount in &self.mounts {
if let Ok(p) = real_path.as_ref().strip_prefix(&mount.source) {
let mount_path = Path::new(&mount.name);
return if p.components().count() == 0 {
Ok(mount_path.to_path_buf())
} else {
Ok(mount_path.join(p))
};
}
}
Err(Error::CouldNotMapToVirtualPath(real_path.as_ref().into()))
}
pub fn virtual_to_real<P: AsRef<Path>>(&self, virtual_path: P) -> Result<PathBuf, Error> {
for mount in &self.mounts {
let mount_path = Path::new(&mount.name);
if let Ok(p) = virtual_path.as_ref().strip_prefix(mount_path) {
return if p.components().count() == 0 {
Ok(mount.source.clone())
} else {
Ok(mount.source.join(p))
};
}
}
Err(Error::CouldNotMapToRealPath(virtual_path.as_ref().into()))
}
pub fn mounts(&self) -> &Vec<Mount> {
&self.mounts
}
}
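Aside: a small sketch (not part of the diff, and assuming the `VFS` and `Mount` types above) showing that mounts are tried in order and that paths outside every mount produce an error; Unix-style paths are used for illustration.

```rust
use std::path::{Path, PathBuf};

fn main() {
    let vfs = VFS::new(vec![
        Mount { name: "music".to_owned(), source: PathBuf::from("/srv/music") },
        Mount { name: "podcasts".to_owned(), source: PathBuf::from("/srv/podcasts") },
    ]);
    // Virtual paths are rooted at the mount name; real paths are rooted at its source.
    assert_eq!(
        vfs.virtual_to_real(Path::new("podcasts/show/ep1.mp3")).unwrap(),
        PathBuf::from("/srv/podcasts/show/ep1.mp3")
    );
    assert!(vfs.real_to_virtual(Path::new("/tmp/unrelated.mp3")).is_err());
}
```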
#[derive(Clone)]
pub struct Manager {
db: DB,
}
impl Manager {
pub fn new(db: DB) -> Self {
Self { db }
}
pub fn get_vfs(&self) -> Result<VFS, Error> {
let mount_dirs = self.mount_dirs()?;
let mounts = mount_dirs.into_iter().map(|p| p.into()).collect();
Ok(VFS::new(mounts))
}
pub fn mount_dirs(&self) -> Result<Vec<MountDir>, Error> {
use self::mount_points::dsl::*;
let mut connection = self.db.connect()?;
let mount_dirs: Vec<MountDir> = mount_points
.select((source, name))
.get_results(&mut connection)?;
Ok(mount_dirs)
}
pub fn set_mount_dirs(&self, mount_dirs: &[MountDir]) -> Result<(), Error> {
let mut connection = self.db.connect()?;
connection.transaction::<_, diesel::result::Error, _>(|connection| {
use self::mount_points::dsl::*;
diesel::delete(mount_points).execute(&mut *connection)?;
diesel::insert_into(mount_points)
.values(mount_dirs)
.execute(&mut *connection)?; // TODO https://github.com/diesel-rs/diesel/issues/1822
Ok(())
})?;
Ok(())
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn converts_virtual_to_real() {
let vfs = VFS::new(vec![Mount {
name: "root".to_owned(),
source: Path::new("test_dir").to_owned(),
}]);
let real_path: PathBuf = ["test_dir", "somewhere", "something.png"].iter().collect();
let virtual_path: PathBuf = ["root", "somewhere", "something.png"].iter().collect();
let converted_path = vfs.virtual_to_real(virtual_path.as_path()).unwrap();
assert_eq!(converted_path, real_path);
}
#[test]
fn converts_virtual_to_real_top_level() {
let vfs = VFS::new(vec![Mount {
name: "root".to_owned(),
source: Path::new("test_dir").to_owned(),
}]);
let real_path = Path::new("test_dir");
let converted_path = vfs.virtual_to_real(Path::new("root")).unwrap();
assert_eq!(converted_path, real_path);
}
#[test]
fn converts_real_to_virtual() {
let vfs = VFS::new(vec![Mount {
name: "root".to_owned(),
source: Path::new("test_dir").to_owned(),
}]);
let virtual_path: PathBuf = ["root", "somewhere", "something.png"].iter().collect();
let real_path: PathBuf = ["test_dir", "somewhere", "something.png"].iter().collect();
let converted_path = vfs.real_to_virtual(real_path.as_path()).unwrap();
assert_eq!(converted_path, virtual_path);
}
#[test]
fn cleans_path_string() {
let mut correct_path = path::PathBuf::new();
if cfg!(target_os = "windows") {
correct_path.push("C:\\");
} else {
correct_path.push("/usr");
}
correct_path.push("some");
correct_path.push("path");
let tests = if cfg!(target_os = "windows") {
vec![
r#"C:/some/path"#,
r#"C:\some\path"#,
r#"C:\some\path\"#,
r#"C:\some\path\\\\"#,
r#"C:\some/path//"#,
]
} else {
vec![
r#"/usr/some/path"#,
r#"/usr\some\path"#,
r#"/usr\some\path\"#,
r#"/usr\some\path\\\\"#,
r#"/usr\some/path//"#,
]
};
for test in tests {
let mount_dir = MountDir {
source: test.to_owned(),
name: "name".to_owned(),
};
let mount: Mount = mount_dir.into();
assert_eq!(mount.source, correct_path);
}
}
}

View file

@@ -1,42 +0,0 @@
use anyhow::*;
use diesel::prelude::*;
use super::*;
use crate::db::mount_points;
use crate::db::DB;
#[derive(Clone)]
pub struct Manager {
db: DB,
}
impl Manager {
pub fn new(db: DB) -> Self {
Self { db }
}
pub fn get_vfs(&self) -> Result<VFS> {
let mount_dirs = self.mount_dirs()?;
let mounts = mount_dirs.into_iter().map(|p| p.into()).collect();
Ok(VFS::new(mounts))
}
pub fn mount_dirs(&self) -> Result<Vec<MountDir>> {
use self::mount_points::dsl::*;
let connection = self.db.connect()?;
let mount_dirs: Vec<MountDir> = mount_points
.select((source, name))
.get_results(&connection)?;
Ok(mount_dirs)
}
pub fn set_mount_dirs(&self, mount_dirs: &Vec<MountDir>) -> Result<()> {
use self::mount_points::dsl::*;
let connection = self.db.connect()?;
diesel::delete(mount_points).execute(&connection)?;
diesel::insert_into(mount_points)
.values(mount_dirs)
.execute(&*connection)?; // TODO https://github.com/diesel-rs/diesel/issues/1822
Ok(())
}
}

View file

@@ -1,82 +0,0 @@
use anyhow::*;
use core::ops::Deref;
use regex::Regex;
use serde::{Deserialize, Serialize};
use std::path::{self, Path, PathBuf};
use crate::db::mount_points;
mod manager;
#[cfg(test)]
mod test;
pub use manager::*;
#[derive(Clone, Debug, Deserialize, Insertable, PartialEq, Queryable, Serialize)]
#[table_name = "mount_points"]
pub struct MountDir {
pub source: String,
pub name: String,
}
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
pub struct Mount {
pub source: PathBuf,
pub name: String,
}
impl From<MountDir> for Mount {
fn from(m: MountDir) -> Self {
let separator_regex = Regex::new(r"\\|/").unwrap();
let mut correct_separator = String::new();
correct_separator.push(path::MAIN_SEPARATOR);
let path_string = separator_regex.replace_all(&m.source, correct_separator.as_str());
let source = PathBuf::from(path_string.deref());
Self {
name: m.name,
source: source,
}
}
}
pub struct VFS {
mounts: Vec<Mount>,
}
impl VFS {
pub fn new(mounts: Vec<Mount>) -> VFS {
VFS { mounts }
}
pub fn real_to_virtual<P: AsRef<Path>>(&self, real_path: P) -> Result<PathBuf> {
for mount in &self.mounts {
if let Ok(p) = real_path.as_ref().strip_prefix(&mount.source) {
let mount_path = Path::new(&mount.name);
return if p.components().count() == 0 {
Ok(mount_path.to_path_buf())
} else {
Ok(mount_path.join(p))
};
}
}
bail!("Real path has no match in VFS")
}
pub fn virtual_to_real<P: AsRef<Path>>(&self, virtual_path: P) -> Result<PathBuf> {
for mount in &self.mounts {
let mount_path = Path::new(&mount.name);
if let Ok(p) = virtual_path.as_ref().strip_prefix(mount_path) {
return if p.components().count() == 0 {
Ok(mount.source.clone())
} else {
Ok(mount.source.join(p))
};
}
}
bail!("Virtual path has no match in VFS")
}
pub fn mounts(&self) -> &Vec<Mount> {
&self.mounts
}
}

View file

@@ -1,77 +0,0 @@
use std::path::{Path, PathBuf};
use super::*;
#[test]
fn converts_virtual_to_real() {
let vfs = VFS::new(vec![Mount {
name: "root".to_owned(),
source: Path::new("test_dir").to_owned(),
}]);
let real_path: PathBuf = ["test_dir", "somewhere", "something.png"].iter().collect();
let virtual_path: PathBuf = ["root", "somewhere", "something.png"].iter().collect();
let converted_path = vfs.virtual_to_real(virtual_path.as_path()).unwrap();
assert_eq!(converted_path, real_path);
}
#[test]
fn converts_virtual_to_real_top_level() {
let vfs = VFS::new(vec![Mount {
name: "root".to_owned(),
source: Path::new("test_dir").to_owned(),
}]);
let real_path = Path::new("test_dir");
let converted_path = vfs.virtual_to_real(Path::new("root")).unwrap();
assert_eq!(converted_path, real_path);
}
#[test]
fn converts_real_to_virtual() {
let vfs = VFS::new(vec![Mount {
name: "root".to_owned(),
source: Path::new("test_dir").to_owned(),
}]);
let virtual_path: PathBuf = ["root", "somewhere", "something.png"].iter().collect();
let real_path: PathBuf = ["test_dir", "somewhere", "something.png"].iter().collect();
let converted_path = vfs.real_to_virtual(real_path.as_path()).unwrap();
assert_eq!(converted_path, virtual_path);
}
#[test]
fn cleans_path_string() {
let mut correct_path = path::PathBuf::new();
if cfg!(target_os = "windows") {
correct_path.push("C:\\");
} else {
correct_path.push("/usr");
}
correct_path.push("some");
correct_path.push("path");
let tests = if cfg!(target_os = "windows") {
vec![
r#"C:/some/path"#,
r#"C:\some\path"#,
r#"C:\some\path\"#,
r#"C:\some\path\\\\"#,
r#"C:\some/path//"#,
]
} else {
vec![
r#"/usr/some/path"#,
r#"/usr\some\path"#,
r#"/usr\some\path\"#,
r#"/usr\some\path\\\\"#,
r#"/usr\some/path//"#,
]
};
for test in tests {
let mount_dir = MountDir {
source: test.to_owned(),
name: "name".to_owned(),
};
let mount: Mount = mount_dir.into();
assert_eq!(mount.source, correct_path);
}
}

98
src/db.rs Normal file
View file

@@ -0,0 +1,98 @@
use diesel::r2d2::{self, ConnectionManager, PooledConnection};
use diesel::sqlite::SqliteConnection;
use diesel::RunQueryDsl;
use diesel_migrations::EmbeddedMigrations;
use diesel_migrations::MigrationHarness;
use std::path::{Path, PathBuf};
mod schema;
pub use self::schema::*;
const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations");
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("Could not initialize database connection pool")]
ConnectionPoolBuild,
#[error("Could not acquire database connection from pool")]
ConnectionPool,
#[error("Filesystem error for `{0}`: `{1}`")]
Io(PathBuf, std::io::Error),
#[error("Could not apply database migrations")]
Migration,
}
#[derive(Clone)]
pub struct DB {
pool: r2d2::Pool<ConnectionManager<SqliteConnection>>,
}
#[derive(Debug)]
struct ConnectionCustomizer {}
impl diesel::r2d2::CustomizeConnection<SqliteConnection, diesel::r2d2::Error>
for ConnectionCustomizer
{
fn on_acquire(&self, connection: &mut SqliteConnection) -> Result<(), diesel::r2d2::Error> {
let query = diesel::sql_query(
r#"
PRAGMA busy_timeout = 60000;
PRAGMA journal_mode = WAL;
PRAGMA synchronous = NORMAL;
PRAGMA foreign_keys = ON;
"#,
);
query
.execute(connection)
.map_err(diesel::r2d2::Error::QueryError)?;
Ok(())
}
}
impl DB {
pub fn new(path: &Path) -> Result<DB, Error> {
let directory = path.parent().unwrap();
std::fs::create_dir_all(directory).map_err(|e| Error::Io(directory.to_owned(), e))?;
let manager = ConnectionManager::<SqliteConnection>::new(path.to_string_lossy());
let pool = diesel::r2d2::Pool::builder()
.connection_customizer(Box::new(ConnectionCustomizer {}))
.build(manager)
.or(Err(Error::ConnectionPoolBuild))?;
let db = DB { pool };
db.migrate_up()?;
Ok(db)
}
pub fn connect(&self) -> Result<PooledConnection<ConnectionManager<SqliteConnection>>, Error> {
self.pool.get().or(Err(Error::ConnectionPool))
}
#[cfg(test)]
fn migrate_down(&self) -> Result<(), Error> {
let mut connection = self.connect()?;
connection
.revert_all_migrations(MIGRATIONS)
.and(Ok(()))
.or(Err(Error::Migration))
}
fn migrate_up(&self) -> Result<(), Error> {
let mut connection = self.connect()?;
connection
.run_pending_migrations(MIGRATIONS)
.and(Ok(()))
.or(Err(Error::Migration))
}
}
#[test]
fn run_migrations() {
use crate::test::*;
use crate::test_name;
let output_dir = prepare_test_directory(test_name!());
let db_path = output_dir.join("db.sqlite");
let db = DB::new(&db_path).unwrap();
db.migrate_down().unwrap();
db.migrate_up().unwrap();
}

View file

@@ -1,93 +0,0 @@
use anyhow::*;
use diesel::r2d2::{self, ConnectionManager, PooledConnection};
use diesel::sqlite::SqliteConnection;
use diesel::RunQueryDsl;
use diesel_migrations;
use std::path::Path;
mod schema;
pub use self::schema::*;
#[allow(dead_code)]
const DB_MIGRATIONS_PATH: &str = "migrations";
embed_migrations!("migrations");
#[derive(Clone)]
pub struct DB {
pool: r2d2::Pool<ConnectionManager<SqliteConnection>>,
}
#[derive(Debug)]
struct ConnectionCustomizer {}
impl diesel::r2d2::CustomizeConnection<SqliteConnection, diesel::r2d2::Error>
for ConnectionCustomizer
{
fn on_acquire(&self, connection: &mut SqliteConnection) -> Result<(), diesel::r2d2::Error> {
let query = diesel::sql_query(
r#"
PRAGMA busy_timeout = 60000;
PRAGMA journal_mode = WAL;
PRAGMA synchronous = NORMAL;
PRAGMA foreign_keys = ON;
"#,
);
query
.execute(connection)
.map_err(|e| diesel::r2d2::Error::QueryError(e))?;
Ok(())
}
}
impl DB {
pub fn new(path: &Path) -> Result<DB> {
std::fs::create_dir_all(&path.parent().unwrap())?;
let manager = ConnectionManager::<SqliteConnection>::new(path.to_string_lossy());
let pool = diesel::r2d2::Pool::builder()
.connection_customizer(Box::new(ConnectionCustomizer {}))
.build(manager)?;
let db = DB { pool: pool };
db.migrate_up()?;
Ok(db)
}
pub fn connect(&self) -> Result<PooledConnection<ConnectionManager<SqliteConnection>>> {
self.pool.get().map_err(Error::new)
}
#[allow(dead_code)]
fn migrate_down(&self) -> Result<()> {
let connection = self.connect().unwrap();
loop {
match diesel_migrations::revert_latest_migration_in_directory(
&connection,
Path::new(DB_MIGRATIONS_PATH),
) {
Ok(_) => (),
Err(diesel_migrations::RunMigrationsError::MigrationError(
diesel_migrations::MigrationError::NoMigrationRun,
)) => break,
Err(e) => bail!(e),
}
}
Ok(())
}
fn migrate_up(&self) -> Result<()> {
let connection = self.connect().unwrap();
embedded_migrations::run(&connection)?;
Ok(())
}
}
#[test]
fn run_migrations() {
use crate::test::*;
use crate::test_name;
let output_dir = prepare_test_directory(test_name!());
let db_path = output_dir.join("db.sqlite");
let db = DB::new(&db_path).unwrap();
db.migrate_down().unwrap();
db.migrate_up().unwrap();
}

View file

@@ -68,6 +68,10 @@ table! {
album -> Nullable<Text>,
artwork -> Nullable<Text>,
duration -> Nullable<Integer>,
lyricist -> Nullable<Text>,
composer -> Nullable<Text>,
genre -> Nullable<Text>,
label -> Nullable<Text>,
}
}
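Hypothetical query sketch: only the new columns are visible in this hunk, so the `songs` table name and module path below are assumptions. It merely illustrates that the newly indexed fields become selectable through Diesel once the schema is regenerated.

```rust
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;

// `crate::db::songs` is an assumed path; adjust to wherever the table! block lives.
fn list_genres(connection: &mut SqliteConnection) -> diesel::QueryResult<Vec<Option<String>>> {
    use crate::db::songs::dsl::*;
    songs.select(genre).load(connection)
}
```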

View file

@@ -6,11 +6,12 @@ extern crate diesel;
#[macro_use]
extern crate diesel_migrations;
use anyhow::*;
use log::info;
use simplelog::{CombinedLogger, LevelFilter, SharedLogger, TermLogger, TerminalMode, WriteLogger};
use simplelog::{
ColorChoice, CombinedLogger, LevelFilter, SharedLogger, TermLogger, TerminalMode, WriteLogger,
};
use std::fs;
use std::path::Path;
use std::path::{Path, PathBuf};
mod app;
mod db;
@@ -22,30 +23,57 @@ mod test;
mod ui;
mod utils;
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error(transparent)]
App(#[from] app::Error),
#[error("Could not parse command line arguments:\n\n{0}")]
CliArgsParsing(getopts::Fail),
#[cfg(unix)]
#[error("Failed to turn polaris process into a daemon:\n\n{0}")]
Daemonize(daemonize::DaemonizeError),
#[error("Could not create log directory `{0}`:\n\n{1}")]
LogDirectoryCreationError(PathBuf, std::io::Error),
#[error("Could not create log file `{0}`:\n\n{1}")]
LogFileCreationError(PathBuf, std::io::Error),
#[error("Could not initialize log system:\n\n{0}")]
LogInitialization(log::SetLoggerError),
#[cfg(unix)]
#[error("Could not create pid directory `{0}`:\n\n{1}")]
PidDirectoryCreationError(PathBuf, std::io::Error),
#[cfg(unix)]
#[error("Could not notify systemd of initialization success:\n\n{0}")]
SystemDNotify(std::io::Error),
}
#[cfg(unix)]
fn daemonize<T: AsRef<Path>>(foreground: bool, pid_file_path: T) -> Result<()> {
fn daemonize<T: AsRef<Path>>(foreground: bool, pid_file_path: T) -> Result<(), Error> {
if foreground {
return Ok(());
}
if let Some(parent) = pid_file_path.as_ref().parent() {
fs::create_dir_all(parent)?;
fs::create_dir_all(parent)
.map_err(|e| Error::PidDirectoryCreationError(parent.to_owned(), e))?;
}
let daemonize = daemonize::Daemonize::new()
.pid_file(pid_file_path.as_ref())
.working_directory(".");
daemonize.start()?;
daemonize.start().map_err(Error::Daemonize)?;
Ok(())
}
#[cfg(unix)]
fn notify_ready() -> Result<()> {
fn notify_ready() -> Result<(), Error> {
if let Ok(true) = sd_notify::booted() {
sd_notify::notify(true, &[sd_notify::NotifyState::Ready])?;
sd_notify::notify(true, &[sd_notify::NotifyState::Ready]).map_err(Error::SystemDNotify)?;
}
Ok(())
}
fn init_logging<T: AsRef<Path>>(log_level: LevelFilter, log_file_path: &Option<T>) -> Result<()> {
fn init_logging<T: AsRef<Path>>(
log_level: LevelFilter,
log_file_path: &Option<T>,
) -> Result<(), Error> {
let log_config = simplelog::ConfigBuilder::new()
.set_location_level(LevelFilter::Error)
.build();
@@ -54,29 +82,34 @@ fn init_logging<T: AsRef<Path>>(log_level: LevelFilter, log_file_path: &Option<T
log_level,
log_config.clone(),
TerminalMode::Mixed,
ColorChoice::Auto,
)];
if let Some(path) = log_file_path {
if let Some(parent) = path.as_ref().parent() {
fs::create_dir_all(parent)?;
fs::create_dir_all(parent)
.map_err(|e| Error::LogDirectoryCreationError(parent.to_owned(), e))?;
}
loggers.push(WriteLogger::new(
log_level,
log_config.clone(),
fs::File::create(path)?,
log_config,
fs::File::create(path)
.map_err(|e| Error::LogFileCreationError(path.as_ref().to_owned(), e))?,
));
}
CombinedLogger::init(loggers)?;
CombinedLogger::init(loggers).map_err(Error::LogInitialization)?;
Ok(())
}
fn main() -> Result<()> {
fn main() -> Result<(), Error> {
// Parse CLI options
let args: Vec<String> = std::env::args().collect();
let options_manager = options::Manager::new();
let cli_options = options_manager.parse(&args[1..])?;
let cli_options = options_manager
.parse(&args[1..])
.map_err(Error::CliArgsParsing)?;
if cli_options.show_help {
let program = args[0].clone();

View file

@@ -1,4 +1,3 @@
use anyhow::Result;
use simplelog::LevelFilter;
use std::path::PathBuf;
@@ -28,7 +27,7 @@ impl Manager {
}
}
pub fn parse(&self, input: &[String]) -> Result<CLIOptions> {
pub fn parse(&self, input: &[String]) -> Result<CLIOptions, getopts::Fail> {
let matches = self.protocol.parse(input)?;
Ok(CLIOptions {

View file

@@ -106,6 +106,6 @@ impl Paths {
paths.log_file_path = None;
};
return paths;
paths
}
}

View file

@@ -1,10 +1,10 @@
use actix_web::{
middleware::{normalize::TrailingSlash, Compress, Logger, NormalizePath},
dev::Service,
middleware::{Compress, Logger, NormalizePath},
rt::System,
web::{self, ServiceConfig},
App as ActixApp, HttpServer,
};
use anyhow::*;
use log::error;
use crate::app::App;
@@ -16,7 +16,6 @@ pub mod test;
pub fn make_config(app: App) -> impl FnOnce(&mut ServiceConfig) + Clone {
move |cfg: &mut ServiceConfig| {
let encryption_key = cookie::Key::derive_from(&app.auth_secret.key[..]);
cfg.app_data(web::Data::new(app.index))
.app_data(web::Data::new(app.config_manager))
.app_data(web::Data::new(app.ddns_manager))
@@ -26,12 +25,10 @@ pub fn make_config(app: App) -> impl FnOnce(&mut ServiceConfig) + Clone {
.app_data(web::Data::new(app.thumbnail_manager))
.app_data(web::Data::new(app.user_manager))
.app_data(web::Data::new(app.vfs_manager))
.app_data(web::Data::new(encryption_key))
.service(
web::scope("/api")
.configure(api::make_config())
.wrap_fn(api::http_auth_middleware)
.wrap(NormalizePath::new(TrailingSlash::Trim)),
.wrap(NormalizePath::trim()),
)
.service(
actix_files::Files::new("/swagger", app.swagger_dir_path)
@@ -46,20 +43,34 @@
}
}
pub fn run(app: App) -> Result<()> {
System::run(move || {
let address = format!("0.0.0.0:{}", app.port);
pub fn run(app: App) -> Result<(), std::io::Error> {
let address = ("0.0.0.0", app.port);
System::new().block_on(
HttpServer::new(move || {
ActixApp::new()
.wrap(Logger::default())
.wrap_fn(|req, srv| {
// For some reason, actix logs error as DEBUG level.
// This logs them as ERROR level
// See https://github.com/actix/actix-web/issues/2637
let response_future = srv.call(req);
async {
let response = response_future.await?;
if let Some(error) = response.response().error() {
error!("{}", error);
}
Ok(response)
}
})
.wrap(Compress::default())
.configure(make_config(app.clone()))
})
.disable_signals()
.bind(address)
.map(|server| server.run())
.map_err(|e| error!("Error starting HTTP server: {:?}", e))
.ok();
})?;
Ok(())
.map_err(|e| {
error!("Error starting HTTP server: {:?}", e);
e
})?
.run(),
)
}
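Aside: a standalone sketch of the `wrap_fn` trick above, which re-logs handler errors at ERROR level because actix-web 4 otherwise records them at DEBUG (see the linked issue). The port and route are illustrative.

```rust
use actix_web::{dev::Service, web, App, HttpServer};
use log::error;

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new()
            .wrap_fn(|req, srv| {
                // Capture the response and surface any handler error in the log.
                let response_future = srv.call(req);
                async {
                    let response = response_future.await?;
                    if let Some(e) = response.response().error() {
                        error!("{}", e);
                    }
                    Ok(response)
                }
            })
            .route("/health", web::get().to(|| async { "ok" }))
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}
```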

View file

@@ -1,29 +1,29 @@
use actix_files::NamedFile;
use actix_web::body::BoxBody;
use actix_web::http::header::ContentEncoding;
use actix_web::{
client::HttpError,
delete,
dev::{MessageBody, Payload, Service, ServiceRequest, ServiceResponse},
error::{BlockingError, ErrorForbidden, ErrorInternalServerError, ErrorUnauthorized},
dev::Payload,
error::{ErrorForbidden, ErrorInternalServerError, ErrorUnauthorized},
get,
http::StatusCode,
post, put,
web::{self, Data, Json, JsonConfig, ServiceConfig},
FromRequest, HttpMessage, HttpRequest, HttpResponse, ResponseError,
FromRequest, HttpRequest, HttpResponse, Responder, ResponseError,
};
use actix_web_httpauth::extractors::{basic::BasicAuth, bearer::BearerAuth};
use cookie::{self, *};
use futures_util::future::{err, ok};
use actix_web_httpauth::extractors::bearer::BearerAuth;
use futures_util::future::err;
use percent_encoding::percent_decode_str;
use std::future::Future;
use std::ops::Deref;
use std::path::Path;
use std::path::{Path, PathBuf};
use std::pin::Pin;
use std::str;
use crate::app::{
config, ddns,
index::{self, Index},
lastfm, playlist, settings, thumbnail, user, vfs,
lastfm, playlist, settings, thumbnail, user,
vfs::{self, MountDir},
};
use crate::service::{dto, error::*};
@@ -73,131 +73,70 @@ pub fn make_config() -> impl FnOnce(&mut ServiceConfig) + Clone {
impl ResponseError for APIError {
fn status_code(&self) -> StatusCode {
match self {
APIError::IncorrectCredentials => StatusCode::UNAUTHORIZED,
APIError::EmptyUsername => StatusCode::BAD_REQUEST,
APIError::EmptyPassword => StatusCode::BAD_REQUEST,
APIError::DeletingOwnAccount => StatusCode::CONFLICT,
APIError::OwnAdminPrivilegeRemoval => StatusCode::CONFLICT,
APIError::AuthorizationTokenEncoding => StatusCode::INTERNAL_SERVER_ERROR,
APIError::AdminPermissionRequired => StatusCode::UNAUTHORIZED,
APIError::AudioFileIOError => StatusCode::NOT_FOUND,
APIError::ThumbnailFileIOError => StatusCode::NOT_FOUND,
APIError::AuthenticationRequired => StatusCode::UNAUTHORIZED,
APIError::BrancaTokenEncoding => StatusCode::INTERNAL_SERVER_ERROR,
APIError::DdnsUpdateQueryFailed(s) => {
StatusCode::from_u16(*s).unwrap_or(StatusCode::INTERNAL_SERVER_ERROR)
}
APIError::Database(_) => StatusCode::INTERNAL_SERVER_ERROR,
APIError::DeletingOwnAccount => StatusCode::CONFLICT,
APIError::EmbeddedArtworkNotFound => StatusCode::NOT_FOUND,
APIError::EmptyPassword => StatusCode::BAD_REQUEST,
APIError::EmptyUsername => StatusCode::BAD_REQUEST,
APIError::IncorrectCredentials => StatusCode::UNAUTHORIZED,
APIError::Internal => StatusCode::INTERNAL_SERVER_ERROR,
APIError::Io(_, _) => StatusCode::INTERNAL_SERVER_ERROR,
APIError::LastFMAccountNotLinked => StatusCode::NO_CONTENT,
APIError::LastFMLinkContentBase64DecodeError => StatusCode::BAD_REQUEST,
APIError::LastFMLinkContentEncodingError => StatusCode::BAD_REQUEST,
APIError::UserNotFound => StatusCode::NOT_FOUND,
APIError::LastFMNowPlaying(_) => StatusCode::FAILED_DEPENDENCY,
APIError::LastFMScrobble(_) => StatusCode::FAILED_DEPENDENCY,
APIError::LastFMScrobblerAuthentication(_) => StatusCode::FAILED_DEPENDENCY,
APIError::OwnAdminPrivilegeRemoval => StatusCode::CONFLICT,
APIError::PasswordHashing => StatusCode::INTERNAL_SERVER_ERROR,
APIError::PlaylistNotFound => StatusCode::NOT_FOUND,
APIError::Settings(_) => StatusCode::INTERNAL_SERVER_ERROR,
APIError::SongMetadataNotFound => StatusCode::NOT_FOUND,
APIError::ThumbnailFlacDecoding(_, _) => StatusCode::INTERNAL_SERVER_ERROR,
APIError::ThumbnailFileIOError => StatusCode::NOT_FOUND,
APIError::ThumbnailId3Decoding(_, _) => StatusCode::INTERNAL_SERVER_ERROR,
APIError::ThumbnailImageDecoding(_, _) => StatusCode::INTERNAL_SERVER_ERROR,
APIError::ThumbnailMp4Decoding(_, _) => StatusCode::INTERNAL_SERVER_ERROR,
APIError::TomlDeserialization(_) => StatusCode::INTERNAL_SERVER_ERROR,
APIError::UnsupportedThumbnailFormat(_) => StatusCode::INTERNAL_SERVER_ERROR,
APIError::UserNotFound => StatusCode::NOT_FOUND,
APIError::VFSPathNotFound => StatusCode::NOT_FOUND,
APIError::Unspecified => StatusCode::INTERNAL_SERVER_ERROR,
}
}
}
#[derive(Clone)]
struct Cookies {
jar: CookieJar,
key: Key,
}
impl Cookies {
fn new(key: Key) -> Self {
let jar = CookieJar::new();
Self { jar, key }
fn error_response(&self) -> HttpResponse<BoxBody> {
HttpResponse::new(self.status_code())
}
fn add_original(&mut self, cookie: Cookie<'static>) {
self.jar.add_original(cookie);
}
fn add(&mut self, cookie: Cookie<'static>) {
self.jar.add(cookie);
}
fn add_signed(&mut self, cookie: Cookie<'static>) {
self.jar.signed(&self.key).add(cookie);
}
#[allow(dead_code)]
fn get(&self, name: &str) -> Option<&Cookie> {
self.jar.get(name)
}
fn get_signed(&mut self, name: &str) -> Option<Cookie> {
self.jar.signed(&self.key).get(name)
}
}
impl FromRequest for Cookies {
type Error = actix_web::Error;
type Future = Pin<Box<dyn Future<Output = Result<Self, Self::Error>>>>;
type Config = ();
fn from_request(request: &HttpRequest, _payload: &mut Payload) -> Self::Future {
let request_cookies = match request.cookies() {
Ok(c) => c,
Err(_) => return Box::pin(err(ErrorInternalServerError(APIError::Unspecified))),
};
let key = match request.app_data::<Data<Key>>() {
Some(k) => k.as_ref(),
None => return Box::pin(err(ErrorInternalServerError(APIError::Unspecified))),
};
let mut cookies = Cookies::new(key.clone());
for cookie in request_cookies.deref() {
cookies.add_original(cookie.clone());
}
Box::pin(ok(cookies))
}
}
#[derive(Debug)]
enum AuthSource {
AuthorizationBasic,
AuthorizationBearer,
Cookie,
QueryParameter,
}
#[derive(Debug)]
struct Auth {
username: String,
source: AuthSource,
}
impl FromRequest for Auth {
type Error = actix_web::Error;
type Future = Pin<Box<dyn Future<Output = Result<Self, Self::Error>>>>;
type Config = ();
fn from_request(request: &HttpRequest, payload: &mut Payload) -> Self::Future {
let user_manager = match request.app_data::<Data<user::Manager>>() {
Some(m) => m.clone(),
None => return Box::pin(err(ErrorInternalServerError(APIError::Unspecified))),
None => return Box::pin(err(ErrorInternalServerError(APIError::Internal))),
};
let cookies_future = Cookies::from_request(request, payload);
let basic_auth_future = BasicAuth::from_request(request, payload);
let bearer_auth_future = BearerAuth::from_request(request, payload);
let query_params_future =
web::Query::<dto::AuthQueryParameters>::from_request(request, payload);
Box::pin(async move {
// Auth via session cookie
{
let mut cookies = cookies_future.await?;
if let Some(session_cookie) = cookies.get_signed(dto::COOKIE_SESSION) {
let username = session_cookie.value().to_string();
let exists = block(move || user_manager.exists(&username)).await?;
if !exists {
return Err(ErrorUnauthorized(APIError::Unspecified));
}
return Ok(Auth {
username: session_cookie.value().to_string(),
source: AuthSource::Cookie,
});
}
}
// Auth via bearer token in query parameter
if let Ok(query) = query_params_future.await {
let auth_token = user::AuthToken(query.auth_token.clone());
@@ -206,8 +145,7 @@ impl FromRequest for Auth {
})
.await?;
return Ok(Auth {
username: authorization.username.to_owned(),
source: AuthSource::QueryParameter,
username: authorization.username,
});
}
@@ -219,30 +157,11 @@ impl FromRequest for Auth {
})
.await?;
return Ok(Auth {
username: authorization.username.to_owned(),
source: AuthSource::AuthorizationBearer,
username: authorization.username,
});
}
// Auth via basic authorization header
{
let basic_auth = basic_auth_future.await?;
let username = basic_auth.user_id().to_string();
let password = basic_auth
.password()
.map(|s| s.as_ref())
.unwrap_or("")
.to_string();
let auth_result = block(move || user_manager.login(&username, &password)).await;
if auth_result.is_ok() {
Ok(Auth {
username: basic_auth.user_id().to_string(),
source: AuthSource::AuthorizationBasic,
})
} else {
Err(ErrorUnauthorized(APIError::Unspecified))
}
}
Err(ErrorUnauthorized(APIError::AuthenticationRequired))
})
}
}
@@ -255,12 +174,11 @@ struct AdminRights {
impl FromRequest for AdminRights {
type Error = actix_web::Error;
type Future = Pin<Box<dyn Future<Output = Result<Self, Self::Error>>>>;
type Config = ();
fn from_request(request: &HttpRequest, payload: &mut Payload) -> Self::Future {
let user_manager = match request.app_data::<Data<user::Manager>>() {
Some(m) => m.clone(),
None => return Box::pin(err(ErrorInternalServerError(APIError::Unspecified))),
None => return Box::pin(err(ErrorInternalServerError(APIError::Internal))),
};
let auth_future = Auth::from_request(request, payload);
@@ -269,7 +187,7 @@ impl FromRequest for AdminRights {
let user_manager_count = user_manager.clone();
let user_count = block(move || user_manager_count.count()).await;
match user_count {
Err(_) => return Err(ErrorInternalServerError(APIError::Unspecified)),
Err(e) => return Err(e.into()),
Ok(0) => return Ok(AdminRights { auth: None }),
_ => (),
};
@@ -280,101 +198,33 @@ impl FromRequest for AdminRights {
if is_admin {
Ok(AdminRights { auth: Some(auth) })
} else {
Err(ErrorForbidden(APIError::Unspecified))
Err(ErrorForbidden(APIError::AdminPermissionRequired))
}
})
}
}
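// Illustrative sketch (hypothetical names, std only) of the policy encoded by
// the extractor above: while the user table is empty, admin rights are granted
// so the very first request can perform initial setup; once users exist, only
// an authenticated admin passes.
fn admin_allowed(user_count: u64, requester_is_admin: Option<bool>) -> bool {
    if user_count == 0 {
        return true; // fresh install: nobody exists yet to authenticate against
    }
    requester_is_admin.unwrap_or(false) // unauthenticated or non-admin callers are rejected
}

fn main() {
    assert!(admin_allowed(0, None));
    assert!(admin_allowed(3, Some(true)));
    assert!(!admin_allowed(3, Some(false)));
    assert!(!admin_allowed(3, None));
}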
pub fn http_auth_middleware<
B: MessageBody + 'static,
S: Service<Response = ServiceResponse<B>, Request = ServiceRequest, Error = actix_web::Error>
+ 'static,
>(
request: ServiceRequest,
service: &mut S,
) -> Pin<Box<dyn Future<Output = Result<ServiceResponse<B>, actix_web::Error>>>> {
let user_manager = match request.app_data::<Data<user::Manager>>() {
Some(m) => m.clone(),
None => return Box::pin(err(ErrorInternalServerError(APIError::Unspecified))),
};
let (request, mut payload) = request.into_parts();
let auth_future = Auth::from_request(&request, &mut payload);
let cookies_future = Cookies::from_request(&request, &mut payload);
let request = match ServiceRequest::from_parts(request, payload) {
Ok(s) => s,
Err(_) => return Box::pin(err(ErrorInternalServerError(APIError::Unspecified))),
};
let response_future = service.call(request);
Box::pin(async move {
let mut response = response_future.await?;
if let Ok(auth) = auth_future.await {
let set_cookies = match auth.source {
AuthSource::AuthorizationBasic => true,
AuthSource::AuthorizationBearer => false,
AuthSource::Cookie => false,
AuthSource::QueryParameter => false,
};
if set_cookies {
let cookies = cookies_future.await?;
let username = auth.username.clone();
let is_admin = block(move || {
user_manager
.is_admin(&auth.username)
.map_err(|_| APIError::Unspecified)
})
.await?;
add_auth_cookies(response.response_mut(), &cookies, &username, is_admin)?;
}
}
Ok(response)
})
struct MediaFile {
named_file: NamedFile,
}
fn add_auth_cookies<T>(
response: &mut HttpResponse<T>,
cookies: &Cookies,
username: &str,
is_admin: bool,
) -> Result<(), HttpError> {
let mut cookies = cookies.clone();
cookies.add_signed(
Cookie::build(dto::COOKIE_SESSION, username.to_owned())
.same_site(cookie::SameSite::Lax)
.http_only(true)
.permanent()
.finish(),
);
cookies.add(
Cookie::build(dto::COOKIE_USERNAME, username.to_owned())
.same_site(cookie::SameSite::Lax)
.http_only(false)
.permanent()
.path("/")
.finish(),
);
cookies.add(
Cookie::build(dto::COOKIE_ADMIN, format!("{}", is_admin))
.same_site(cookie::SameSite::Lax)
.http_only(false)
.permanent()
.path("/")
.finish(),
);
let headers = response.headers_mut();
for cookie in cookies.jar.delta() {
http::HeaderValue::from_str(&cookie.to_string()).map(|c| {
headers.append(http::header::SET_COOKIE, c);
})?;
impl MediaFile {
fn new(named_file: NamedFile) -> Self {
Self { named_file }
}
}
Ok(())
impl Responder for MediaFile {
type Body = BoxBody;
fn respond_to(self, req: &HttpRequest) -> HttpResponse<Self::Body> {
// Intentionally turn off content encoding for media files because:
// 1. There is little value in compressing files that are already compressed (mp3, jpg, etc.)
// 2. The Content-Length header is incompatible with content encoding (other than identity), and can be valuable for clients
self.named_file
.set_content_encoding(ContentEncoding::Identity)
.into_response(req)
}
}
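// Toy illustration (hypothetical helper, std only) of point 1 above: bytes that
// are already dense, like mp3 or jpg payloads, barely shrink under a naive
// run-length pass, so transport-level compression mostly costs CPU for nothing.
fn run_length_pairs(data: &[u8]) -> Vec<(u8, u8)> {
    let mut runs: Vec<(u8, u8)> = Vec::new();
    for &byte in data {
        match runs.last_mut() {
            Some((value, count)) if *value == byte && *count < u8::MAX => *count += 1,
            _ => runs.push((byte, 1)),
        }
    }
    runs
}

fn main() {
    let repetitive = vec![0u8; 1024]; // silence-like, highly compressible
    let dense: Vec<u8> = (0..1024u32)
        .map(|i| (i.wrapping_mul(2_654_435_761) >> 24) as u8) // pseudo-random, like compressed audio
        .collect();
    println!("repetitive: {} bytes -> {} runs", repetitive.len(), run_length_pairs(&repetitive).len());
    println!("dense: {} bytes -> {} runs", dense.len(), run_length_pairs(&dense).len());
}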
async fn block<F, I, E>(f: F) -> Result<I, APIError>
@@ -383,10 +233,10 @@ where
I: Send + 'static,
E: Send + std::fmt::Debug + 'static + Into<APIError>,
{
actix_web::web::block(f).await.map_err(|e| match e {
BlockingError::Error(e) => e.into(),
BlockingError::Canceled => APIError::Unspecified,
})
actix_web::web::block(f)
.await
.map_err(|_| APIError::Internal)
.and_then(|r| r.map_err(|e| e.into()))
}
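// Standalone sketch (hypothetical names, std threads instead of the actix
// thread pool) of the same shape as `block` above: a failure to run the task
// at all becomes an internal error, while the closure's own error is converted
// into the caller-facing type.
#[derive(Debug)]
enum SketchError {
    Internal,       // the task could not be executed
    Domain(String), // the closure itself failed
}

fn run_blocking<F, T, E>(f: F) -> Result<T, SketchError>
where
    F: FnOnce() -> Result<T, E> + Send + 'static,
    T: Send + 'static,
    E: std::fmt::Display + Send + 'static,
{
    std::thread::spawn(f)
        .join()
        .map_err(|_| SketchError::Internal)
        .and_then(|result| result.map_err(|e| SketchError::Domain(e.to_string())))
}

fn main() {
    println!("{:?}", run_blocking(|| Ok::<_, String>(2 + 2)));
    println!("{:?}", run_blocking(|| Err::<u32, _>("database unreachable".to_string())));
}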
#[get("/version")]
@@ -458,11 +308,7 @@ async fn put_mount_dirs(
vfs_manager: Data<vfs::Manager>,
new_mount_dirs: Json<Vec<dto::MountDir>>,
) -> Result<HttpResponse, APIError> {
let new_mount_dirs = new_mount_dirs
.to_owned()
.into_iter()
.map(|m| m.into())
.collect();
let new_mount_dirs: Vec<MountDir> = new_mount_dirs.iter().cloned().map(|m| m.into()).collect();
block(move || vfs_manager.set_mount_dirs(&new_mount_dirs)).await?;
Ok(HttpResponse::new(StatusCode::OK))
}
@@ -515,10 +361,8 @@ async fn update_user(
user_update: Json<dto::UserUpdate>,
) -> Result<HttpResponse, APIError> {
if let Some(auth) = &admin_rights.auth {
if auth.username == name.as_str() {
if user_update.new_is_admin == Some(false) {
return Err(APIError::OwnAdminPrivilegeRemoval);
}
if auth.username == name.as_str() && user_update.new_is_admin == Some(false) {
return Err(APIError::OwnAdminPrivilegeRemoval);
}
}
@@ -582,7 +426,6 @@ async fn trigger_index(
async fn login(
user_manager: Data<user::Manager>,
credentials: Json<dto::Credentials>,
cookies: Cookies,
) -> Result<HttpResponse, APIError> {
let username = credentials.username.clone();
let (user::AuthToken(token), is_admin) =
@@ -597,9 +440,7 @@ async fn login(
token,
is_admin,
};
let mut response = HttpResponse::Ok().json(authorization);
add_auth_cookies(&mut response, &cookies, &username, is_admin)
.map_err(|_| APIError::Unspecified)?;
let response = HttpResponse::Ok().json(authorization);
Ok(response)
}
@@ -619,7 +460,7 @@ async fn browse(
path: web::Path<String>,
) -> Result<Json<Vec<index::CollectionFile>>, APIError> {
let result = block(move || {
let path = percent_decode_str(&(path.0)).decode_utf8_lossy();
let path = percent_decode_str(&path).decode_utf8_lossy();
index.browse(Path::new(path.as_ref()))
})
.await?;
@@ -639,7 +480,7 @@ async fn flatten(
path: web::Path<String>,
) -> Result<Json<Vec<index::Song>>, APIError> {
let songs = block(move || {
let path = percent_decode_str(&(path.0)).decode_utf8_lossy();
let path = percent_decode_str(&path).decode_utf8_lossy();
index.flatten(Path::new(path.as_ref()))
})
.await?;
@@ -682,17 +523,16 @@ async fn get_audio(
vfs_manager: Data<vfs::Manager>,
_auth: Auth,
path: web::Path<String>,
) -> Result<NamedFile, APIError> {
) -> Result<MediaFile, APIError> {
let audio_path = block(move || {
let vfs = vfs_manager.get_vfs()?;
let path = percent_decode_str(&(path.0)).decode_utf8_lossy();
let path = percent_decode_str(&path).decode_utf8_lossy();
vfs.virtual_to_real(Path::new(path.as_ref()))
.map_err(|_| APIError::VFSPathNotFound)
})
.await?;
let named_file = NamedFile::open(&audio_path).map_err(|_| APIError::AudioFileIOError)?;
Ok(named_file)
Ok(MediaFile::new(named_file))
}
#[get("/thumbnail/{path:.*}")]
@@ -702,26 +542,23 @@ async fn get_thumbnail(
_auth: Auth,
path: web::Path<String>,
options_input: web::Query<dto::ThumbnailOptions>,
) -> Result<NamedFile, APIError> {
let mut options = thumbnail::Options::default();
options.pad_to_square = options_input.pad.unwrap_or(options.pad_to_square);
) -> Result<MediaFile, APIError> {
let options = thumbnail::Options::from(options_input.0);
let thumbnail_path = block(move || {
let thumbnail_path = block(move || -> Result<PathBuf, APIError> {
let vfs = vfs_manager.get_vfs()?;
let path = percent_decode_str(&(path.0)).decode_utf8_lossy();
let image_path = vfs
.virtual_to_real(Path::new(path.as_ref()))
.map_err(|_| APIError::VFSPathNotFound)?;
let path = percent_decode_str(&path).decode_utf8_lossy();
let image_path = vfs.virtual_to_real(Path::new(path.as_ref()))?;
thumbnails_manager
.get_thumbnail(&image_path, &options)
.map_err(|_| APIError::Unspecified)
.map_err(|e| e.into())
})
.await?;
let named_file =
NamedFile::open(&thumbnail_path).map_err(|_| APIError::ThumbnailFileIOError)?;
Ok(named_file)
Ok(MediaFile::new(named_file))
}
#[get("/playlists")]
@@ -780,7 +617,7 @@ async fn lastfm_now_playing(
if !user_manager.is_lastfm_linked(&auth.username) {
return Err(APIError::LastFMAccountNotLinked);
}
let path = percent_decode_str(&(path.0)).decode_utf8_lossy();
let path = percent_decode_str(&path).decode_utf8_lossy();
lastfm_manager.now_playing(&auth.username, Path::new(path.as_ref()))?;
Ok(())
})
@@ -799,7 +636,7 @@ async fn lastfm_scrobble(
if !user_manager.is_lastfm_linked(&auth.username) {
return Err(APIError::LastFMAccountNotLinked);
}
let path = percent_decode_str(&(path.0)).decode_utf8_lossy();
let path = percent_decode_str(&path).decode_utf8_lossy();
lastfm_manager.scrobble(&auth.username, Path::new(path.as_ref()))?;
Ok(())
})

View file

@@ -1,8 +1,7 @@
use actix_test::TestServer;
use actix_web::{
middleware::{Compress, Logger},
rt::{System, SystemRunner},
test,
test::*,
web::Bytes,
App as ActixApp,
};
@@ -44,7 +43,7 @@ impl ActixTestService {
.timeout(std::time::Duration::from_secs(30));
for (name, value) in request.headers() {
actix_request = actix_request.set_header(name, value.clone());
actix_request = actix_request.insert_header((name, value.clone()));
}
if let Some(ref authorization) = self.authorization {
@@ -92,8 +91,8 @@ impl TestService for ActixTestService {
let app = App::new(5050, paths).unwrap();
let system_runner = System::new("test");
let server = test::start(move || {
let system_runner = System::new();
let server = actix_test::start(move || {
let config = make_config(app.clone());
ActixApp::new()
.wrap(Logger::default())

View file

@@ -1,20 +1,18 @@
use serde::{Deserialize, Serialize};
use crate::app::{config, ddns, settings, user, vfs};
use crate::app::{config, ddns, settings, thumbnail, user, vfs};
use std::convert::From;
pub const API_MAJOR_VERSION: i32 = 6;
pub const API_MAJOR_VERSION: i32 = 7;
pub const API_MINOR_VERSION: i32 = 0;
pub const COOKIE_SESSION: &str = "session";
pub const COOKIE_USERNAME: &str = "username";
pub const COOKIE_ADMIN: &str = "admin";
#[derive(PartialEq, Debug, Serialize, Deserialize)]
#[derive(PartialEq, Eq, Debug, Serialize, Deserialize)]
pub struct Version {
pub major: i32,
pub minor: i32,
}
#[derive(PartialEq, Debug, Serialize, Deserialize)]
#[derive(PartialEq, Eq, Debug, Serialize, Deserialize)]
pub struct InitialSetup {
pub has_any_users: bool,
}
@@ -39,10 +37,39 @@ pub struct AuthQueryParameters {
#[derive(Serialize, Deserialize)]
pub struct ThumbnailOptions {
pub size: Option<ThumbnailSize>,
pub pad: Option<bool>,
}
#[derive(Debug, PartialEq, Serialize, Deserialize)]
impl From<ThumbnailOptions> for thumbnail::Options {
fn from(dto: ThumbnailOptions) -> Self {
let mut options = thumbnail::Options::default();
options.max_dimension = dto.size.map_or(options.max_dimension, Into::into);
options.pad_to_square = dto.pad.unwrap_or(options.pad_to_square);
options
}
}
#[derive(Clone, Copy, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum ThumbnailSize {
Small,
Large,
Native,
}
#[allow(clippy::from_over_into)]
impl Into<Option<u32>> for ThumbnailSize {
fn into(self) -> Option<u32> {
match self {
Self::Small => Some(400),
Self::Large => Some(1200),
Self::Native => None,
}
}
}
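// End-to-end sketch (hypothetical helper, std only): how a raw query string
// would map onto the pixel sizes above; the 400px default for a missing `size`
// parameter matches what the thumbnail tests below expect.
fn resolve_max_dimension(query: &str) -> Option<u32> {
    let size = query
        .split('&')
        .filter_map(|pair| pair.split_once('='))
        .find(|(key, _)| *key == "size")
        .map(|(_, value)| value);
    match size {
        Some("large") => Some(1200),
        Some("native") => None,
        _ => Some(400), // "small", missing, or anything else falls back to the default in this sketch
    }
}

fn main() {
    assert_eq!(resolve_max_dimension("size=small&pad=true"), Some(400));
    assert_eq!(resolve_max_dimension("size=large"), Some(1200));
    assert_eq!(resolve_max_dimension("size=native"), None);
    assert_eq!(resolve_max_dimension("pad=false"), Some(400));
}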
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct ListPlaylistsEntry {
pub name: String,
}
@@ -79,7 +106,7 @@ impl From<user::User> for User {
}
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct NewUser {
pub name: String,
pub password: String,
@@ -96,13 +123,13 @@ impl From<NewUser> for user::NewUser {
}
}
#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)]
#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
pub struct UserUpdate {
pub new_password: Option<String>,
pub new_is_admin: Option<bool>,
}
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
#[derive(Clone, Debug, Deserialize, PartialEq, Eq, Serialize)]
pub struct DDNSConfig {
pub host: String,
pub username: String,
@@ -129,7 +156,7 @@ impl From<ddns::Config> for DDNSConfig {
}
}
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
#[derive(Clone, Debug, Deserialize, PartialEq, Eq, Serialize)]
pub struct MountDir {
pub source: String,
pub name: String,
@@ -153,7 +180,7 @@ impl From<vfs::MountDir> for MountDir {
}
}
#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)]
#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
pub struct Config {
pub settings: Option<NewSettings>,
pub users: Option<Vec<NewUser>>,
@@ -174,7 +201,7 @@ impl From<Config> for config::Config {
}
}
#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)]
#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
pub struct NewSettings {
pub album_art_pattern: Option<String>,
pub reindex_every_n_seconds: Option<i32>,
@@ -189,7 +216,7 @@ impl From<NewSettings> for settings::NewSettings {
}
}
#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)]
#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
pub struct Settings {
pub album_art_pattern: String,
pub reindex_every_n_seconds: i32,
@@ -198,10 +225,11 @@ pub struct Settings {
impl From<settings::Settings> for Settings {
fn from(s: settings::Settings) -> Self {
Self {
album_art_pattern: s.album_art_pattern,
reindex_every_n_seconds: s.reindex_every_n_seconds,
album_art_pattern: s.index_album_art_pattern,
reindex_every_n_seconds: s.index_sleep_duration_seconds,
}
}
}
// TODO: Preferences, CollectionFile, Song and Directory should have dto types
// TODO: The Song dto type should skip `None` values when serializing, to significantly reduce payload sizes

View file

@@ -1,50 +1,91 @@
use std::path::PathBuf;
use thiserror::Error;
use crate::app::index::QueryError;
use crate::app::{config, playlist, settings, user};
use crate::app::{config, ddns, lastfm, playlist, settings, thumbnail, user, vfs};
use crate::db;
#[derive(Error, Debug)]
pub enum APIError {
#[error("Incorrect Credentials")]
IncorrectCredentials,
#[error("Could not encode authorization token")]
AuthorizationTokenEncoding,
#[error("Administrator permission is required")]
AdminPermissionRequired,
#[error("Audio file could not be opened")]
AudioFileIOError,
#[error("Authentication is required")]
AuthenticationRequired,
#[error("Could not encode Branca token")]
BrancaTokenEncoding,
#[error("Database error:\n\n{0}")]
Database(diesel::result::Error),
#[error("DDNS update query failed with HTTP status {0}")]
DdnsUpdateQueryFailed(u16),
#[error("Cannot delete your own account")]
DeletingOwnAccount,
#[error("EmbeddedArtworkNotFound")]
EmbeddedArtworkNotFound,
#[error("EmptyUsername")]
EmptyUsername,
#[error("EmptyPassword")]
EmptyPassword,
#[error("Cannot delete your own account")]
DeletingOwnAccount,
#[error("Cannot remove your own admin privilege")]
OwnAdminPrivilegeRemoval,
#[error("Audio file could not be opened")]
AudioFileIOError,
#[error("Thumbnail file could not be opened")]
ThumbnailFileIOError,
#[error("Incorrect Credentials")]
IncorrectCredentials,
#[error("No last.fm account has been linked")]
LastFMAccountNotLinked,
#[error("Could not decode content as base64 after linking last.fm account")]
LastFMLinkContentBase64DecodeError,
#[error("Could not decode content as UTF-8 after linking last.fm account")]
LastFMLinkContentEncodingError,
#[error("Path not found in virtual filesystem")]
VFSPathNotFound,
#[error("User not found")]
UserNotFound,
#[error("Could send Now Playing update to last.fm:\n\n{0}")]
LastFMNowPlaying(rustfm_scrobble::ScrobblerError),
#[error("Could emit scrobble with last.fm:\n\n{0}")]
LastFMScrobble(rustfm_scrobble::ScrobblerError),
#[error("Could authenticate with last.fm:\n\n{0}")]
LastFMScrobblerAuthentication(rustfm_scrobble::ScrobblerError),
#[error("Internal server error")]
Internal,
#[error("File I/O error for `{0}`:\n\n{1}")]
Io(PathBuf, std::io::Error),
#[error("Cannot remove your own admin privilege")]
OwnAdminPrivilegeRemoval,
#[error("Could not hash password")]
PasswordHashing,
#[error("Playlist not found")]
PlaylistNotFound,
#[error("Unspecified")]
Unspecified,
}
impl From<anyhow::Error> for APIError {
fn from(_: anyhow::Error) -> Self {
APIError::Unspecified
}
#[error("Settings error:\n\n{0}")]
Settings(settings::Error),
#[error("Song not found")]
SongMetadataNotFound,
#[error("Could not decode thumbnail from flac file `{0}`:\n\n{1}")]
ThumbnailFlacDecoding(PathBuf, metaflac::Error),
#[error("Thumbnail file could not be opened")]
ThumbnailFileIOError,
#[error("Could not decode thumbnail from ID3 tag in `{0}`:\n\n{1}")]
ThumbnailId3Decoding(PathBuf, id3::Error),
#[error("Could not decode image thumbnail in `{0}`:\n\n{1}")]
ThumbnailImageDecoding(PathBuf, image::error::ImageError),
#[error("Could not decode thumbnail from mp4 file `{0}`:\n\n{1}")]
ThumbnailMp4Decoding(PathBuf, mp4ameta::Error),
#[error("Toml deserialization error:\n\n{0}")]
TomlDeserialization(toml::de::Error),
#[error("Unsupported thumbnail format: `{0}`")]
UnsupportedThumbnailFormat(&'static str),
#[error("User not found")]
UserNotFound,
#[error("Path not found in virtual filesystem")]
VFSPathNotFound,
}
impl From<config::Error> for APIError {
fn from(error: config::Error) -> APIError {
match error {
config::Error::Unspecified => APIError::Unspecified,
config::Error::Ddns(e) => e.into(),
config::Error::Io(p, e) => APIError::Io(p, e),
config::Error::Settings(e) => e.into(),
config::Error::Toml(e) => APIError::TomlDeserialization(e),
config::Error::User(e) => e.into(),
config::Error::Vfs(e) => e.into(),
}
}
}
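// Minimal standalone sketch (hypothetical error types): conversions like the
// ones above let handlers bubble module errors up with `?`, relying on the
// `From` impl instead of ad-hoc mapping at every call site.
use thiserror::Error;

#[derive(Error, Debug)]
enum ModuleError {
    #[error("record missing")]
    NotFound,
}

#[derive(Error, Debug)]
enum SketchApiError {
    #[error("User not found")]
    UserNotFound,
}

impl From<ModuleError> for SketchApiError {
    fn from(error: ModuleError) -> Self {
        match error {
            ModuleError::NotFound => SketchApiError::UserNotFound,
        }
    }
}

fn load_user() -> Result<String, ModuleError> {
    Err(ModuleError::NotFound)
}

fn handler() -> Result<String, SketchApiError> {
    let user = load_user()?; // `?` applies the From impl above
    Ok(user)
}

fn main() {
    assert!(matches!(handler(), Err(SketchApiError::UserNotFound)));
}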
@@ -52,9 +93,11 @@ impl From<config::Error> for APIError {
impl From<playlist::Error> for APIError {
fn from(error: playlist::Error) -> APIError {
match error {
playlist::Error::Database(e) => APIError::Database(e),
playlist::Error::DatabaseConnection(e) => e.into(),
playlist::Error::PlaylistNotFound => APIError::PlaylistNotFound,
playlist::Error::UserNotFound => APIError::UserNotFound,
playlist::Error::Unspecified => APIError::Unspecified,
playlist::Error::Vfs(e) => e.into(),
}
}
}
@@ -62,8 +105,10 @@ impl From<playlist::Error> for APIError {
impl From<QueryError> for APIError {
fn from(error: QueryError) -> APIError {
match error {
QueryError::VFSPathNotFound => APIError::VFSPathNotFound,
QueryError::Unspecified => APIError::Unspecified,
QueryError::Database(e) => APIError::Database(e),
QueryError::DatabaseConnection(e) => e.into(),
QueryError::SongNotFound(_) => APIError::SongMetadataNotFound,
QueryError::Vfs(e) => e.into(),
}
}
}
@@ -71,12 +116,12 @@ impl From<QueryError> for APIError {
impl From<settings::Error> for APIError {
fn from(error: settings::Error) -> APIError {
match error {
settings::Error::AuthSecretNotFound => APIError::Unspecified,
settings::Error::InvalidAuthSecret => APIError::Unspecified,
settings::Error::IndexSleepDurationNotFound => APIError::Unspecified,
settings::Error::IndexAlbumArtPatternNotFound => APIError::Unspecified,
settings::Error::IndexAlbumArtPatternInvalid => APIError::Unspecified,
settings::Error::Unspecified => APIError::Unspecified,
settings::Error::AuthenticationSecretNotFound => APIError::Settings(error),
settings::Error::DatabaseConnection(e) => e.into(),
settings::Error::AuthenticationSecretInvalid => APIError::Settings(error),
settings::Error::MiscSettingsNotFound => APIError::Settings(error),
settings::Error::IndexAlbumArtPatternInvalid => APIError::Settings(error),
settings::Error::Database(e) => APIError::Database(e),
}
}
}
@@ -84,13 +129,76 @@ impl From<settings::Error> for APIError {
impl From<user::Error> for APIError {
fn from(error: user::Error) -> APIError {
match error {
user::Error::EmptyUsername => APIError::EmptyUsername,
user::Error::AuthorizationTokenEncoding => APIError::AuthorizationTokenEncoding,
user::Error::BrancaTokenEncoding => APIError::BrancaTokenEncoding,
user::Error::Database(e) => APIError::Database(e),
user::Error::DatabaseConnection(e) => e.into(),
user::Error::EmptyPassword => APIError::EmptyPassword,
user::Error::IncorrectUsername => APIError::IncorrectCredentials,
user::Error::IncorrectPassword => APIError::IncorrectCredentials,
user::Error::InvalidAuthToken => APIError::IncorrectCredentials,
user::Error::EmptyUsername => APIError::EmptyUsername,
user::Error::IncorrectAuthorizationScope => APIError::IncorrectCredentials,
user::Error::Unspecified => APIError::Unspecified,
user::Error::IncorrectPassword => APIError::IncorrectCredentials,
user::Error::IncorrectUsername => APIError::IncorrectCredentials,
user::Error::InvalidAuthToken => APIError::IncorrectCredentials,
user::Error::MissingLastFMSessionKey => APIError::IncorrectCredentials,
user::Error::PasswordHashing => APIError::PasswordHashing,
}
}
}
impl From<vfs::Error> for APIError {
fn from(error: vfs::Error) -> APIError {
match error {
vfs::Error::CouldNotMapToVirtualPath(_) => APIError::VFSPathNotFound,
vfs::Error::CouldNotMapToRealPath(_) => APIError::VFSPathNotFound,
vfs::Error::Database(e) => APIError::Database(e),
vfs::Error::DatabaseConnection(e) => e.into(),
}
}
}
impl From<ddns::Error> for APIError {
fn from(error: ddns::Error) -> APIError {
match error {
ddns::Error::Database(e) => APIError::Database(e),
ddns::Error::DatabaseConnection(e) => e.into(),
ddns::Error::UpdateQueryFailed(s) => APIError::DdnsUpdateQueryFailed(s),
}
}
}
impl From<db::Error> for APIError {
fn from(error: db::Error) -> APIError {
match error {
db::Error::ConnectionPoolBuild => APIError::Internal,
db::Error::ConnectionPool => APIError::Internal,
db::Error::Io(p, e) => APIError::Io(p, e),
db::Error::Migration => APIError::Internal,
}
}
}
impl From<lastfm::Error> for APIError {
fn from(error: lastfm::Error) -> APIError {
match error {
lastfm::Error::ScrobblerAuthentication(e) => APIError::LastFMScrobblerAuthentication(e),
lastfm::Error::Scrobble(e) => APIError::LastFMScrobble(e),
lastfm::Error::NowPlaying(e) => APIError::LastFMNowPlaying(e),
lastfm::Error::Query(e) => e.into(),
lastfm::Error::User(e) => e.into(),
}
}
}
impl From<thumbnail::Error> for APIError {
fn from(error: thumbnail::Error) -> APIError {
match error {
thumbnail::Error::EmbeddedArtworkNotFound(_) => APIError::EmbeddedArtworkNotFound,
thumbnail::Error::Id3(p, e) => APIError::ThumbnailId3Decoding(p, e),
thumbnail::Error::Image(p, e) => APIError::ThumbnailImageDecoding(p, e),
thumbnail::Error::Io(p, e) => APIError::Io(p, e),
thumbnail::Error::Metaflac(p, e) => APIError::ThumbnailFlacDecoding(p, e),
thumbnail::Error::Mp4aMeta(p, e) => APIError::ThumbnailMp4Decoding(p, e),
thumbnail::Error::UnsupportedFormat(f) => APIError::UnsupportedThumbnailFormat(f),
}
}
}

View file

@@ -93,7 +93,7 @@ pub trait TestService {
let browse_request = protocol::browse(Path::new(""));
let response = self.fetch_json::<(), Vec<index::CollectionFile>>(&browse_request);
let entries = response.body();
if entries.len() > 0 {
if !entries.is_empty() {
break;
}
std::thread::sleep(Duration::from_secs(1));
@@ -103,7 +103,7 @@ pub trait TestService {
let flatten_request = protocol::flatten(Path::new(""));
let response = self.fetch_json::<_, Vec<index::Song>>(&flatten_request);
let entries = response.body();
if entries.len() > 0 {
if !entries.is_empty() {
break;
}
std::thread::sleep(Duration::from_secs(1));
@@ -112,7 +112,7 @@
}
fn add_trailing_slash<T>(request: &mut Request<T>) {
*request.uri_mut() = (request.uri().to_string().trim_end_matches("/").to_string() + "/")
*request.uri_mut() = (request.uri().to_string().trim_end_matches('/').to_string() + "/")
.parse()
.unwrap();
}

View file

@@ -1,56 +1,10 @@
use cookie::Cookie;
use headers::{self, HeaderMapExt};
use http::{Response, StatusCode};
use time::Duration;
use http::StatusCode;
use crate::service::dto;
use crate::service::test::{constants::*, protocol, ServiceType, TestService};
use crate::test_name;
fn validate_added_cookies<T>(response: &Response<T>) {
let twenty_years = Duration::days(365 * 20);
let cookies: Vec<Cookie> = response
.headers()
.get_all(http::header::SET_COOKIE)
.iter()
.map(|c| Cookie::parse(c.to_str().unwrap()).unwrap())
.collect();
let session = cookies
.iter()
.find(|c| c.name() == dto::COOKIE_SESSION)
.unwrap();
assert_ne!(session.value(), TEST_USERNAME);
assert!(session.max_age().unwrap() >= twenty_years);
let username = cookies
.iter()
.find(|c| c.name() == dto::COOKIE_USERNAME)
.unwrap();
assert_eq!(username.value(), TEST_USERNAME);
assert!(session.max_age().unwrap() >= twenty_years);
let is_admin = cookies
.iter()
.find(|c| c.name() == dto::COOKIE_ADMIN)
.unwrap();
assert_eq!(is_admin.value(), false.to_string());
assert!(session.max_age().unwrap() >= twenty_years);
}
fn validate_no_cookies<T>(response: &Response<T>) {
let cookies: Vec<Cookie> = response
.headers()
.get_all(http::header::SET_COOKIE)
.iter()
.map(|c| Cookie::parse(c.to_str().unwrap()).unwrap())
.collect();
assert!(!cookies.iter().any(|c| c.name() == dto::COOKIE_SESSION));
assert!(!cookies.iter().any(|c| c.name() == dto::COOKIE_USERNAME));
assert!(!cookies.iter().any(|c| c.name() == dto::COOKIE_ADMIN));
}
#[test]
fn login_rejects_bad_username() {
let mut service = ServiceType::new(&test_name!());
@@ -82,64 +36,8 @@ fn login_golden_path() {
let authorization = response.body();
assert_eq!(authorization.username, TEST_USERNAME);
assert_eq!(authorization.is_admin, false);
assert!(!authorization.is_admin);
assert!(!authorization.token.is_empty());
validate_added_cookies(&response);
}
#[test]
fn requests_without_auth_header_do_not_set_cookies() {
let mut service = ServiceType::new(&test_name!());
service.complete_initial_setup();
service.login();
let request = protocol::random();
let response = service.fetch(&request);
assert_eq!(response.status(), StatusCode::OK);
validate_no_cookies(&response);
}
#[test]
fn authentication_via_basic_http_header_rejects_bad_username() {
let mut service = ServiceType::new(&test_name!());
service.complete_initial_setup();
let mut request = protocol::random();
let basic = headers::Authorization::basic("garbage", TEST_PASSWORD);
request.headers_mut().typed_insert(basic);
let response = service.fetch(&request);
assert_eq!(response.status(), StatusCode::UNAUTHORIZED);
}
#[test]
fn authentication_via_basic_http_header_rejects_bad_password() {
let mut service = ServiceType::new(&test_name!());
service.complete_initial_setup();
let mut request = protocol::random();
let basic = headers::Authorization::basic(TEST_PASSWORD, "garbage");
request.headers_mut().typed_insert(basic);
let response = service.fetch(&request);
assert_eq!(response.status(), StatusCode::UNAUTHORIZED);
}
#[test]
fn authentication_via_basic_http_header_golden_path() {
let mut service = ServiceType::new(&test_name!());
service.complete_initial_setup();
let mut request = protocol::random();
let basic = headers::Authorization::basic(TEST_USERNAME, TEST_PASSWORD);
request.headers_mut().typed_insert(basic);
let response = service.fetch(&request);
assert_eq!(response.status(), StatusCode::OK);
validate_added_cookies(&response);
}
#[test]
@@ -174,8 +72,6 @@ fn authentication_via_bearer_http_header_golden_path() {
request.headers_mut().typed_insert(bearer);
let response = service.fetch(&request);
assert_eq!(response.status(), StatusCode::OK);
validate_no_cookies(&response);
}
#[test]
@@ -213,6 +109,4 @@ fn authentication_via_query_param_golden_path() {
let response = service.fetch(&request);
assert_eq!(response.status(), StatusCode::OK);
validate_no_cookies(&response);
}

View file

@@ -1,6 +1,7 @@
use http::{header, HeaderValue, StatusCode};
use std::path::PathBuf;
use crate::service::dto::ThumbnailSize;
use crate::service::test::{constants::*, protocol, ServiceType, TestService};
use crate::test_name;
@@ -33,6 +34,39 @@ fn audio_golden_path() {
let response = service.fetch_bytes(&request);
assert_eq!(response.status(), StatusCode::OK);
assert_eq!(response.body().len(), 24_142);
assert_eq!(
response.headers().get(header::CONTENT_LENGTH).unwrap(),
"24142"
);
}
#[test]
fn audio_does_not_encode_content() {
let mut service = ServiceType::new(&test_name!());
service.complete_initial_setup();
service.login_admin();
service.index();
service.login();
let path: PathBuf = [TEST_MOUNT_NAME, "Khemmis", "Hunted", "02 - Candlelight.mp3"]
.iter()
.collect();
let mut request = protocol::audio(&path);
let headers = request.headers_mut();
headers.append(
header::ACCEPT_ENCODING,
HeaderValue::from_str("gzip, deflate, br").unwrap(),
);
let response = service.fetch_bytes(&request);
assert_eq!(response.status(), StatusCode::OK);
assert_eq!(response.body().len(), 24_142);
assert_eq!(response.headers().get(header::TRANSFER_ENCODING), None);
assert_eq!(
response.headers().get(header::CONTENT_LENGTH).unwrap(),
"24142"
);
}
#[test]
@@ -84,8 +118,9 @@ fn thumbnail_requires_auth() {
.iter()
.collect();
let size = None;
let pad = None;
let request = protocol::thumbnail(&path, pad);
let request = protocol::thumbnail(&path, size, pad);
let response = service.fetch(&request);
assert_eq!(response.status(), StatusCode::UNAUTHORIZED);
}
@@ -102,8 +137,9 @@ fn thumbnail_golden_path() {
.iter()
.collect();
let size = None;
let pad = None;
let request = protocol::thumbnail(&path, pad);
let request = protocol::thumbnail(&path, size, pad);
let response = service.fetch_bytes(&request);
assert_eq!(response.status(), StatusCode::OK);
}
@@ -116,8 +152,50 @@ fn thumbnail_bad_path_returns_not_found() {
let path: PathBuf = ["not_my_collection"].iter().collect();
let size = None;
let pad = None;
let request = protocol::thumbnail(&path, pad);
let request = protocol::thumbnail(&path, size, pad);
let response = service.fetch(&request);
assert_eq!(response.status(), StatusCode::NOT_FOUND);
}
#[test]
fn thumbnail_size_default() {
thumbnail_size(&test_name!(), None, None, 400);
}
#[test]
fn thumbnail_size_small() {
thumbnail_size(&test_name!(), Some(ThumbnailSize::Small), None, 400);
}
#[test]
#[cfg(not(tarpaulin))]
fn thumbnail_size_large() {
thumbnail_size(&test_name!(), Some(ThumbnailSize::Large), None, 1200);
}
#[test]
#[cfg(not(tarpaulin))]
fn thumbnail_size_native() {
thumbnail_size(&test_name!(), Some(ThumbnailSize::Native), None, 1423);
}
fn thumbnail_size(name: &str, size: Option<ThumbnailSize>, pad: Option<bool>, expected: u32) {
let mut service = ServiceType::new(name);
service.complete_initial_setup();
service.login_admin();
service.index();
service.login();
let path: PathBuf = [TEST_MOUNT_NAME, "Tobokegao", "Picnic", "Folder.png"]
.iter()
.collect();
let request = protocol::thumbnail(&path, size, pad);
let response = service.fetch_bytes(&request);
assert_eq!(response.status(), StatusCode::OK);
let thumbnail = image::load_from_memory(response.body()).unwrap().to_rgb8();
assert_eq!(thumbnail.width(), expected);
assert_eq!(thumbnail.height(), expected);
}

View file

@@ -1,9 +1,9 @@
use http::{method::Method, Request};
use http::{Method, Request};
use percent_encoding::{percent_encode, NON_ALPHANUMERIC};
use std::path::Path;
use crate::app::user;
use crate::service::dto;
use crate::{app::user, service::dto::ThumbnailSize};
pub fn web_index() -> Request<()> {
Request::builder()
@@ -200,14 +200,32 @@ pub fn audio(path: &Path) -> Request<()> {
.unwrap()
}
pub fn thumbnail(path: &Path, pad: Option<bool>) -> Request<()> {
pub fn thumbnail(path: &Path, size: Option<ThumbnailSize>, pad: Option<bool>) -> Request<()> {
let path = path.to_string_lossy();
let mut endpoint = format!("/api/thumbnail/{}", url_encode(path.as_ref()));
match pad {
Some(true) => endpoint.push_str("?pad=true"),
Some(false) => endpoint.push_str("?pad=false"),
None => (),
};
let mut params = String::new();
if let Some(s) = size {
params.push('?');
match s {
ThumbnailSize::Small => params.push_str("size=small"),
ThumbnailSize::Large => params.push_str("size=large"),
ThumbnailSize::Native => params.push_str("size=native"),
};
}
if let Some(p) = pad {
if params.is_empty() {
params.push('?');
} else {
params.push('&');
}
if p {
params.push_str("pad=true");
} else {
params.push_str("pad=false");
};
}
let endpoint = format!("/api/thumbnail/{}{}", url_encode(path.as_ref()), params);
Request::builder()
.method(Method::GET)
.uri(&endpoint)
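// Worked example (hypothetical path, std-only re-encoding): for a small,
// padded request the helper above would produce an endpoint along these lines,
// with every non-alphanumeric byte percent-encoded.
fn percent_encode_all(input: &str) -> String {
    input
        .bytes()
        .map(|b| match b {
            b'A'..=b'Z' | b'a'..=b'z' | b'0'..=b'9' => (b as char).to_string(),
            _ => format!("%{:02X}", b),
        })
        .collect()
}

fn main() {
    let path = "Khemmis/Hunted/Folder.jpg";
    let endpoint = format!(
        "/api/thumbnail/{}?size=small&pad=true",
        percent_encode_all(path)
    );
    assert_eq!(
        endpoint,
        "/api/thumbnail/Khemmis%2FHunted%2FFolder%2Ejpg?size=small&pad=true"
    );
}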

View file

@@ -1,6 +1,6 @@
use http::StatusCode;
use crate::service::dto;
use crate::service::dto::{self, Settings};
use crate::service::test::{protocol, ServiceType, TestService};
use crate::test_name;
@@ -61,7 +61,21 @@ fn put_settings_golden_path() {
service.complete_initial_setup();
service.login_admin();
let request = protocol::put_settings(dto::NewSettings::default());
let request = protocol::put_settings(dto::NewSettings {
album_art_pattern: Some("test_pattern".to_owned()),
reindex_every_n_seconds: Some(31),
});
let response = service.fetch(&request);
assert_eq!(response.status(), StatusCode::OK);
let request = protocol::get_settings();
let response = service.fetch_json::<_, dto::Settings>(&request);
let settings = response.body();
assert_eq!(
settings,
&Settings {
album_art_pattern: "test_pattern".to_owned(),
reindex_every_n_seconds: 31,
},
);
}

View file

@@ -8,8 +8,7 @@ fn can_get_swagger_index() {
let mut service = ServiceType::new(&test_name!());
let request = protocol::swagger_index();
let response = service.fetch(&request);
let status = response.status();
assert_eq!(status, StatusCode::FOUND);
assert_eq!(response.status(), StatusCode::OK);
}
#[test]

View file

@@ -7,7 +7,7 @@ macro_rules! test_name {
let file_name = file_name.replace("/", "-");
let file_name = file_name.replace("\\", "-");
format!("{}-line-{}", file_name, line!())
}};
}};
}
pub fn prepare_test_directory<T: AsRef<str>>(test_name: T) -> PathBuf {
@@ -16,5 +16,5 @@ pub fn prepare_test_directory<T: AsRef<str>>(test_name: T) -> PathBuf {
std::fs::remove_dir_all(&output_dir).unwrap();
}
std::fs::create_dir_all(&output_dir).unwrap();
return output_dir;
output_dir
}

View file

@@ -12,7 +12,8 @@ macro_rules! match_ignore_case {
}
pub use crate::match_ignore_case;
#[derive(Debug, PartialEq)]
#[allow(clippy::upper_case_acronyms)]
#[derive(Debug, PartialEq, Eq)]
pub enum AudioFormat {
AIFF,
APE,

Binary files not shown (8 files changed).