Compare commits
47 Commits
v0.2.1
...
56760be586
| Author | SHA1 | Date | |
|---|---|---|---|
| 56760be586 | |||
| 108c374c6d | |||
| 2129dc8007 | |||
| cc3ef04cbe | |||
| a730ab568c | |||
|
|
c30a3aff5d | ||
|
|
71d88bacf2 | ||
| 5fb8821709 | |||
| b1eaa1b6e9 | |||
| 7c2c7b0ce5 | |||
|
|
4f239c2546 | ||
|
|
cfcf6e4029 | ||
| a7af27d064 | |||
| e1210e6e20 | |||
| 12d28170d2 | |||
| 0ba1caaa23 | |||
| e1782a6e3b | |||
| 4a272f373d | |||
| f4fa01ef7e | |||
| a50efd0082 | |||
| a4010e1173 | |||
| 6e2155d8bd | |||
| cd842eb7ac | |||
| ff3ad15b95 | |||
| d5068aaa33 | |||
| 8a49a5013b | |||
|
|
85b3cb6852 | ||
|
|
bfc0675f5a | ||
|
|
722183047d | ||
|
|
106ab96c56 | ||
|
|
cbc5639f99 | ||
|
|
754097f894 | ||
|
|
b761245fd0 | ||
|
|
0f49d8d079 | ||
|
|
a17ff322ad | ||
|
|
707ef85e5d | ||
|
|
ec4c53497f | ||
|
|
46ba3d5490 | ||
| f2d42751fd | |||
| bc34b6bc41 | |||
| 3ee7235b51 | |||
| 1f85d9c435 | |||
| 773e9c1ee7 | |||
| 0242376a65 | |||
|
|
64f292c7b1 | ||
| 73b6d7483e | |||
|
|
64b59ba72d |
3
.env_example
Normal file
3
.env_example
Normal file
@@ -0,0 +1,3 @@
|
||||
OLLAMA_URL=https://ollama.host.com
|
||||
OLLAMA_AUTH="Basic <BASE64 Auth string>"
|
||||
#OLLAMA_AUTH="Bearer <TOKEN>"
|
||||
63
.github/workflows/deb-publish.yml_
vendored
Normal file
63
.github/workflows/deb-publish.yml_
vendored
Normal file
@@ -0,0 +1,63 @@
|
||||
name: Build and Publish Deb Package
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v*.*.*'
|
||||
|
||||
jobs:
|
||||
build-deb:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Enable universe and install dependencies
|
||||
run: |
|
||||
sudo add-apt-repository universe -y
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y fuse3 libfuse3-dev pkg-config protobuf-compiler cmake
|
||||
|
||||
- name: Install Rust toolchain
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1
|
||||
with:
|
||||
toolchain: stable
|
||||
|
||||
- name: Cache cargo registry
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/registry
|
||||
~/.cargo/git
|
||||
key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-cargo-
|
||||
|
||||
- name: Install cargo-deb
|
||||
run: cargo install cargo-deb --locked
|
||||
|
||||
- name: Extract version from tag
|
||||
id: version
|
||||
run: echo "version=${GITHUB_REF_NAME#v}" >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Build deb package
|
||||
run: |
|
||||
cargo deb -p furumi-mount-linux \
|
||||
--deb-version ${{ steps.version.outputs.version }}
|
||||
|
||||
- name: Locate deb file
|
||||
id: deb
|
||||
run: |
|
||||
DEB=$(ls target/debian/furumi-mount-linux_*.deb | head -1)
|
||||
echo "path=$DEB" >> "$GITHUB_OUTPUT"
|
||||
echo "name=$(basename $DEB)" >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Publish to Gitea APT registry
|
||||
run: |
|
||||
TARGET_URL="${{ secrets.PKG_REGISTRY_URL }}/api/packages/${{ secrets.PKG_OWNER }}/debian/pool/noble/main/upload"
|
||||
echo "Uploading to: $TARGET_URL"
|
||||
curl --fail-with-body \
|
||||
--user "${{ secrets.PKG_USER }}:${{ secrets.PKG_TOKEN }}" \
|
||||
--upload-file "${{ steps.deb.outputs.path }}" \
|
||||
"$TARGET_URL"
|
||||
50
.github/workflows/docker-publish-agent-dev.yml
vendored
Normal file
50
.github/workflows/docker-publish-agent-dev.yml
vendored
Normal file
@@ -0,0 +1,50 @@
|
||||
name: Publish Metadata Agent Image (dev)
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- DEV
|
||||
|
||||
env:
|
||||
REGISTRY: docker.io
|
||||
IMAGE_NAME: ${{ secrets.DOCKERHUB_USERNAME }}/furumi-metadata-agent
|
||||
|
||||
jobs:
|
||||
build-and-push-image:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Log in to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Determine version and tags
|
||||
id: info
|
||||
run: |
|
||||
IMAGE="${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}"
|
||||
SHORT_SHA="$(echo '${{ github.sha }}' | cut -c1-7)"
|
||||
echo "tags=${IMAGE}:dev" >> "$GITHUB_OUTPUT"
|
||||
echo "version=dev-${SHORT_SHA}" >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Build and push Docker image
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
file: docker/Dockerfile.agent
|
||||
push: true
|
||||
tags: ${{ steps.info.outputs.tags }}
|
||||
build-args: |
|
||||
FURUMI_VERSION=${{ steps.info.outputs.version }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
61
.github/workflows/docker-publish-agent.yml
vendored
Normal file
61
.github/workflows/docker-publish-agent.yml
vendored
Normal file
@@ -0,0 +1,61 @@
|
||||
name: Publish Metadata Agent Image
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- '**'
|
||||
- '!DEV'
|
||||
tags:
|
||||
- 'v*.*.*'
|
||||
|
||||
env:
|
||||
REGISTRY: docker.io
|
||||
IMAGE_NAME: ${{ secrets.DOCKERHUB_USERNAME }}/furumi-metadata-agent
|
||||
|
||||
jobs:
|
||||
build-and-push-image:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Log in to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Determine version and tags
|
||||
id: info
|
||||
run: |
|
||||
IMAGE="${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}"
|
||||
SHORT_SHA="$(echo '${{ github.sha }}' | cut -c1-7)"
|
||||
|
||||
if [[ "${{ github.ref }}" == refs/tags/v* ]]; then
|
||||
TAG="${{ github.ref_name }}"
|
||||
VERSION="${TAG#v}"
|
||||
echo "tags=${IMAGE}:${VERSION},${IMAGE}:latest" >> "$GITHUB_OUTPUT"
|
||||
echo "version=${VERSION}" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
echo "tags=${IMAGE}:trunk,${IMAGE}:${SHORT_SHA}" >> "$GITHUB_OUTPUT"
|
||||
echo "version=${SHORT_SHA}" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
- name: Build and push Docker image
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
file: docker/Dockerfile.agent
|
||||
push: true
|
||||
tags: ${{ steps.info.outputs.tags }}
|
||||
build-args: |
|
||||
FURUMI_VERSION=${{ steps.info.outputs.version }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
@@ -1,13 +1,13 @@
|
||||
name: Publish Server Image
|
||||
name: Publish Web Player Image (dev)
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v*.*.*'
|
||||
branches:
|
||||
- DEV
|
||||
|
||||
env:
|
||||
REGISTRY: docker.io
|
||||
IMAGE_NAME: ${{ secrets.DOCKERHUB_USERNAME }}/furumi-server
|
||||
IMAGE_NAME: ${{ secrets.DOCKERHUB_USERNAME }}/furumi-web-player
|
||||
|
||||
jobs:
|
||||
build-and-push-image:
|
||||
@@ -29,22 +29,22 @@ jobs:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
tags: |
|
||||
type=semver,pattern={{version}}
|
||||
type=semver,pattern={{major}}.{{minor}}
|
||||
type=sha,format=short
|
||||
- name: Determine version and tags
|
||||
id: info
|
||||
run: |
|
||||
IMAGE="${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}"
|
||||
SHORT_SHA="$(echo '${{ github.sha }}' | cut -c1-7)"
|
||||
echo "tags=${IMAGE}:dev" >> "$GITHUB_OUTPUT"
|
||||
echo "version=dev-${SHORT_SHA}" >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Build and push Docker image
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
file: docker/Dockerfile.web-player
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
tags: ${{ steps.info.outputs.tags }}
|
||||
build-args: |
|
||||
FURUMI_VERSION=${{ steps.info.outputs.version }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
61
.github/workflows/docker-publish-player.yml
vendored
Normal file
61
.github/workflows/docker-publish-player.yml
vendored
Normal file
@@ -0,0 +1,61 @@
|
||||
name: Publish Web Player Image
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- '**'
|
||||
- '!DEV'
|
||||
tags:
|
||||
- 'v*.*.*'
|
||||
|
||||
env:
|
||||
REGISTRY: docker.io
|
||||
IMAGE_NAME: ${{ secrets.DOCKERHUB_USERNAME }}/furumi-web-player
|
||||
|
||||
jobs:
|
||||
build-and-push-image:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Log in to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Determine version and tags
|
||||
id: info
|
||||
run: |
|
||||
IMAGE="${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}"
|
||||
SHORT_SHA="$(echo '${{ github.sha }}' | cut -c1-7)"
|
||||
|
||||
if [[ "${{ github.ref }}" == refs/tags/v* ]]; then
|
||||
TAG="${{ github.ref_name }}"
|
||||
VERSION="${TAG#v}"
|
||||
echo "tags=${IMAGE}:${VERSION},${IMAGE}:latest" >> "$GITHUB_OUTPUT"
|
||||
echo "version=${VERSION}" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
echo "tags=${IMAGE}:trunk,${IMAGE}:${SHORT_SHA}" >> "$GITHUB_OUTPUT"
|
||||
echo "version=${SHORT_SHA}" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
- name: Build and push Docker image
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
file: docker/Dockerfile.web-player
|
||||
push: true
|
||||
tags: ${{ steps.info.outputs.tags }}
|
||||
build-args: |
|
||||
FURUMI_VERSION=${{ steps.info.outputs.version }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
60
.github/workflows/docker-publish.yml_
vendored
Normal file
60
.github/workflows/docker-publish.yml_
vendored
Normal file
@@ -0,0 +1,60 @@
|
||||
name: Publish Server Image
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- '**'
|
||||
tags:
|
||||
- 'v*.*.*'
|
||||
|
||||
env:
|
||||
REGISTRY: docker.io
|
||||
IMAGE_NAME: ${{ secrets.DOCKERHUB_USERNAME }}/furumi-server
|
||||
|
||||
jobs:
|
||||
build-and-push-image:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Log in to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Determine version and tags
|
||||
id: info
|
||||
run: |
|
||||
IMAGE="${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}"
|
||||
SHORT_SHA="$(echo '${{ github.sha }}' | cut -c1-7)"
|
||||
|
||||
if [[ "${{ github.ref }}" == refs/tags/v* ]]; then
|
||||
TAG="${{ github.ref_name }}"
|
||||
VERSION="${TAG#v}"
|
||||
echo "tags=${IMAGE}:${VERSION},${IMAGE}:latest" >> "$GITHUB_OUTPUT"
|
||||
echo "version=${VERSION}" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
echo "tags=${IMAGE}:trunk,${IMAGE}:${SHORT_SHA}" >> "$GITHUB_OUTPUT"
|
||||
echo "version=${SHORT_SHA}" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
- name: Build and push Docker image
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
file: docker/Dockerfile
|
||||
push: true
|
||||
tags: ${{ steps.info.outputs.tags }}
|
||||
build-args: |
|
||||
FURUMI_VERSION=${{ steps.info.outputs.version }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -1 +1,4 @@
|
||||
/target
|
||||
/inbox
|
||||
/storage
|
||||
.env
|
||||
|
||||
216
CHIPTUNE.md
Normal file
216
CHIPTUNE.md
Normal file
@@ -0,0 +1,216 @@
|
||||
# Chiptune Support Implementation Plan
|
||||
|
||||
## Overview
|
||||
|
||||
Add playback support for tracker/chiptune module formats (MOD, XM, S3M, IT, MPTM) to the
|
||||
Furumi web player. The implementation consists of two parts:
|
||||
|
||||
1. **Server-side** — lightweight metadata parser in pure Rust (zero external dependencies)
|
||||
2. **Client-side** — playback via libopenmpt WebAssembly using AudioWorklet API
|
||||
|
||||
## Supported Formats
|
||||
|
||||
| Format | Extension | Origin |
|
||||
|--------|-----------|--------|
|
||||
| MOD | `.mod` | Amiga ProTracker |
|
||||
| XM | `.xm` | FastTracker II |
|
||||
| S3M | `.s3m` | Scream Tracker 3 |
|
||||
| IT | `.it` | Impulse Tracker |
|
||||
| MPTM | `.mptm` | OpenMPT |
|
||||
|
||||
## Part 1: Server-Side Metadata Parser
|
||||
|
||||
### Rationale
|
||||
|
||||
libopenmpt must NOT be a server dependency. All tracker formats store metadata at fixed byte
|
||||
offsets in their headers, making manual parsing trivial. Reading the first ~400 bytes of a file
|
||||
is sufficient to extract all available metadata.
|
||||
|
||||
### Extracted Fields
|
||||
|
||||
- **Title** — song name embedded in the module header
|
||||
- **Channels** — number of active audio channels
|
||||
- **Patterns** — number of unique patterns in the module
|
||||
- **Message** — song message/comment (IT/MPTM only)
|
||||
|
||||
Note: none of these formats have a dedicated "artist" field. Author information, when present,
|
||||
is typically found in the IT/MPTM song message.
|
||||
|
||||
### Binary Format Reference
|
||||
|
||||
#### MOD
|
||||
|
||||
| Offset | Size | Field |
|
||||
|--------|------|-------|
|
||||
| 0 | 20 | Song title (space/null padded) |
|
||||
| 952 | 128 | Pattern order table |
|
||||
| 1080 | 4 | Signature (determines channel count) |
|
||||
|
||||
Channel count is derived from the 4-byte signature at offset 1080:
|
||||
|
||||
- `M.K.`, `M!K!`, `FLT4`, `4CHN` → 4 channels
|
||||
- `6CHN` → 6, `8CHN` / `OCTA` → 8
|
||||
- `xCHN` → x channels, `xxCH` → xx channels
|
||||
|
||||
Pattern count = max value in the order table (128 bytes at offset 952) + 1.
|
||||
|
||||
#### XM
|
||||
|
||||
All multi-byte values are little-endian.
|
||||
|
||||
| Offset | Size | Field |
|
||||
|--------|------|-------|
|
||||
| 0 | 17 | Magic: `"Extended Module: "` |
|
||||
| 17 | 20 | Module name |
|
||||
| 58 | 2 | Version number |
|
||||
| 68 | 2 | Number of channels |
|
||||
| 70 | 2 | Number of patterns |
|
||||
|
||||
#### S3M
|
||||
|
||||
| Offset | Size | Field |
|
||||
|--------|------|-------|
|
||||
| 0x00 | 28 | Song title (null-terminated) |
|
||||
| 0x1C | 1 | Signature byte (`0x1A`) |
|
||||
| 0x24 | 2 | Pattern count (LE u16) |
|
||||
| 0x2C | 4 | Magic: `"SCRM"` |
|
||||
| 0x40 | 32 | Channel settings |
|
||||
|
||||
Channel count = number of entries in channel settings (32 bytes) that are not `0xFF`.
|
||||
|
||||
#### IT
|
||||
|
||||
| Offset | Size | Field |
|
||||
|--------|------|-------|
|
||||
| 0x00 | 4 | Magic: `"IMPM"` |
|
||||
| 0x04 | 26 | Song title (null-terminated) |
|
||||
| 0x26 | 2 | Pattern count (LE u16) |
|
||||
| 0x2E | 2 | Special flags (bit 0 = message attached) |
|
||||
| 0x36 | 2 | Message length |
|
||||
| 0x38 | 4 | Message file offset |
|
||||
| 0x40 | 64 | Channel panning table |
|
||||
|
||||
Channel count = number of entries in channel panning (64 bytes) with value < 128.
|
||||
|
||||
Song message: if `special & 1`, read `message_length` bytes from `message_offset`. Uses `\r`
|
||||
(0x0D) as line separator.
|
||||
|
||||
#### MPTM
|
||||
|
||||
Parsed identically to IT. Detection:
|
||||
|
||||
- Legacy: magic `tpm.` instead of `IMPM`
|
||||
- Modern: magic `IMPM` with tracker version (offset 0x28) in range `0x0889..=0x0FFF`
|
||||
|
||||
### Integration Points
|
||||
|
||||
- **`browse.rs`** — add tracker extensions to the audio file whitelist
|
||||
- **`meta.rs`** — add a chiptune metadata branch that runs before Symphonia (which does not
|
||||
support tracker formats); return title, channel count, pattern count, and message
|
||||
- **`stream.rs`** — serve tracker files as-is (no server-side transcoding); these files are
|
||||
typically under 1 MB
|
||||
|
||||
### Implementation Notes
|
||||
|
||||
- Zero external crate dependencies — only `std::io::Read` + `std::io::Seek`
|
||||
- Read at most the first 1084 bytes for header parsing (MOD needs offset 1080 + 4 byte sig)
|
||||
- For IT/MPTM messages, a second seek to `message_offset` is needed
|
||||
- All strings should be trimmed of null bytes and trailing whitespace
|
||||
- Expected code size: ~200–300 lines of Rust
|
||||
|
||||
## Part 2: Client-Side Playback via libopenmpt WASM
|
||||
|
||||
### Rationale
|
||||
|
||||
Browsers cannot decode tracker formats natively. libopenmpt compiled to WebAssembly decodes
|
||||
modules into PCM samples which are then rendered through the Web Audio API. Client-side
|
||||
decoding keeps the server dependency-free and enables interactive features (pattern display,
|
||||
channel visualization) in the future.
|
||||
|
||||
### libopenmpt WASM Source
|
||||
|
||||
Use the **chiptune3** library (npm: `chiptune3`, by DrSnuggles) which bundles libopenmpt as a
|
||||
self-contained AudioWorklet-compatible ES6 module.
|
||||
|
||||
Package contents:
|
||||
|
||||
| File | Size | Purpose |
|
||||
|------|------|---------|
|
||||
| `chiptune3.js` | ~4 KB | Main API (load, play, pause, seek) |
|
||||
| `chiptune3.worklet.js` | ~12 KB | AudioWorklet processor glue |
|
||||
| `libopenmpt.worklet.js` | ~1.7 MB | libopenmpt WASM + JS (single-file bundle) |
|
||||
|
||||
Available via jsDelivr CDN or can be vendored into the project.
|
||||
|
||||
If a newer libopenmpt version is needed, the official project provides source tarballs with an
|
||||
Emscripten build target:
|
||||
|
||||
```
|
||||
make CONFIG=emscripten EMSCRIPTEN_TARGET=audioworkletprocessor
|
||||
```
|
||||
|
||||
This produces a single ES6 module with WASM embedded inline (`SINGLE_FILE=1`), which is
|
||||
required because AudioWorklet contexts cannot fetch separate `.wasm` files.
|
||||
|
||||
### Playback Architecture
|
||||
|
||||
```
|
||||
┌──────────────────────────────────────────────────────────┐
|
||||
│ player.html │
|
||||
│ │
|
||||
│ Format detection (by file extension) │
|
||||
│ ┌─────────────────────┐ ┌────────────────────────────┐ │
|
||||
│ │ Standard audio │ │ Tracker module │ │
|
||||
│ │ (mp3/flac/ogg/...) │ │ (mod/xm/s3m/it/mptm) │ │
|
||||
│ │ │ │ │ │
|
||||
│ │ <audio> element │ │ fetch() → ArrayBuffer │ │
|
||||
│ │ src=/api/stream/path │ │ ↓ │ │
|
||||
│ │ │ │ libopenmpt WASM decode │ │
|
||||
│ │ │ │ ↓ │ │
|
||||
│ │ │ │ AudioWorkletProcessor │ │
|
||||
│ │ │ │ ↓ │ │
|
||||
│ │ ↓ │ │ AudioContext.destination │ │
|
||||
│ └────────┼─────────────┘ └────────────┼───────────────┘ │
|
||||
│ └──────────┬──────────────────┘ │
|
||||
│ ↓ │
|
||||
│ Player controls │
|
||||
│ (play/pause/seek/volume) │
|
||||
│ MediaSession API │
|
||||
└──────────────────────────────────────────────────────────┘
|
||||
```
|
||||
|
||||
### Integration Points
|
||||
|
||||
- **`player.html`** — detect tracker format by extension; use chiptune3 API instead of
|
||||
`<audio>` element for tracker files; unify transport controls (play/pause/seek/volume)
|
||||
across both playback engines
|
||||
- **WASM assets** — serve `chiptune3.js`, `chiptune3.worklet.js`, and
|
||||
`libopenmpt.worklet.js` via a static file endpoint or embed them inline
|
||||
- **`mod.rs`** (routes) — add endpoint for serving WASM assets if not embedded
|
||||
|
||||
### Player Integration Details
|
||||
|
||||
The player must abstract over two playback backends behind a common interface:
|
||||
|
||||
```
|
||||
play(path) — start playback (auto-detect engine by extension)
|
||||
pause() — pause current playback
|
||||
resume() — resume current playback
|
||||
seek(seconds) — seek to position
|
||||
setVolume(v) — set volume (0.0–1.0)
|
||||
getDuration() — total duration in seconds
|
||||
getPosition() — current position in seconds
|
||||
isPlaying() — playback state
|
||||
onEnded(cb) — callback when track finishes
|
||||
```
|
||||
|
||||
For tracker modules, `getDuration()` and `getPosition()` are provided by libopenmpt's
|
||||
`get_duration_seconds()` and `get_position_seconds()` APIs.
|
||||
|
||||
### Considerations
|
||||
|
||||
- Tracker files are small (typically < 1 MB) — fetch the entire file before playback; no
|
||||
streaming/range-request needed
|
||||
- AudioWorklet requires a secure context (HTTPS or localhost)
|
||||
- The WASM bundle is ~1.7 MB — load it lazily on first tracker file playback
|
||||
- MediaSession API metadata should display module title from `/api/meta` response
|
||||
2422
Cargo.lock
generated
2422
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
@@ -4,11 +4,15 @@ members = [
|
||||
"furumi-server",
|
||||
"furumi-client-core",
|
||||
"furumi-mount-linux",
|
||||
"furumi-mount-macos"
|
||||
"furumi-mount-macos",
|
||||
"furumi-agent",
|
||||
"furumi-web-player",
|
||||
]
|
||||
default-members = [
|
||||
"furumi-common",
|
||||
"furumi-server",
|
||||
"furumi-client-core",
|
||||
"furumi-agent",
|
||||
"furumi-web-player",
|
||||
]
|
||||
resolver = "2"
|
||||
|
||||
@@ -14,8 +14,10 @@ WORKDIR /usr/src/app
|
||||
# Option: Copy in root workspace files and source crates
|
||||
COPY . .
|
||||
|
||||
ARG FURUMI_VERSION=dev
|
||||
|
||||
# Build only the server for release
|
||||
RUN cargo build --release --bin furumi-server
|
||||
RUN FURUMI_VERSION=${FURUMI_VERSION} cargo build --release --bin furumi-server
|
||||
|
||||
# Stage 2: Create the minimal runtime image
|
||||
FROM debian:bookworm-slim
|
||||
|
||||
32
Dockerfile.agent
Normal file
32
Dockerfile.agent
Normal file
@@ -0,0 +1,32 @@
|
||||
FROM rust:1.88.0-bookworm AS builder
|
||||
|
||||
RUN apt-get update && apt-get install -y \
|
||||
pkg-config \
|
||||
libssl-dev \
|
||||
protobuf-compiler \
|
||||
cmake \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
WORKDIR /usr/src/app
|
||||
COPY . .
|
||||
|
||||
ARG FURUMI_VERSION=dev
|
||||
RUN FURUMI_VERSION=${FURUMI_VERSION} cargo build --release --bin furumi-agent
|
||||
|
||||
FROM debian:bookworm-slim
|
||||
|
||||
RUN apt-get update && apt-get install -y \
|
||||
ca-certificates \
|
||||
libssl-dev \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
RUN useradd -ms /bin/bash appuser
|
||||
WORKDIR /home/appuser
|
||||
|
||||
COPY --from=builder /usr/src/app/target/release/furumi-agent /usr/local/bin/furumi-agent
|
||||
|
||||
USER appuser
|
||||
|
||||
EXPOSE 8090
|
||||
|
||||
ENTRYPOINT ["furumi-agent"]
|
||||
32
Dockerfile.web-player
Normal file
32
Dockerfile.web-player
Normal file
@@ -0,0 +1,32 @@
|
||||
FROM rust:1.88.0-bookworm AS builder
|
||||
|
||||
RUN apt-get update && apt-get install -y \
|
||||
pkg-config \
|
||||
libssl-dev \
|
||||
protobuf-compiler \
|
||||
cmake \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
WORKDIR /usr/src/app
|
||||
COPY . .
|
||||
|
||||
ARG FURUMI_VERSION=dev
|
||||
RUN FURUMI_VERSION=${FURUMI_VERSION} cargo build --release --bin furumi-web-player
|
||||
|
||||
FROM debian:bookworm-slim
|
||||
|
||||
RUN apt-get update && apt-get install -y \
|
||||
ca-certificates \
|
||||
libssl-dev \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
RUN useradd -ms /bin/bash appuser
|
||||
WORKDIR /home/appuser
|
||||
|
||||
COPY --from=builder /usr/src/app/target/release/furumi-web-player /usr/local/bin/furumi-web-player
|
||||
|
||||
USER appuser
|
||||
|
||||
EXPOSE 8080
|
||||
|
||||
ENTRYPOINT ["furumi-web-player"]
|
||||
214
PLAYER-API.md
Normal file
214
PLAYER-API.md
Normal file
@@ -0,0 +1,214 @@
|
||||
# Furumi Web Player API
|
||||
|
||||
Base URL: `http://<host>:<port>/api`
|
||||
|
||||
All endpoints require authentication when `--token` is set (via cookie `furumi_token=<token>` or query param `?token=<token>`).
|
||||
|
||||
All entity references use **slugs** — 12-character hex identifiers (not sequential IDs).
|
||||
|
||||
## Artists
|
||||
|
||||
### `GET /api/artists`
|
||||
|
||||
List all artists that have at least one track.
|
||||
|
||||
**Response:**
|
||||
```json
|
||||
[
|
||||
{
|
||||
"slug": "a1b2c3d4e5f6",
|
||||
"name": "Pink Floyd",
|
||||
"album_count": 5,
|
||||
"track_count": 42
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
Sorted alphabetically by name.
|
||||
|
||||
### `GET /api/artists/:slug`
|
||||
|
||||
Get artist details.
|
||||
|
||||
**Response:**
|
||||
```json
|
||||
{
|
||||
"slug": "a1b2c3d4e5f6",
|
||||
"name": "Pink Floyd"
|
||||
}
|
||||
```
|
||||
|
||||
**Errors:** `404` if not found.
|
||||
|
||||
### `GET /api/artists/:slug/albums`
|
||||
|
||||
List all albums by an artist.
|
||||
|
||||
**Response:**
|
||||
```json
|
||||
[
|
||||
{
|
||||
"slug": "b2c3d4e5f6a7",
|
||||
"name": "Wish You Were Here",
|
||||
"year": 1975,
|
||||
"track_count": 5,
|
||||
"has_cover": true
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
Sorted by year (nulls last), then name.
|
||||
|
||||
### `GET /api/artists/:slug/tracks`
|
||||
|
||||
List all tracks by an artist across all albums.
|
||||
|
||||
**Response:** same as album tracks (see below).
|
||||
|
||||
Sorted by album year, album name, track number, title.
|
||||
|
||||
## Albums
|
||||
|
||||
### `GET /api/albums/:slug`
|
||||
|
||||
List all tracks in an album.
|
||||
|
||||
**Response:**
|
||||
```json
|
||||
[
|
||||
{
|
||||
"slug": "c3d4e5f6a7b8",
|
||||
"title": "Have a Cigar",
|
||||
"track_number": 3,
|
||||
"duration_secs": 312.5,
|
||||
"artist_name": "Pink Floyd",
|
||||
"album_name": "Wish You Were Here",
|
||||
"album_slug": "b2c3d4e5f6a7",
|
||||
"genre": "Progressive Rock"
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
Sorted by track number (nulls last), then title. Fields `album_name`, `album_slug` may be `null` for tracks without an album.
|
||||
|
||||
### `GET /api/albums/:slug/cover`
|
||||
|
||||
Serve the album cover image from the `album_images` table.
|
||||
|
||||
**Response:** Binary image data with appropriate `Content-Type` (`image/jpeg`, `image/png`, etc.) and `Cache-Control: public, max-age=86400`.
|
||||
|
||||
**Errors:** `404` if no cover exists.
|
||||
|
||||
## Tracks
|
||||
|
||||
### `GET /api/tracks/:slug`
|
||||
|
||||
Get full track details.
|
||||
|
||||
**Response:**
|
||||
```json
|
||||
{
|
||||
"slug": "c3d4e5f6a7b8",
|
||||
"title": "Have a Cigar",
|
||||
"track_number": 3,
|
||||
"duration_secs": 312.5,
|
||||
"genre": "Progressive Rock",
|
||||
"storage_path": "/music/storage/Pink Floyd/Wish You Were Here/03 - Have a Cigar.flac",
|
||||
"artist_name": "Pink Floyd",
|
||||
"artist_slug": "a1b2c3d4e5f6",
|
||||
"album_name": "Wish You Were Here",
|
||||
"album_slug": "b2c3d4e5f6a7",
|
||||
"album_year": 1975
|
||||
}
|
||||
```
|
||||
|
||||
**Errors:** `404` if not found.
|
||||
|
||||
### `GET /api/tracks/:slug/cover`
|
||||
|
||||
Serve cover art for a specific track. Resolution order:
|
||||
|
||||
1. Album cover from `album_images` table (if the track belongs to an album with a cover)
|
||||
2. Embedded cover art extracted from the audio file metadata (ID3/Vorbis/etc. via Symphonia)
|
||||
3. `404` if no cover art is available
|
||||
|
||||
**Response:** Binary image data with `Content-Type` and `Cache-Control: public, max-age=86400`.
|
||||
|
||||
**Errors:** `404` if no cover art found.
|
||||
|
||||
## Streaming
|
||||
|
||||
### `GET /api/stream/:slug`
|
||||
|
||||
Stream the audio file for a track.
|
||||
|
||||
Supports HTTP **Range requests** for seeking:
|
||||
- Full response: `200 OK` with `Content-Length` and `Accept-Ranges: bytes`
|
||||
- Partial response: `206 Partial Content` with `Content-Range`
|
||||
- Invalid range: `416 Range Not Satisfiable`
|
||||
|
||||
`Content-Type` is determined by the file extension (e.g. `audio/flac`, `audio/mpeg`).
|
||||
|
||||
**Errors:** `404` if track or file not found.
|
||||
|
||||
## Search
|
||||
|
||||
### `GET /api/search?q=<query>&limit=<n>`
|
||||
|
||||
Search across artists, albums, and tracks by name (case-insensitive substring match).
|
||||
|
||||
| Parameter | Required | Default | Description |
|
||||
|-----------|----------|---------|-------------|
|
||||
| `q` | yes | — | Search query |
|
||||
| `limit` | no | 20 | Max results |
|
||||
|
||||
**Response:**
|
||||
```json
|
||||
[
|
||||
{
|
||||
"result_type": "artist",
|
||||
"slug": "a1b2c3d4e5f6",
|
||||
"name": "Pink Floyd",
|
||||
"detail": null
|
||||
},
|
||||
{
|
||||
"result_type": "album",
|
||||
"slug": "b2c3d4e5f6a7",
|
||||
"name": "Wish You Were Here",
|
||||
"detail": "Pink Floyd"
|
||||
},
|
||||
{
|
||||
"result_type": "track",
|
||||
"slug": "c3d4e5f6a7b8",
|
||||
"name": "Have a Cigar",
|
||||
"detail": "Pink Floyd"
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
`detail` contains the artist name for albums and tracks, `null` for artists.
|
||||
|
||||
Sorted by result type (artist → album → track), then by name.
|
||||
|
||||
## Authentication
|
||||
|
||||
When `--token` / `FURUMI_PLAYER_TOKEN` is set:
|
||||
|
||||
- **Cookie:** `furumi_token=<token>` — set after login
|
||||
- **Query parameter:** `?token=<token>` — redirects to player and sets cookie
|
||||
|
||||
When token is empty, authentication is disabled and all endpoints are public.
|
||||
|
||||
Unauthenticated requests receive `401 Unauthorized` with a login form.
|
||||
|
||||
## Error format
|
||||
|
||||
All errors return JSON:
|
||||
|
||||
```json
|
||||
{
|
||||
"error": "description of the error"
|
||||
}
|
||||
```
|
||||
|
||||
With appropriate HTTP status code (`400`, `404`, `500`, etc.).
|
||||
217
README.md
217
README.md
@@ -1,54 +1,117 @@
|
||||
# Furumi-ng
|
||||
|
||||
Remote filesystem over encrypted gRPC. Mount a directory from a remote server as a local folder via FUSE.
|
||||
A music platform consisting of a remote filesystem, an AI-powered metadata agent, and a database-backed web player.
|
||||
|
||||
Designed for streaming media (video, music) over the network.
|
||||
## Components
|
||||
|
||||
```
|
||||
furumi-server gRPC remote filesystem with TLS and auth
|
||||
furumi-client-core Cross-platform gRPC client library
|
||||
furumi-mount-linux FUSE mount for Linux
|
||||
furumi-mount-macos NFS mount for macOS
|
||||
furumi-agent AI metadata agent (LLM-powered ingest pipeline + admin UI)
|
||||
furumi-web-player Database-backed web music player with OIDC auth
|
||||
```
|
||||
|
||||
### furumi-server
|
||||
|
||||
Exposes a local directory over encrypted gRPC. Supports Bearer token auth, OIDC/SSO, Prometheus metrics, and a built-in web player for direct filesystem browsing.
|
||||
|
||||
### furumi-agent
|
||||
|
||||
Background service that watches an inbox folder for new music files, extracts metadata, normalizes it using a local LLM (via Ollama), and stores canonical metadata in PostgreSQL. Features:
|
||||
|
||||
- Automatic metadata extraction (Symphonia) and file path parsing
|
||||
- LLM normalization with RAG (queries existing artists/albums in DB for consistency)
|
||||
- Featured artist detection (`feat.`, `п.у.`, `&`, etc.)
|
||||
- Album cover image processing
|
||||
- Auto-approval for high-confidence results, review queue for uncertain ones
|
||||
- Admin web UI with batch operations, inline editing, album grouping
|
||||
- Organized file storage (`Artist/Album/Track.ext`)
|
||||
- Configurable system prompt (built-in default or external file)
|
||||
- Database migrations via sqlx
|
||||
|
||||
### furumi-web-player
|
||||
|
||||
Web music player that reads exclusively from the database populated by the agent. Features:
|
||||
|
||||
- Browse by Artists, Albums, Tracks
|
||||
- Full-text search across the library
|
||||
- Audio streaming with HTTP Range request support
|
||||
- Album cover art (from DB or embedded in audio files)
|
||||
- Queue management with shuffle, repeat, drag-and-drop reorder
|
||||
- Media Session API (hardware controls, lock screen integration)
|
||||
- OIDC/SSO authentication
|
||||
- Deep linking (`?t=<track_slug>`)
|
||||
- Relative URL paths (works behind any reverse proxy prefix)
|
||||
|
||||
## Architecture
|
||||
|
||||
```
|
||||
furumi-server (gRPC + TLS) ←→ furumi-client-core ←→ furumi-mount-{linux,macos} (FUSE / NFS)
|
||||
┌─────────────────┐
|
||||
│ Ollama (LLM) │
|
||||
└────────┬────────┘
|
||||
│
|
||||
┌──────────┐ ┌────────────┴────────────┐ ┌──────────────────┐
|
||||
│ Inbox │───→│ furumi-agent │───→│ Storage (files) │
|
||||
│ folder │ │ (ingest + admin UI) │ └────────┬─────────┘
|
||||
└──────────┘ └────────────┬────────────┘ │
|
||||
│ │
|
||||
┌──────┴──────┐ ┌───────┴────────┐
|
||||
│ PostgreSQL │←──────────│ furumi-web- │
|
||||
│ (metadata) │ │ player │
|
||||
└─────────────┘ └────────────────┘
|
||||
```
|
||||
|
||||
- **furumi-server** — exposes a directory over gRPC with auto-TLS, Bearer token auth, and Prometheus metrics
|
||||
- **furumi-client-core** — cross-platform gRPC client library with attribute caching
|
||||
- **furumi-mount-linux** — mounts the remote directory locally via FUSE (read-only)
|
||||
- **furumi-mount-macos** — mounts the remote directory locally via a local NFS server (read-only)
|
||||
|
||||
## Quick Start
|
||||
|
||||
### Remote Filesystem (FUSE/NFS mount)
|
||||
|
||||
```bash
|
||||
# Build
|
||||
cargo build --release --workspace
|
||||
|
||||
# Server — auto-generates TLS certificate, saves it for client
|
||||
# Server
|
||||
./target/release/furumi-server \
|
||||
--root /path/to/media \
|
||||
--token mysecrettoken \
|
||||
--tls-cert-out /tmp/furumi-ca.pem
|
||||
--token mysecrettoken
|
||||
|
||||
# Client (Linux) — automatically uses TLS, trusts server certificate
|
||||
# Client (Linux)
|
||||
./target/release/furumi-mount-linux \
|
||||
--server server-ip:50051 \
|
||||
--token mysecrettoken \
|
||||
--mount /mnt/remote
|
||||
|
||||
# Client (macOS)
|
||||
./target/release/furumi-mount-macos \
|
||||
--server server-ip:50051 \
|
||||
--token mysecrettoken \
|
||||
--mount /Volumes/remote
|
||||
|
||||
# Use it
|
||||
ls /mnt/remote
|
||||
mpv /mnt/remote/video.mkv
|
||||
```
|
||||
|
||||
## Encryption
|
||||
### Music Platform (Agent + Player)
|
||||
|
||||
TLS is enabled by default. The server auto-generates a self-signed certificate on each start — no manual cert management required. The client automatically trusts the server's certificate for encryption.
|
||||
Requires PostgreSQL with `pg_trgm` extension and Ollama for LLM.
|
||||
|
||||
To disable TLS (not recommended): `--no-tls` on both server and client.
|
||||
```bash
|
||||
# 1. Start PostgreSQL
|
||||
docker run -d --name furumi-pg \
|
||||
-e POSTGRES_DB=furumi -e POSTGRES_USER=furumi -e POSTGRES_PASSWORD=furumi \
|
||||
-p 5432:5432 postgres:17
|
||||
|
||||
# 2. Create directories
|
||||
mkdir -p /music/inbox /music/storage
|
||||
|
||||
# 3. Start the agent (runs migrations automatically)
|
||||
./target/release/furumi-agent \
|
||||
--inbox-dir /music/inbox \
|
||||
--storage-dir /music/storage \
|
||||
--database-url "postgres://furumi:furumi@localhost:5432/furumi" \
|
||||
--ollama-url "http://localhost:11434" \
|
||||
--ollama-model "qwen3:14b"
|
||||
|
||||
# 4. Start the web player
|
||||
./target/release/furumi-web-player \
|
||||
--storage-dir /music/storage \
|
||||
--database-url "postgres://furumi:furumi@localhost:5432/furumi"
|
||||
|
||||
# 5. Drop music files into /music/inbox — agent processes them automatically
|
||||
# 6. Open http://localhost:8080 to play music
|
||||
# 7. Open http://localhost:8090 for the agent admin UI
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
@@ -62,9 +125,11 @@ All options can be set via CLI flags or environment variables.
|
||||
| `--root` | `FURUMI_ROOT` | `.` | Directory to expose |
|
||||
| `--token` | `FURUMI_TOKEN` | *(empty, auth off)* | Bearer token |
|
||||
| `--metrics-bind` | `FURUMI_METRICS_BIND` | `0.0.0.0:9090` | Prometheus endpoint |
|
||||
| `--web-bind` | `FURUMI_WEB_BIND` | `0.0.0.0:8080` | Built-in web player |
|
||||
| `--no-web` | — | `false` | Disable built-in web player |
|
||||
| `--no-tls` | — | `false` | Disable TLS |
|
||||
|
||||
### Client
|
||||
### Client (Linux / macOS)
|
||||
|
||||
| Flag | Env | Default | Description |
|
||||
|------|-----|---------|-------------|
|
||||
@@ -73,22 +138,110 @@ All options can be set via CLI flags or environment variables.
|
||||
| `--mount` | `FURUMI_MOUNT` | — | Mount point directory |
|
||||
| `--no-tls` | — | `false` | Disable TLS |
|
||||
|
||||
### Metadata Agent
|
||||
|
||||
| Flag | Env | Default | Description |
|
||||
|------|-----|---------|-------------|
|
||||
| `--bind` | `FURUMI_AGENT_BIND` | `0.0.0.0:8090` | Admin UI address |
|
||||
| `--inbox-dir` | `FURUMI_AGENT_INBOX_DIR` | — | Watch folder for new files |
|
||||
| `--storage-dir` | `FURUMI_AGENT_STORAGE_DIR` | — | Permanent storage folder |
|
||||
| `--database-url` | `FURUMI_AGENT_DATABASE_URL` | — | PostgreSQL URL |
|
||||
| `--ollama-url` | `FURUMI_AGENT_OLLAMA_URL` | `http://localhost:11434` | Ollama API endpoint |
|
||||
| `--ollama-model` | `FURUMI_AGENT_OLLAMA_MODEL` | `qwen3:14b` | LLM model name |
|
||||
| `--poll-interval-secs` | `FURUMI_AGENT_POLL_INTERVAL_SECS` | `30` | Inbox scan interval |
|
||||
| `--confidence-threshold` | `FURUMI_AGENT_CONFIDENCE_THRESHOLD` | `0.85` | Auto-approve threshold |
|
||||
| `--system-prompt-file` | `FURUMI_AGENT_SYSTEM_PROMPT_FILE` | *(built-in)* | Custom LLM prompt |
|
||||
|
||||
### Web Player
|
||||
|
||||
| Flag | Env | Default | Description |
|
||||
|------|-----|---------|-------------|
|
||||
| `--bind` | `FURUMI_PLAYER_BIND` | `0.0.0.0:8080` | Player address |
|
||||
| `--database-url` | `FURUMI_PLAYER_DATABASE_URL` | — | PostgreSQL URL |
|
||||
| `--storage-dir` | `FURUMI_PLAYER_STORAGE_DIR` | — | Storage folder (for streaming) |
|
||||
| `--oidc-issuer-url` | `FURUMI_PLAYER_OIDC_ISSUER_URL` | *(disabled)* | OIDC issuer |
|
||||
| `--oidc-client-id` | `FURUMI_PLAYER_OIDC_CLIENT_ID` | — | OIDC client ID |
|
||||
| `--oidc-client-secret` | `FURUMI_PLAYER_OIDC_CLIENT_SECRET` | — | OIDC client secret |
|
||||
| `--oidc-redirect-url` | `FURUMI_PLAYER_OIDC_REDIRECT_URL` | — | OIDC redirect URL |
|
||||
| `--oidc-session-secret` | `FURUMI_PLAYER_OIDC_SESSION_SECRET` | *(random)* | Session HMAC secret |
|
||||
|
||||
## Docker Compose
|
||||
|
||||
The easiest way to run the entire backend stack (PostgreSQL, Agent, Web Player, and gRPC Server) is using Docker Compose.
|
||||
|
||||
### Quick Start
|
||||
|
||||
1. **Prepare directories**:
|
||||
```bash
|
||||
mkdir -p inbox storage
|
||||
```
|
||||
2. **Start the services**:
|
||||
```bash
|
||||
docker compose up -d
|
||||
```
|
||||
3. **Check logs**:
|
||||
```bash
|
||||
docker compose logs -f
|
||||
```
|
||||
|
||||
The following services will be available:
|
||||
- **Web Player**: [http://localhost:8085](http://localhost:8085)
|
||||
- **Agent Admin UI**: [http://localhost:8090](http://localhost:8090)
|
||||
- **Metrics**: [http://localhost:9090/metrics](http://localhost:9090/metrics)
|
||||
|
||||
> [!NOTE]
|
||||
> The Agent expects Ollama to be running. By default, it tries to connect to the host at `http://localhost:11434`.
|
||||
|
||||
### Reference Commands
|
||||
|
||||
- **Start**: `docker compose up -d`
|
||||
- **Stop**: `docker compose stop`
|
||||
- **Stop and remove containers**: `docker compose down`
|
||||
- **Clear database and storage**: `docker compose down -v`
|
||||
|
||||
### Environment Variables
|
||||
|
||||
To configure the Agent (especially for remote Ollama or private models) and database, create an `.env` file in the root directory:
|
||||
|
||||
```env
|
||||
# Database
|
||||
POSTGRES_PASSWORD=secure-password
|
||||
|
||||
# LLM (Ollama)
|
||||
OLLAMA_URL=http://your-ollama-host:11434
|
||||
OLLAMA_AUTH="Bearer your-token"
|
||||
|
||||
# Server Security
|
||||
FURUMI_TOKEN=secure-server-token
|
||||
```
|
||||
|
||||
For more options, refer to the [Configuration](#configuration) section.
|
||||
|
||||
## Docker
|
||||
|
||||
Pre-built images are available on Docker Hub:
|
||||
|
||||
```bash
|
||||
docker pull <user>/furumi-server
|
||||
docker pull <user>/furumi-web-player
|
||||
docker pull <user>/furumi-metadata-agent
|
||||
```
|
||||
|
||||
## Prometheus Metrics
|
||||
|
||||
Available at `http://<metrics-bind>/metrics`:
|
||||
Available at `http://<metrics-bind>/metrics` (server only):
|
||||
|
||||
- `furumi_grpc_requests_total` — request count by method and status
|
||||
- `furumi_grpc_request_duration_seconds` — request latency histogram
|
||||
- `furumi_bytes_read_total` — total bytes streamed
|
||||
- `furumi_active_streams` — current streaming connections
|
||||
- `furumi_file_open_errors_total` — file access errors
|
||||
- `furumi_auth_failures_total` — authentication failures
|
||||
|
||||
## Requirements
|
||||
|
||||
- Linux with `libfuse3-dev` and `pkg-config` (for client)
|
||||
- macOS (uses built-in NFS client)
|
||||
- Rust 2024 edition
|
||||
- PostgreSQL 14+ with `pg_trgm` extension (for agent and web player)
|
||||
- Ollama with a local LLM (for agent)
|
||||
- Linux with `libfuse3-dev` (for FUSE client only)
|
||||
|
||||
## License
|
||||
|
||||
|
||||
61
docker-compose.yml
Normal file
61
docker-compose.yml
Normal file
@@ -0,0 +1,61 @@
|
||||
services:
|
||||
db:
|
||||
image: postgres:17-alpine
|
||||
container_name: furumi-db
|
||||
environment:
|
||||
POSTGRES_DB: ${POSTGRES_DB:-furumi}
|
||||
POSTGRES_USER: ${POSTGRES_USER:-furumi}
|
||||
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-furumi}
|
||||
volumes:
|
||||
- pgdata:/var/lib/postgresql/data
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "pg_isready -U furumi -d furumi"]
|
||||
interval: 5s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
|
||||
agent:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: Dockerfile.agent
|
||||
container_name: furumi-agent
|
||||
depends_on:
|
||||
db:
|
||||
condition: service_healthy
|
||||
ports:
|
||||
- "8090:8090"
|
||||
environment:
|
||||
FURUMI_AGENT_DATABASE_URL: "postgres://${POSTGRES_USER:-furumi}:${POSTGRES_PASSWORD:-furumi}@db:5432/${POSTGRES_DB:-furumi}"
|
||||
FURUMI_AGENT_INBOX_DIR: "/inbox"
|
||||
FURUMI_AGENT_STORAGE_DIR: "/storage"
|
||||
FURUMI_AGENT_OLLAMA_URL: "${OLLAMA_URL:-http://host.docker.internal:11434}"
|
||||
FURUMI_AGENT_OLLAMA_AUTH: "${OLLAMA_AUTH:-CHANGE-ME}"
|
||||
FURUMI_PLAYER_BIND: "0.0.0.0:8090"
|
||||
FURUMI_AGENT_POLL_INTERVAL_SECS: 5
|
||||
volumes:
|
||||
- ./inbox:/inbox
|
||||
- ./storage:/storage
|
||||
extra_hosts:
|
||||
- "host.docker.internal:host-gateway"
|
||||
restart: always
|
||||
|
||||
web-player:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: Dockerfile.web-player
|
||||
container_name: furumi-web-player
|
||||
depends_on:
|
||||
db:
|
||||
condition: service_healthy
|
||||
ports:
|
||||
- "8085:8085"
|
||||
environment:
|
||||
FURUMI_PLAYER_DATABASE_URL: "postgres://${POSTGRES_USER:-furumi}:${POSTGRES_PASSWORD:-furumi}@db:5432/${POSTGRES_DB:-furumi}"
|
||||
FURUMI_PLAYER_STORAGE_DIR: "/storage"
|
||||
FURUMI_PLAYER_BIND: "0.0.0.0:8085"
|
||||
volumes:
|
||||
- ./storage:/storage
|
||||
restart: always
|
||||
|
||||
volumes:
|
||||
pgdata:
|
||||
23
furumi-agent/Cargo.toml
Normal file
23
furumi-agent/Cargo.toml
Normal file
@@ -0,0 +1,23 @@
|
||||
[package]
|
||||
name = "furumi-agent"
|
||||
version = "0.3.4"
|
||||
edition = "2024"
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1.0"
|
||||
blake3 = "1"
|
||||
chrono = { version = "0.4", features = ["serde"] }
|
||||
clap = { version = "4.5", features = ["derive", "env"] }
|
||||
encoding_rs = "0.8"
|
||||
reqwest = { version = "0.12", default-features = false, features = ["rustls-tls", "json"] }
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
serde_json = "1"
|
||||
sqlx = { version = "0.8", features = ["runtime-tokio-rustls", "postgres", "chrono", "uuid", "migrate"] }
|
||||
symphonia = { version = "0.5", default-features = false, features = ["mp3", "aac", "flac", "vorbis", "wav", "alac", "adpcm", "pcm", "mpa", "isomp4", "ogg", "aiff", "mkv"] }
|
||||
thiserror = "2.0"
|
||||
tokio = { version = "1.50", features = ["full"] }
|
||||
tracing = "0.1"
|
||||
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
|
||||
axum = { version = "0.7", features = ["tokio", "macros"] }
|
||||
tower = { version = "0.4", features = ["util"] }
|
||||
uuid = { version = "1", features = ["v4", "serde"] }
|
||||
86
furumi-agent/migrations/0001_initial.sql
Normal file
86
furumi-agent/migrations/0001_initial.sql
Normal file
@@ -0,0 +1,86 @@
|
||||
CREATE EXTENSION IF NOT EXISTS pg_trgm;
|
||||
|
||||
CREATE TABLE artists (
|
||||
id BIGSERIAL PRIMARY KEY,
|
||||
name TEXT NOT NULL UNIQUE,
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||
);
|
||||
|
||||
CREATE TABLE albums (
|
||||
id BIGSERIAL PRIMARY KEY,
|
||||
artist_id BIGINT NOT NULL REFERENCES artists(id) ON DELETE CASCADE,
|
||||
name TEXT NOT NULL,
|
||||
year INT,
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
UNIQUE (artist_id, name)
|
||||
);
|
||||
|
||||
CREATE TABLE tracks (
|
||||
id BIGSERIAL PRIMARY KEY,
|
||||
artist_id BIGINT NOT NULL REFERENCES artists(id) ON DELETE CASCADE,
|
||||
album_id BIGINT REFERENCES albums(id) ON DELETE SET NULL,
|
||||
title TEXT NOT NULL,
|
||||
track_number INT,
|
||||
genre TEXT,
|
||||
duration_secs DOUBLE PRECISION,
|
||||
codec TEXT,
|
||||
bitrate INT,
|
||||
sample_rate INT,
|
||||
file_hash TEXT NOT NULL UNIQUE,
|
||||
file_size BIGINT NOT NULL,
|
||||
storage_path TEXT NOT NULL,
|
||||
manual_override BOOLEAN NOT NULL DEFAULT FALSE,
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||
);
|
||||
|
||||
CREATE TABLE pending_tracks (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
status TEXT NOT NULL DEFAULT 'pending',
|
||||
inbox_path TEXT NOT NULL,
|
||||
file_hash TEXT NOT NULL,
|
||||
file_size BIGINT NOT NULL,
|
||||
raw_title TEXT,
|
||||
raw_artist TEXT,
|
||||
raw_album TEXT,
|
||||
raw_year INT,
|
||||
raw_track_number INT,
|
||||
raw_genre TEXT,
|
||||
duration_secs DOUBLE PRECISION,
|
||||
path_title TEXT,
|
||||
path_artist TEXT,
|
||||
path_album TEXT,
|
||||
path_year INT,
|
||||
path_track_number INT,
|
||||
norm_title TEXT,
|
||||
norm_artist TEXT,
|
||||
norm_album TEXT,
|
||||
norm_year INT,
|
||||
norm_track_number INT,
|
||||
norm_genre TEXT,
|
||||
norm_featured_artists TEXT,
|
||||
confidence DOUBLE PRECISION,
|
||||
llm_notes TEXT,
|
||||
error_message TEXT,
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||
);
|
||||
|
||||
CREATE TABLE track_artists (
|
||||
id BIGSERIAL PRIMARY KEY,
|
||||
track_id BIGINT NOT NULL REFERENCES tracks(id) ON DELETE CASCADE,
|
||||
artist_id BIGINT NOT NULL REFERENCES artists(id) ON DELETE CASCADE,
|
||||
role TEXT NOT NULL DEFAULT 'featured',
|
||||
UNIQUE (track_id, artist_id, role)
|
||||
);
|
||||
|
||||
-- Indexes
|
||||
CREATE INDEX idx_artists_name_trgm ON artists USING gin (name gin_trgm_ops);
|
||||
CREATE INDEX idx_albums_name_trgm ON albums USING gin (name gin_trgm_ops);
|
||||
CREATE INDEX idx_tracks_file_hash ON tracks (file_hash);
|
||||
CREATE INDEX idx_pending_status ON pending_tracks (status);
|
||||
CREATE INDEX idx_pending_file_hash ON pending_tracks (file_hash);
|
||||
CREATE INDEX idx_track_artists_track ON track_artists (track_id);
|
||||
CREATE INDEX idx_track_artists_artist ON track_artists (artist_id);
|
||||
37
furumi-agent/migrations/0002_covers_and_slugs.sql
Normal file
37
furumi-agent/migrations/0002_covers_and_slugs.sql
Normal file
@@ -0,0 +1,37 @@
|
||||
-- Add slug (public unique ID) to tracks
|
||||
ALTER TABLE tracks ADD COLUMN slug TEXT;
|
||||
|
||||
-- Generate slugs for existing tracks
|
||||
UPDATE tracks SET slug = encode(gen_random_uuid()::text::bytea, 'hex') WHERE slug IS NULL;
|
||||
|
||||
ALTER TABLE tracks ALTER COLUMN slug SET NOT NULL;
|
||||
CREATE UNIQUE INDEX idx_tracks_slug ON tracks (slug);
|
||||
|
||||
-- Add slug to albums
|
||||
ALTER TABLE albums ADD COLUMN slug TEXT;
|
||||
UPDATE albums SET slug = encode(gen_random_uuid()::text::bytea, 'hex') WHERE slug IS NULL;
|
||||
ALTER TABLE albums ALTER COLUMN slug SET NOT NULL;
|
||||
CREATE UNIQUE INDEX idx_albums_slug ON albums (slug);
|
||||
|
||||
-- Add slug to artists
|
||||
ALTER TABLE artists ADD COLUMN slug TEXT;
|
||||
UPDATE artists SET slug = encode(gen_random_uuid()::text::bytea, 'hex') WHERE slug IS NULL;
|
||||
ALTER TABLE artists ALTER COLUMN slug SET NOT NULL;
|
||||
CREATE UNIQUE INDEX idx_artists_slug ON artists (slug);
|
||||
|
||||
-- Album artwork table
|
||||
CREATE TABLE album_images (
|
||||
id BIGSERIAL PRIMARY KEY,
|
||||
album_id BIGINT NOT NULL REFERENCES albums(id) ON DELETE CASCADE,
|
||||
image_type TEXT NOT NULL DEFAULT 'cover', -- 'cover', 'back', 'booklet', 'other'
|
||||
file_path TEXT NOT NULL, -- relative path in storage
|
||||
file_hash TEXT NOT NULL,
|
||||
mime_type TEXT NOT NULL,
|
||||
width INT,
|
||||
height INT,
|
||||
file_size BIGINT NOT NULL,
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||
);
|
||||
|
||||
CREATE INDEX idx_album_images_album ON album_images (album_id);
|
||||
CREATE UNIQUE INDEX idx_album_images_hash ON album_images (file_hash);
|
||||
10
furumi-agent/migrations/0003_artist_merges.sql
Normal file
10
furumi-agent/migrations/0003_artist_merges.sql
Normal file
@@ -0,0 +1,10 @@
|
||||
CREATE TABLE artist_merges (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
status TEXT NOT NULL DEFAULT 'pending',
|
||||
source_artist_ids TEXT NOT NULL,
|
||||
proposal TEXT,
|
||||
llm_notes TEXT,
|
||||
error_message TEXT,
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||
);
|
||||
12
furumi-agent/migrations/0004_release_types_hidden.sql
Normal file
12
furumi-agent/migrations/0004_release_types_hidden.sql
Normal file
@@ -0,0 +1,12 @@
|
||||
ALTER TABLE albums
|
||||
ADD COLUMN IF NOT EXISTS release_type TEXT NOT NULL DEFAULT 'album',
|
||||
ADD COLUMN IF NOT EXISTS hidden BOOLEAN NOT NULL DEFAULT FALSE;
|
||||
|
||||
ALTER TABLE tracks
|
||||
ADD COLUMN IF NOT EXISTS hidden BOOLEAN NOT NULL DEFAULT FALSE;
|
||||
|
||||
ALTER TABLE artists
|
||||
ADD COLUMN IF NOT EXISTS hidden BOOLEAN NOT NULL DEFAULT FALSE;
|
||||
|
||||
ALTER TABLE pending_tracks
|
||||
ADD COLUMN IF NOT EXISTS norm_release_type TEXT;
|
||||
65
furumi-agent/prompts/merge.txt
Normal file
65
furumi-agent/prompts/merge.txt
Normal file
@@ -0,0 +1,65 @@
|
||||
You are a music library artist merge assistant. You will receive a list of artists (with their albums and tracks, each with database IDs) that have been identified as potential duplicates. Your job is to analyze them and produce a merge plan.
|
||||
|
||||
## Input format
|
||||
|
||||
You will receive a structured list like:
|
||||
|
||||
### Artist ID 42: "pink floyd"
|
||||
Album ID 10: "the wall" (1979)
|
||||
- 01. "In the Flesh?" [track_id=100]
|
||||
- 02. "The Thin Ice" [track_id=101]
|
||||
|
||||
### Artist ID 43: "Pink Floyd"
|
||||
Album ID 11: "Wish You Were Here" (1975)
|
||||
- 01. "Shine On You Crazy Diamond (Parts I-V)" [track_id=200]
|
||||
|
||||
## Your task
|
||||
|
||||
Determine if the artists are duplicates and produce a merge plan.
|
||||
|
||||
## Rules
|
||||
|
||||
### 1. Canonical artist name
|
||||
- Use correct capitalization and canonical spelling (e.g., "pink floyd" → "Pink Floyd", "AC DC" → "AC/DC").
|
||||
- If the database already contains an artist with a well-formed name, prefer that exact form.
|
||||
- If one artist has clearly more tracks or albums, their name spelling may be more authoritative.
|
||||
- Fix obvious typos or casing errors.
|
||||
|
||||
### 2. Winner artist
|
||||
- `winner_artist_id` must be the ID of one of the provided artists — the one whose identity (ID) will survive in the database.
|
||||
- All other artists are "losers" and will be deleted after their albums and tracks are moved to the winner.
|
||||
- Prefer the artist ID that has the most tracks/albums, or the one with the most correct canonical name.
|
||||
|
||||
### 3. Canonical album names
|
||||
- Use correct capitalization (title case for English, preserve language for non-English).
|
||||
- Fix slug-like names: "new-songs" → "New Songs", "the_dark_side" → "The Dark Side".
|
||||
- Fix all-lowercase or all-uppercase: "WISH YOU WERE HERE" → "Wish You Were Here".
|
||||
- Preserve creative/intentional stylization (e.g., "OK Computer" stays as-is, "(What's the Story) Morning Glory?" stays as-is).
|
||||
- If the database already contains the album under another artist with a well-formed name, use that exact name.
|
||||
|
||||
### 4. Album deduplication
|
||||
- If two albums (across the artists being merged) have the same or very similar name, they are the same album. In that case, pick the better-formed one as the "winner album".
|
||||
- Set `merge_into_album_id` to the winner album's ID for the duplicate album. This means all tracks from the duplicate will be moved into the winner album, and the duplicate album will be deleted.
|
||||
- If an album is unique (no duplicate exists), set `merge_into_album_id` to null — the album will simply be renamed and moved to the winner artist.
|
||||
- When comparing album names for similarity, ignore case, punctuation, and common suffixes like "(Remastered)" for the purpose of duplicate detection. However, treat remastered editions as separate albums unless both albums are clearly the same remaster.
|
||||
|
||||
### 5. Album mappings coverage
|
||||
- `album_mappings` must include an entry for EVERY album across ALL source artists, not just duplicates.
|
||||
- Every album (from every artist being merged) needs a canonical name, even if it is not being merged into another album.
|
||||
|
||||
### 6. Notes
|
||||
- The `notes` field should briefly explain: which artist was chosen as winner and why, which albums were renamed, which albums were deduplicated and into what.
|
||||
|
||||
## Response format
|
||||
|
||||
You MUST respond with a single JSON object, no markdown fences, no extra text:
|
||||
|
||||
{"canonical_artist_name": "...", "winner_artist_id": 42, "album_mappings": [{"source_album_id": 10, "canonical_name": "The Wall", "merge_into_album_id": null}, {"source_album_id": 11, "canonical_name": "Wish You Were Here", "merge_into_album_id": null}], "notes": "..."}
|
||||
|
||||
- `canonical_artist_name`: the single correct name for this artist after merging.
|
||||
- `winner_artist_id`: the integer ID of the artist whose record survives (must be one of the IDs provided).
|
||||
- `album_mappings`: an array covering ALL albums from ALL source artists. Each entry:
|
||||
- `source_album_id`: the integer ID of this album (as provided in the input).
|
||||
- `canonical_name`: the corrected canonical name for this album.
|
||||
- `merge_into_album_id`: null if this album is just renamed/moved to the winner artist; or the integer ID of another album (the winner album) if this album's tracks should be merged into that album and this album deleted. Never set merge_into_album_id to the same album's own ID.
|
||||
- `notes`: brief explanation of the decisions made.
|
||||
102
furumi-agent/prompts/normalize.txt
Normal file
102
furumi-agent/prompts/normalize.txt
Normal file
@@ -0,0 +1,102 @@
|
||||
You are a music metadata normalization assistant. Your job is to take raw metadata extracted from audio files and produce clean, accurate, canonical metadata suitable for a music library database.
|
||||
|
||||
## Rules
|
||||
|
||||
1. **Artist names** must use correct capitalization and canonical spelling. Examples:
|
||||
- "pink floyd" → "Pink Floyd"
|
||||
- "AC DC" → "AC/DC"
|
||||
- "Guns n roses" → "Guns N' Roses"
|
||||
- "Led zepplin" → "Led Zeppelin" (fix common misspellings)
|
||||
- "саша скул" → "Саша Скул" (fix capitalization, keep the language as-is)
|
||||
- If the database already contains a matching artist (same name in any case or transliteration), always use the existing canonical name exactly. For example, if the DB has "Саша Скул" and the file says "саша скул" or "Sasha Skul", use "Саша Скул".
|
||||
- **Compound artist fields**: When the artist field or path contains multiple artist names joined by "и", "and", "&", "/", ",", "x", or "vs", you MUST split them. The "artist" field must contain ONLY ONE primary artist. All others go into "featured_artists". If one of the names already exists in the database, prefer that one as the primary artist.
|
||||
- Examples:
|
||||
- Artist or path: "Саша Скул и Олег Харитонов" with DB containing "Саша Скул" → artist: "Саша Скул", featured_artists: ["Олег Харитонов"]
|
||||
- Artist: "Metallica & Lou Reed" with DB containing "Metallica" → artist: "Metallica", featured_artists: ["Lou Reed"]
|
||||
- Artist: "Artist A / Artist B" with neither in DB → artist: "Artist A", featured_artists: ["Artist B"] (first listed = primary)
|
||||
- **NEVER create a new compound artist** like "X и Y" or "X & Y" as a single artist name. Always split into primary + featured.
|
||||
|
||||
2. **Featured artists**: Many tracks include collaborations. Guest artists can be indicated by ANY of the following markers (case-insensitive) in the artist field, track title, filename, or path:
|
||||
- English: "feat.", "ft.", "featuring", "with"
|
||||
- Russian: "п.у.", "при участии"
|
||||
- Parenthetical: "(feat. X)", "(ft. X)", "(п.у. X)", "(при участии X)"
|
||||
- Any other language-specific equivalent indicating a guest/featured collaboration
|
||||
|
||||
You must:
|
||||
- Extract the **primary artist** (the main performer) into the "artist" field.
|
||||
- Extract ALL **featured/guest artists** into a separate "featured_artists" array.
|
||||
- Remove the collaboration marker and featured artist names from the track title, keeping only the song name.
|
||||
- When multiple featured artists are listed, split them by commas or "&" into separate entries.
|
||||
- Examples:
|
||||
- Artist: "НСМВГЛП feat. XACV SQUAD" → artist: "НСМВГЛП", featured_artists: ["XACV SQUAD"]
|
||||
- Title: "Знаешь ли ты feat. SharOn" → title: "Знаешь ли ты", featured_artists: ["SharOn"]
|
||||
- Title: "Ваши мамки (п.у. Ваня Айван,Иван Смех, Жильцов)" → title: "Ваши мамки", featured_artists: ["Ваня Айван", "Иван Смех", "Жильцов"]
|
||||
- Title: "Молоды (п.у. Паша Батруха)" → title: "Молоды", featured_artists: ["Паша Батруха"]
|
||||
- Title: "Повелитель Мух (п.у. Пикуль)" → title: "Повелитель Мух", featured_artists: ["Пикуль"]
|
||||
- Artist: "A & B ft. C, D" → artist: "A & B", featured_artists: ["C", "D"]
|
||||
- **IMPORTANT**: Always check for parenthetical markers like "(п.у. ...)" or "(feat. ...)" at the end of track titles. These are very common and must not be missed.
|
||||
- Apply the same capitalization and consistency rules to featured artist names.
|
||||
- If the database already contains a matching featured artist name, use the existing canonical form.
|
||||
|
||||
3. **Album names** must use correct capitalization and canonical spelling.
|
||||
- Use title case for English albums.
|
||||
- Preserve original language for non-English albums.
|
||||
- If the database already contains a matching album under the same artist, use the existing name exactly.
|
||||
- Do not alter the creative content of album names (same principle as track titles).
|
||||
- **Remastered editions**: A remastered release is a separate album entity, even if it shares the same title and tracks as the original. If the tags or path indicate a remaster (e.g., "Remastered", "Remaster", "REMASTERED" anywhere in tags, filename, or path), append " (Remastered)" to the album name if not already present, and use the year of the remaster release (not the original). Example: original album "The Wall" (1979) remastered in 2011 → album: "The Wall (Remastered)", year: 2011.
|
||||
|
||||
4. **Track titles** must use correct capitalization, but their content must be preserved exactly.
|
||||
- Use title case for English titles.
|
||||
- Preserve original language for non-English titles.
|
||||
- Remove leading track numbers if present (e.g., "01 - Have a Cigar" → "Have a Cigar").
|
||||
- **NEVER remove, add, or alter words, numbers, suffixes, punctuation marks, or special characters in titles.** Your job is to fix capitalization and encoding, not to edit the creative content. If a title contains unusual punctuation, numbers, apostrophes, or symbols — they are intentional and must be kept as-is.
|
||||
- If all tracks in the same album follow a naming pattern (e.g., numbered names like "Part 1", "Part 2"), preserve that pattern consistently. Do not simplify or truncate individual track names.
|
||||
|
||||
5. **Year**: If not present in tags, try to infer from the file path. Only set a year if you are confident it is correct.
|
||||
|
||||
6. **Track number**: If not present in tags, try to infer from the filename (e.g., "03 - Song.flac" → track 3).
|
||||
|
||||
7. **Genre**: Normalize to a common genre name. Avoid overly specific sub-genres unless the existing database already uses them.
|
||||
|
||||
8. **Encoding issues**: Raw metadata may contain mojibake (e.g., Cyrillic text misread as Latin-1). If you detect garbled text that looks like encoding errors, attempt to determine the intended text.
|
||||
|
||||
9. **Preservation principle**: When in doubt, preserve the original value. Only change metadata when you are confident the change is a correction (e.g., fixing capitalization, fixing encoding, matching to an existing DB entry). Do not "clean up" or "simplify" values that look unusual — artists often use unconventional naming intentionally.
|
||||
|
||||
10. **Consistency**: When the database already contains entries for an artist or album, your output MUST match the existing canonical names. Do not introduce new variations.
|
||||
|
||||
11. **Confidence**: Rate your confidence from 0.0 to 1.0.
|
||||
- 1.0: All fields are clear and unambiguous.
|
||||
- 0.8+: Minor inferences made (e.g., year from path), but high certainty.
|
||||
- 0.5-0.8: Some guesswork involved, human review recommended.
|
||||
- Below 0.5: Significant uncertainty, definitely needs review.
|
||||
|
||||
12. **Release type**: Determine the type of release based on all available evidence.
|
||||
|
||||
Allowed values (use exactly one, lowercase):
|
||||
- `album`: Full-length release, typically 4+ tracks
|
||||
- `single`: One or two tracks released as a single, OR folder/tag explicitly says "Single", "Сингл"
|
||||
- `ep`: Short release, typically 3-6 tracks, OR folder/path contains "EP" or "ЕП"
|
||||
- `compilation`: Best-of, greatest hits, anthology, сборник, compilation
|
||||
- `live`: Live recording, concert, live album — folder or tags contain "Live", "Concert", "Концерт"
|
||||
|
||||
Determination rules (in priority order):
|
||||
- If the folder path contains keywords like "Single", "Сингл", "single" → `single`
|
||||
- If the folder path contains "EP", "ЕП", "ep" (case-insensitive) → `ep`
|
||||
- If the folder path contains "Live", "Concert", "Концерт", "live" → `live`
|
||||
- If the folder path contains "Compilation", "сборник", "Anthology", "Greatest Hits" → `compilation`
|
||||
- If album name contains these keywords → apply same logic
|
||||
- If track count in folder is 1–2 → likely `single`
|
||||
- If track count in folder is 3–6 and no other evidence → likely `ep`
|
||||
- If track count is 7+ → likely `album`
|
||||
- When in doubt with 3–6 tracks, prefer `ep` over `album` only if EP indicators present, otherwise `album`
|
||||
|
||||
## Response format
|
||||
|
||||
You MUST respond with a single JSON object, no markdown fences, no extra text:
|
||||
|
||||
{"artist": "...", "album": "...", "title": "...", "year": 2000, "track_number": 1, "genre": "...", "featured_artists": [], "release_type": "album", "confidence": 0.95, "notes": "brief explanation of changes made"}
|
||||
|
||||
- Use null for fields you cannot determine.
|
||||
- Use an empty array [] for "featured_artists" if there are no featured artists.
|
||||
- The "notes" field should briefly explain what you changed and why.
|
||||
- "release_type" must be exactly one of: "album", "single", "ep", "compilation", "live"
|
||||
95
furumi-agent/src/config.rs
Normal file
95
furumi-agent/src/config.rs
Normal file
@@ -0,0 +1,95 @@
|
||||
use std::path::PathBuf;
|
||||
|
||||
use clap::Parser;
|
||||
|
||||
/// Default system prompt, compiled into the binary as a fallback.
|
||||
const DEFAULT_SYSTEM_PROMPT: &str = include_str!("../prompts/normalize.txt");
|
||||
|
||||
const DEFAULT_MERGE_PROMPT: &str = include_str!("../prompts/merge.txt");
|
||||
|
||||
// Command-line / environment configuration for the agent.
// NOTE: the field `///` comments below double as clap `--help` text, so they
// are left untouched; every flag can also be set via the FURUMI_AGENT_*
// environment variable named in its `#[arg(env = ...)]` attribute.
#[derive(Parser, Debug)]
#[command(version, about = "Furumi Agent: music metadata ingest and normalization")]
pub struct Args {
    /// IP address and port for the admin web UI
    #[arg(long, env = "FURUMI_AGENT_BIND", default_value = "0.0.0.0:8090")]
    pub bind: String,

    /// Directory to watch for new music files
    #[arg(long, env = "FURUMI_AGENT_INBOX_DIR")]
    pub inbox_dir: PathBuf,

    /// Directory for permanently stored and organized music files
    #[arg(long, env = "FURUMI_AGENT_STORAGE_DIR")]
    pub storage_dir: PathBuf,

    /// PostgreSQL connection URL
    #[arg(long, env = "FURUMI_AGENT_DATABASE_URL")]
    pub database_url: String,

    /// Ollama API base URL
    #[arg(long, env = "FURUMI_AGENT_OLLAMA_URL", default_value = "http://localhost:11434")]
    pub ollama_url: String,

    /// Ollama model name
    #[arg(long, env = "FURUMI_AGENT_OLLAMA_MODEL", default_value = "qwen3:14b")]
    pub ollama_model: String,

    /// Authorization header value for Ollama API (e.g. "Bearer <token>" or "Basic <base64>")
    #[arg(long, env = "FURUMI_AGENT_OLLAMA_AUTH")]
    pub ollama_auth: Option<String>,

    /// Inbox scan interval in seconds
    #[arg(long, env = "FURUMI_AGENT_POLL_INTERVAL_SECS", default_value_t = 30)]
    pub poll_interval_secs: u64,

    /// Confidence threshold for auto-approval (0.0 - 1.0)
    #[arg(long, env = "FURUMI_AGENT_CONFIDENCE_THRESHOLD", default_value_t = 0.85)]
    pub confidence_threshold: f64,

    /// Path to a custom system prompt file (overrides the built-in default)
    #[arg(long, env = "FURUMI_AGENT_SYSTEM_PROMPT_FILE")]
    pub system_prompt_file: Option<PathBuf>,

    /// Path to a custom merge prompt file (overrides the built-in default)
    #[arg(long, env = "FURUMI_AGENT_MERGE_PROMPT_FILE")]
    pub merge_prompt_file: Option<PathBuf>,
}
|
||||
|
||||
impl Args {
|
||||
pub fn validate(&self) -> Result<(), Box<dyn std::error::Error>> {
|
||||
if !self.inbox_dir.exists() || !self.inbox_dir.is_dir() {
|
||||
return Err(format!("Inbox directory {:?} does not exist or is not a directory", self.inbox_dir).into());
|
||||
}
|
||||
if !self.storage_dir.exists() || !self.storage_dir.is_dir() {
|
||||
return Err(format!("Storage directory {:?} does not exist or is not a directory", self.storage_dir).into());
|
||||
}
|
||||
if !(0.0..=1.0).contains(&self.confidence_threshold) {
|
||||
return Err("Confidence threshold must be between 0.0 and 1.0".into());
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Load the system prompt from a custom file or use the built-in default.
|
||||
pub fn load_system_prompt(&self) -> Result<String, Box<dyn std::error::Error>> {
|
||||
match &self.system_prompt_file {
|
||||
Some(path) => {
|
||||
tracing::info!("Loading system prompt from {:?}", path);
|
||||
Ok(std::fs::read_to_string(path)?)
|
||||
}
|
||||
None => {
|
||||
tracing::info!("Using built-in default system prompt");
|
||||
Ok(DEFAULT_SYSTEM_PROMPT.to_owned())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn load_merge_prompt(&self) -> Result<String, Box<dyn std::error::Error>> {
|
||||
match &self.merge_prompt_file {
|
||||
Some(path) => {
|
||||
tracing::info!("Loading merge prompt from {:?}", path);
|
||||
Ok(std::fs::read_to_string(path)?)
|
||||
}
|
||||
None => Ok(DEFAULT_MERGE_PROMPT.to_owned()),
|
||||
}
|
||||
}
|
||||
}
|
||||
1151
furumi-agent/src/db.rs
Normal file
1151
furumi-agent/src/db.rs
Normal file
File diff suppressed because it is too large
Load Diff
129
furumi-agent/src/ingest/metadata.rs
Normal file
129
furumi-agent/src/ingest/metadata.rs
Normal file
@@ -0,0 +1,129 @@
|
||||
use std::path::Path;
|
||||
|
||||
use symphonia::core::{
|
||||
codecs::CODEC_TYPE_NULL,
|
||||
formats::FormatOptions,
|
||||
io::MediaSourceStream,
|
||||
meta::{MetadataOptions, StandardTagKey},
|
||||
probe::Hint,
|
||||
};
|
||||
|
||||
/// Raw, un-normalized tag values read straight from an audio file.
/// Every field is optional: `None` means the tag was absent or unparseable.
#[derive(Debug, Default)]
pub struct RawMetadata {
    // Track title exactly as tagged (after mojibake fix).
    pub title: Option<String>,
    // Primary artist / performer.
    pub artist: Option<String>,
    // Album name.
    pub album: Option<String>,
    // Track number within the album.
    pub track_number: Option<u32>,
    // Release year (parsed from the leading part of a date tag).
    pub year: Option<u32>,
    // Genre string, uninterpreted.
    pub genre: Option<String>,
    // Playback length in seconds, derived from frame count / time base.
    pub duration_secs: Option<f64>,
}
|
||||
|
||||
/// Extract metadata from an audio file using Symphonia.
/// Must be called from a blocking context (spawn_blocking).
///
/// Tag sources are consulted in order: probe-level side metadata (e.g. ID3
/// read before the container), then format-embedded metadata as a fallback.
/// Duration comes from the first real (non-null-codec) track's frame count.
pub fn extract(path: &Path) -> anyhow::Result<RawMetadata> {
    let file = std::fs::File::open(path)?;
    let mss = MediaSourceStream::new(Box::new(file), Default::default());

    // Give the probe the file extension as a format hint, when available.
    let mut hint = Hint::new();
    if let Some(ext) = path.extension().and_then(|e| e.to_str()) {
        hint.with_extension(ext);
    }

    let mut probed = symphonia::default::get_probe().format(
        &hint,
        mss,
        &FormatOptions { enable_gapless: false, ..Default::default() },
        &MetadataOptions::default(),
    )?;

    let mut meta = RawMetadata::default();

    // Check metadata side-data (e.g., ID3 tags probed before format)
    if let Some(rev) = probed.metadata.get().as_ref().and_then(|m| m.current()) {
        extract_tags(rev.tags(), &mut meta);
    }

    // Also check format-embedded metadata — but only if the first pass left
    // the title unset (title presence is used as the "tags found" marker).
    if let Some(rev) = probed.format.metadata().current() {
        if meta.title.is_none() {
            extract_tags(rev.tags(), &mut meta);
        }
    }

    // Duration in seconds = frame count * time base (numer/denom), taken
    // from the first track with a real codec.
    meta.duration_secs = probed
        .format
        .tracks()
        .iter()
        .find(|t| t.codec_params.codec != CODEC_TYPE_NULL)
        .and_then(|t| {
            let n_frames = t.codec_params.n_frames?;
            let tb = t.codec_params.time_base?;
            Some(n_frames as f64 * tb.numer as f64 / tb.denom as f64)
        });

    Ok(meta)
}
|
||||
|
||||
fn extract_tags(tags: &[symphonia::core::meta::Tag], meta: &mut RawMetadata) {
|
||||
for tag in tags {
|
||||
let value = fix_encoding(tag.value.to_string());
|
||||
if let Some(key) = tag.std_key {
|
||||
match key {
|
||||
StandardTagKey::TrackTitle => {
|
||||
if meta.title.is_none() {
|
||||
meta.title = Some(value);
|
||||
}
|
||||
}
|
||||
StandardTagKey::Artist | StandardTagKey::Performer => {
|
||||
if meta.artist.is_none() {
|
||||
meta.artist = Some(value);
|
||||
}
|
||||
}
|
||||
StandardTagKey::Album => {
|
||||
if meta.album.is_none() {
|
||||
meta.album = Some(value);
|
||||
}
|
||||
}
|
||||
StandardTagKey::TrackNumber => {
|
||||
if meta.track_number.is_none() {
|
||||
meta.track_number = value.parse().ok();
|
||||
}
|
||||
}
|
||||
StandardTagKey::Date | StandardTagKey::OriginalDate => {
|
||||
if meta.year.is_none() {
|
||||
meta.year = value[..4.min(value.len())].parse().ok();
|
||||
}
|
||||
}
|
||||
StandardTagKey::Genre => {
|
||||
if meta.genre.is_none() {
|
||||
meta.genre = Some(value);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Heuristic to fix mojibake (CP1251 bytes interpreted as Latin-1/Windows-1252).
|
||||
fn fix_encoding(s: String) -> String {
|
||||
let bytes: Vec<u8> = s.chars().map(|c| c as u32).filter(|&c| c <= 255).map(|c| c as u8).collect();
|
||||
|
||||
if bytes.len() != s.chars().count() {
|
||||
return s;
|
||||
}
|
||||
|
||||
let has_mojibake = bytes.iter().any(|&b| b >= 0xC0);
|
||||
if !has_mojibake {
|
||||
return s;
|
||||
}
|
||||
|
||||
let (decoded, _, errors) = encoding_rs::WINDOWS_1251.decode(&bytes);
|
||||
if errors {
|
||||
return s;
|
||||
}
|
||||
|
||||
decoded.into_owned()
|
||||
}
|
||||
743
furumi-agent/src/ingest/mod.rs
Normal file
743
furumi-agent/src/ingest/mod.rs
Normal file
@@ -0,0 +1,743 @@
|
||||
pub mod metadata;
|
||||
pub mod normalize;
|
||||
pub mod path_hints;
|
||||
pub mod mover;
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
|
||||
use crate::db;
|
||||
use crate::web::AppState;
|
||||
|
||||
/// Main ingest loop: runs forever, driven by the configured poll interval.
///
/// Each iteration: (1) scan the inbox for new audio/image files,
/// (2) re-process tracks marked pending in the DB (e.g. admin retries),
/// (3) drive pending merge proposals — then sleep. Every step's errors are
/// logged and swallowed so the loop never aborts.
pub async fn run(state: Arc<AppState>) {
    let interval = Duration::from_secs(state.config.poll_interval_secs);
    tracing::info!("Ingest loop started, polling every {}s: {:?}", state.config.poll_interval_secs, state.config.inbox_dir);

    loop {
        match scan_inbox(&state).await {
            Ok(0) => {}
            Ok(count) => tracing::info!(count, "processed new files"),
            Err(e) => tracing::error!(?e, "inbox scan failed"),
        }
        // Re-process pending tracks (e.g. retried from admin UI)
        match reprocess_pending(&state).await {
            Ok(0) => {}
            Ok(count) => tracing::info!(count, "re-processed pending tracks"),
            Err(e) => tracing::error!(?e, "pending re-processing failed"),
        }
        // Process pending merge proposals
        match db::get_pending_merges_for_processing(&state.pool).await {
            Ok(merge_ids) => {
                for merge_id in merge_ids {
                    if let Err(e) = crate::merge::propose_merge(&state, merge_id).await {
                        tracing::error!(id = %merge_id, ?e, "Merge proposal failed");
                        // Best-effort status write; a failure here is
                        // deliberately ignored (already logged above).
                        let _ = db::update_merge_status(&state.pool, merge_id, "error", Some(&e.to_string())).await;
                    }
                }
            }
            Err(e) => tracing::error!(?e, "Failed to load pending merges"),
        }
        tokio::time::sleep(interval).await;
    }
}
|
||||
|
||||
/// Scan the inbox once and return the number of items handled.
///
/// Order matters: audio files first (so albums/tracks exist in the DB),
/// then cover images (which look those albums up), then a sweep of empty
/// inbox directories. Per-file errors are logged, not propagated.
async fn scan_inbox(state: &Arc<AppState>) -> anyhow::Result<usize> {
    let mut count = 0;
    let mut audio_files = Vec::new();
    let mut image_files = Vec::new();
    collect_files(&state.config.inbox_dir, &mut audio_files, &mut image_files).await?;

    if !audio_files.is_empty() || !image_files.is_empty() {
        tracing::info!("Scan found {} audio file(s) and {} image(s) in inbox", audio_files.len(), image_files.len());
    }

    for file_path in &audio_files {
        match process_file(state, file_path).await {
            Ok(true) => count += 1,
            Ok(false) => tracing::debug!(path = ?file_path, "skipped (already known)"),
            Err(e) => tracing::warn!(?e, path = ?file_path, "failed to process file"),
        }
    }

    // Process cover images after audio (so albums exist in DB)
    for image_path in &image_files {
        match process_cover_image(state, image_path).await {
            Ok(true) => {
                tracing::info!(path = ?image_path, "Cover image processed");
                count += 1;
            }
            Ok(false) => tracing::debug!(path = ?image_path, "cover image skipped"),
            Err(e) => tracing::warn!(?e, path = ?image_path, "failed to process cover image"),
        }
    }

    // Clean up empty directories in inbox — only worth doing if something
    // was actually moved out this pass.
    if count > 0 {
        cleanup_empty_dirs(&state.config.inbox_dir).await;
    }

    Ok(count)
}
|
||||
|
||||
/// Re-process pending tracks from DB (e.g. tracks retried via admin UI).
/// These already have raw metadata and path hints stored — just need RAG + LLM.
///
/// For each pending row (batch of up to 10): rebuild RawMetadata/PathHints
/// from the stored columns, fetch fuzzy artist/album matches, run the LLM
/// normalizer, and — when confidence clears the threshold — move the file
/// into storage and finalize the track. Per-track failures set the row to
/// "error" and continue; returns the number of tracks that completed
/// normalization.
async fn reprocess_pending(state: &Arc<AppState>) -> anyhow::Result<usize> {
    let pending = db::list_pending_for_processing(&state.pool, 10).await?;
    if pending.is_empty() {
        return Ok(0);
    }

    let mut count = 0;
    for pt in &pending {
        tracing::info!(id = %pt.id, title = pt.raw_title.as_deref().unwrap_or("?"), "Re-processing pending track");

        db::update_pending_status(&state.pool, pt.id, "processing", None).await?;

        // Build raw metadata and hints from stored DB fields
        let raw_meta = metadata::RawMetadata {
            title: pt.raw_title.clone(),
            artist: pt.raw_artist.clone(),
            album: pt.raw_album.clone(),
            track_number: pt.raw_track_number.map(|n| n as u32),
            year: pt.raw_year.map(|n| n as u32),
            genre: pt.raw_genre.clone(),
            duration_secs: pt.duration_secs,
        };

        let hints = db::PathHints {
            title: pt.path_title.clone(),
            artist: pt.path_artist.clone(),
            album: pt.path_album.clone(),
            year: pt.path_year,
            track_number: pt.path_track_number,
        };

        // RAG lookup — tag values win over path-derived hints as the query.
        let artist_query = raw_meta.artist.as_deref()
            .or(hints.artist.as_deref())
            .unwrap_or("");
        let album_query = raw_meta.album.as_deref()
            .or(hints.album.as_deref())
            .unwrap_or("");

        let similar_artists = if !artist_query.is_empty() {
            db::find_similar_artists(&state.pool, artist_query, 5).await.unwrap_or_default()
        } else {
            Vec::new()
        };

        let similar_albums = if !album_query.is_empty() {
            db::find_similar_albums(&state.pool, album_query, 5).await.unwrap_or_default()
        } else {
            Vec::new()
        };

        // LLM normalization (no folder context available for reprocessing from DB)
        match normalize::normalize(state, &raw_meta, &hints, &similar_artists, &similar_albums, None).await {
            Ok(normalized) => {
                let confidence = normalized.confidence.unwrap_or(0.0);
                let status = if confidence >= state.config.confidence_threshold {
                    "approved"
                } else {
                    "review"
                };

                tracing::info!(
                    id = %pt.id,
                    norm_artist = normalized.artist.as_deref().unwrap_or("-"),
                    norm_title = normalized.title.as_deref().unwrap_or("-"),
                    confidence,
                    status,
                    "Re-processing complete"
                );

                db::update_pending_normalized(&state.pool, pt.id, status, &normalized, None).await?;

                if status == "approved" {
                    let artist = normalized.artist.as_deref().unwrap_or("Unknown Artist");
                    let album = normalized.album.as_deref().unwrap_or("Unknown Album");
                    let title = normalized.title.as_deref().unwrap_or("Unknown Title");
                    let source = std::path::Path::new(&pt.inbox_path);
                    let ext = source.extension().and_then(|e| e.to_str()).unwrap_or("flac");
                    let track_num = normalized.track_number.unwrap_or(0);

                    let dest_filename = if track_num > 0 {
                        format!("{:02} - {}.{}", track_num, sanitize_filename(title), ext)
                    } else {
                        format!("{}.{}", sanitize_filename(title), ext)
                    };

                    // Check if already moved — a previous attempt may have
                    // moved the file but failed before finalizing the DB row.
                    let dest = state.config.storage_dir
                        .join(sanitize_filename(artist))
                        .join(sanitize_filename(album))
                        .join(&dest_filename);

                    let (storage_path, was_merged) = if dest.exists() && !source.exists() {
                        // Already in storage from an earlier attempt; reuse it.
                        (dest.to_string_lossy().to_string(), false)
                    } else if source.exists() {
                        match mover::move_to_storage(
                            &state.config.storage_dir, artist, album, &dest_filename, source,
                        ).await {
                            Ok(mover::MoveOutcome::Moved(p)) => (p.to_string_lossy().to_string(), false),
                            Ok(mover::MoveOutcome::Merged(p)) => (p.to_string_lossy().to_string(), true),
                            Err(e) => {
                                tracing::error!(id = %pt.id, ?e, "Failed to move file");
                                db::update_pending_status(&state.pool, pt.id, "error", Some(&e.to_string())).await?;
                                continue;
                            }
                        }
                    } else {
                        tracing::error!(id = %pt.id, "Source file missing: {:?}", source);
                        db::update_pending_status(&state.pool, pt.id, "error", Some("Source file missing")).await?;
                        continue;
                    };

                    match db::approve_and_finalize(&state.pool, pt.id, &storage_path).await {
                        Ok(track_id) => {
                            if was_merged {
                                // Best-effort: a merged move is recorded but
                                // must not fail the whole track.
                                let _ = db::update_pending_status(&state.pool, pt.id, "merged", None).await;
                            }
                            tracing::info!(id = %pt.id, track_id, "Track finalized");
                        }
                        Err(e) => tracing::error!(id = %pt.id, ?e, "Failed to finalize"),
                    }
                }

                // NOTE(review): count is incremented even when finalization
                // failed above — it counts successful normalizations, not
                // successful moves. Confirm this is intended.
                count += 1;
            }
            Err(e) => {
                tracing::error!(id = %pt.id, ?e, "LLM normalization failed");
                db::update_pending_status(&state.pool, pt.id, "error", Some(&e.to_string())).await?;
            }
        }
    }

    Ok(count)
}
|
||||
|
||||
/// Recursively remove empty directories inside the inbox.
/// Does not remove the inbox root itself.
///
/// Returns `true` when `dir` contains no files and all of its
/// subdirectories were empty. NOTE(review): a child that was empty but
/// failed to be removed still counts as empty, so the parent's own removal
/// may then fail (logged) — looks like intentional best-effort; confirm.
async fn cleanup_empty_dirs(dir: &std::path::Path) -> bool {
    let mut entries = match tokio::fs::read_dir(dir).await {
        Ok(e) => e,
        Err(_) => return false,
    };

    let mut is_empty = true;
    while let Ok(Some(entry)) = entries.next_entry().await {
        let ft = match entry.file_type().await {
            Ok(ft) => ft,
            // Unreadable entry: be conservative, treat the dir as non-empty.
            Err(_) => { is_empty = false; continue; }
        };
        if ft.is_dir() {
            // Box::pin is required for async recursion (otherwise the future
            // type would be infinitely sized).
            let child_empty = Box::pin(cleanup_empty_dirs(&entry.path())).await;
            if child_empty {
                if let Err(e) = tokio::fs::remove_dir(&entry.path()).await {
                    tracing::warn!(?e, path = ?entry.path(), "Failed to remove empty directory")
                } else {
                    tracing::info!(path = ?entry.path(), "Removed empty inbox directory");
                }
            } else {
                is_empty = false;
            }
        } else {
            is_empty = false;
        }
    }
    is_empty
}
|
||||
|
||||
/// Recursively collect all audio files and image files under a directory.
///
/// Hidden entries (names starting with '.') are skipped entirely, including
/// hidden subdirectories. Audio paths are appended to `audio`, recognized
/// cover-art filenames to `images`; anything else is ignored.
async fn collect_files(dir: &std::path::Path, audio: &mut Vec<std::path::PathBuf>, images: &mut Vec<std::path::PathBuf>) -> anyhow::Result<()> {
    let mut entries = tokio::fs::read_dir(dir).await?;
    while let Some(entry) = entries.next_entry().await? {
        let name = entry.file_name().to_string_lossy().into_owned();
        if name.starts_with('.') {
            continue;
        }
        let ft = entry.file_type().await?;
        if ft.is_dir() {
            // Box::pin enables async recursion.
            Box::pin(collect_files(&entry.path(), audio, images)).await?;
        } else if ft.is_file() {
            if is_audio_file(&name) {
                audio.push(entry.path());
            } else if is_cover_image(&name) {
                images.push(entry.path());
            }
        }
    }
    Ok(())
}
|
||||
|
||||
/// Returns true if `name` has a recognized audio file extension
/// (case-insensitive).
///
/// Uses `Path::extension`, so a bare name like "mp3" (no dot) is NOT
/// treated as audio — the previous `rsplit('.')` approach accepted it.
/// "alac" is included to stay consistent with the `audio_extensions` list
/// used for folder context in `process_file`.
fn is_audio_file(name: &str) -> bool {
    let ext = std::path::Path::new(name)
        .extension()
        .and_then(|e| e.to_str())
        .map(|e| e.to_lowercase())
        .unwrap_or_default();
    matches!(
        ext.as_str(),
        "mp3" | "flac" | "ogg" | "opus" | "aac" | "m4a" | "alac" | "wav" | "ape" | "wv" | "wma"
            | "tta" | "aiff" | "aif"
    )
}
|
||||
|
||||
/// Heuristic: is this filename album artwork worth importing?
///
/// The extension must be a common raster-image format and the file stem must
/// be one of the conventional artwork names ("cover", "folder", "front", ...).
fn is_cover_image(name: &str) -> bool {
    let extension = name.rsplit('.').next().unwrap_or("").to_lowercase();
    let is_raster = matches!(
        extension.as_str(),
        "jpg" | "jpeg" | "png" | "webp" | "bmp" | "gif"
    );
    if !is_raster {
        return false;
    }

    let stem = std::path::Path::new(name)
        .file_stem()
        .and_then(|s| s.to_str())
        .map(str::to_lowercase)
        .unwrap_or_default();

    const ART_NAMES: [&str; 14] = [
        "cover", "front", "folder", "back", "booklet", "inlay", "disc", "cd",
        "album", "artwork", "art", "scan", "thumb", "thumbnail",
    ];
    ART_NAMES.contains(&stem.as_str())
}
|
||||
|
||||
/// Map an artwork filename to its image-type label for the database.
/// Anything not specifically recognized defaults to "cover".
fn classify_image(name: &str) -> &'static str {
    let stem = std::path::Path::new(name)
        .file_stem()
        .and_then(|s| s.to_str())
        .map(str::to_lowercase)
        .unwrap_or_default();

    if stem == "back" {
        "back"
    } else if matches!(stem.as_str(), "booklet" | "inlay" | "scan") {
        "booklet"
    } else if stem == "disc" || stem == "cd" {
        "disc"
    } else {
        "cover"
    }
}
|
||||
|
||||
/// Best-effort MIME type from the file extension (case-insensitive);
/// unknown extensions fall back to "application/octet-stream".
fn mime_for_image(name: &str) -> &'static str {
    let ext = name.rsplit('.').next().unwrap_or("").to_lowercase();
    const TABLE: [(&str, &str); 6] = [
        ("jpg", "image/jpeg"),
        ("jpeg", "image/jpeg"),
        ("png", "image/png"),
        ("webp", "image/webp"),
        ("gif", "image/gif"),
        ("bmp", "image/bmp"),
    ];
    TABLE
        .iter()
        .find(|(e, _)| *e == ext)
        .map(|(_, mime)| *mime)
        .unwrap_or("application/octet-stream")
}
|
||||
|
||||
/// Full ingest pipeline for one inbox audio file.
///
/// Steps, in order: blake3 hash + dedup check → Symphonia tag extraction →
/// path-hint parsing → insert "pending" DB row → fuzzy artist/album lookup
/// (RAG context) → folder context → LLM normalization. High-confidence
/// results are moved into storage and finalized; low-confidence ones stay
/// in the review queue. Returns Ok(false) when the file hash is already
/// known, Ok(true) otherwise (even if later steps only logged an error and
/// marked the row "error").
async fn process_file(state: &Arc<AppState>, file_path: &std::path::Path) -> anyhow::Result<bool> {
    let filename = file_path.file_name().and_then(|n| n.to_str()).unwrap_or("?");
    tracing::info!(file = filename, "Processing new file: {:?}", file_path);

    // Compute file hash for dedup. Reads the whole file into memory on a
    // blocking thread; NOTE(review): large files are fully buffered here.
    tracing::info!(file = filename, "Computing file hash...")
    let path_clone = file_path.to_path_buf();
    let (hash, file_size) = tokio::task::spawn_blocking(move || -> anyhow::Result<(String, i64)> {
        let data = std::fs::read(&path_clone)?;
        let hash = blake3::hash(&data).to_hex().to_string();
        let size = data.len() as i64;
        Ok((hash, size))
    })
    .await??;
    tracing::info!(file = filename, hash = &hash[..16], size = file_size, "File hashed");

    // Skip if already known
    if db::file_hash_exists(&state.pool, &hash).await? {
        tracing::info!(file = filename, "Skipping: file hash already exists in database");
        return Ok(false);
    }

    // Extract raw metadata (blocking Symphonia work off the async runtime)
    tracing::info!(file = filename, "Extracting metadata with Symphonia...");
    let path_for_meta = file_path.to_path_buf();
    let raw_meta = tokio::task::spawn_blocking(move || metadata::extract(&path_for_meta)).await??;
    tracing::info!(
        file = filename,
        artist = raw_meta.artist.as_deref().unwrap_or("-"),
        title = raw_meta.title.as_deref().unwrap_or("-"),
        album = raw_meta.album.as_deref().unwrap_or("-"),
        "Raw metadata extracted"
    );

    // Parse path hints relative to inbox dir
    let relative = file_path.strip_prefix(&state.config.inbox_dir).unwrap_or(file_path);
    let hints = path_hints::parse(relative);
    if hints.artist.is_some() || hints.album.is_some() || hints.year.is_some() {
        tracing::info!(
            file = filename,
            path_artist = hints.artist.as_deref().unwrap_or("-"),
            path_album = hints.album.as_deref().unwrap_or("-"),
            path_year = ?hints.year,
            "Path hints parsed"
        );
    }

    let inbox_path_str = file_path.to_string_lossy().to_string();

    // Insert pending record
    tracing::info!(file = filename, "Inserting pending track record...");
    let pending_id = db::insert_pending(
        &state.pool,
        &inbox_path_str,
        &hash,
        file_size,
        &db::RawFields {
            title: raw_meta.title.clone(),
            artist: raw_meta.artist.clone(),
            album: raw_meta.album.clone(),
            year: raw_meta.year.map(|y| y as i32),
            track_number: raw_meta.track_number.map(|t| t as i32),
            genre: raw_meta.genre.clone(),
        },
        &db::PathHints {
            title: hints.title.clone(),
            artist: hints.artist.clone(),
            album: hints.album.clone(),
            year: hints.year,
            track_number: hints.track_number,
        },
        raw_meta.duration_secs,
    )
    .await?;

    db::update_pending_status(&state.pool, pending_id, "processing", None).await?;

    // RAG: find similar entries in DB — tag values win over path hints.
    let artist_query = raw_meta.artist.as_deref()
        .or(hints.artist.as_deref())
        .unwrap_or("");
    let album_query = raw_meta.album.as_deref()
        .or(hints.album.as_deref())
        .unwrap_or("");

    tracing::info!(file = filename, "Searching database for similar artists/albums...");
    let similar_artists = if !artist_query.is_empty() {
        db::find_similar_artists(&state.pool, artist_query, 5).await.unwrap_or_default()
    } else {
        Vec::new()
    };

    let similar_albums = if !album_query.is_empty() {
        db::find_similar_albums(&state.pool, album_query, 5).await.unwrap_or_default()
    } else {
        Vec::new()
    };

    if !similar_artists.is_empty() {
        let names: Vec<&str> = similar_artists.iter().map(|a| a.name.as_str()).collect();
        tracing::info!(file = filename, matches = ?names, "Found similar artists in DB");
    }
    if !similar_albums.is_empty() {
        let names: Vec<&str> = similar_albums.iter().map(|a| a.name.as_str()).collect();
        tracing::info!(file = filename, matches = ?names, "Found similar albums in DB");
    }

    // Build folder context for the LLM: sibling audio filenames and count,
    // used by the prompt's release-type heuristics.
    let audio_extensions = ["flac", "mp3", "ogg", "wav", "aac", "m4a", "opus", "wma", "ape", "alac"];
    let folder_ctx = {
        let folder = file_path.parent().unwrap_or(file_path);
        let mut folder_files: Vec<String> = std::fs::read_dir(folder)
            .ok()
            .map(|rd| {
                rd.filter_map(|e| e.ok())
                    .filter_map(|e| {
                        let name = e.file_name().to_string_lossy().into_owned();
                        let ext = name.rsplit('.').next().unwrap_or("").to_lowercase();
                        if audio_extensions.contains(&ext.as_str()) { Some(name) } else { None }
                    })
                    .collect()
            })
            .unwrap_or_default();
        folder_files.sort();
        let track_count = folder_files.len();
        let folder_path = folder
            .strip_prefix(&state.config.inbox_dir)
            .unwrap_or(folder)
            .to_string_lossy()
            .into_owned();
        normalize::FolderContext { folder_path, folder_files, track_count }
    };

    // Call LLM for normalization
    tracing::info!(file = filename, model = %state.config.ollama_model, "Sending to LLM for normalization...");
    match normalize::normalize(state, &raw_meta, &hints, &similar_artists, &similar_albums, Some(&folder_ctx)).await {
        Ok(normalized) => {
            let confidence = normalized.confidence.unwrap_or(0.0);
            let status = if confidence >= state.config.confidence_threshold {
                "approved"
            } else {
                "review"
            };

            tracing::info!(
                file = filename,
                norm_artist = normalized.artist.as_deref().unwrap_or("-"),
                norm_title = normalized.title.as_deref().unwrap_or("-"),
                norm_album = normalized.album.as_deref().unwrap_or("-"),
                confidence,
                status,
                notes = normalized.notes.as_deref().unwrap_or("-"),
                "LLM normalization complete"
            );
            if !normalized.featured_artists.is_empty() {
                tracing::info!(
                    file = filename,
                    featured = ?normalized.featured_artists,
                    "Featured artists detected"
                );
            }

            db::update_pending_normalized(&state.pool, pending_id, status, &normalized, None).await?;

            // Auto-approve: move file to storage
            if status == "approved" {
                let artist = normalized.artist.as_deref().unwrap_or("Unknown Artist");
                let album = normalized.album.as_deref().unwrap_or("Unknown Album");
                let title = normalized.title.as_deref().unwrap_or("Unknown Title");
                let ext = file_path.extension().and_then(|e| e.to_str()).unwrap_or("flac");
                let track_num = normalized.track_number.unwrap_or(0);

                // "NN - Title.ext" when a track number is known.
                let dest_filename = if track_num > 0 {
                    format!("{:02} - {}.{}", track_num, sanitize_filename(title), ext)
                } else {
                    format!("{}.{}", sanitize_filename(title), ext)
                };

                tracing::info!(
                    file = filename,
                    dest_artist = artist,
                    dest_album = album,
                    dest_filename = %dest_filename,
                    "Auto-approved, moving to storage..."
                );

                match mover::move_to_storage(
                    &state.config.storage_dir,
                    artist,
                    album,
                    &dest_filename,
                    file_path,
                )
                .await
                {
                    Ok(outcome) => {
                        let (storage_path, was_merged) = match outcome {
                            mover::MoveOutcome::Moved(p) => (p, false),
                            mover::MoveOutcome::Merged(p) => (p, true),
                        };
                        let rel_path = storage_path.to_string_lossy().to_string();
                        match db::approve_and_finalize(&state.pool, pending_id, &rel_path).await {
                            Ok(track_id) => {
                                if was_merged {
                                    // Best-effort status marker for merged files.
                                    let _ = db::update_pending_status(&state.pool, pending_id, "merged", None).await;
                                }
                                tracing::info!(file = filename, track_id, storage = %rel_path, "Track finalized in database");
                            }
                            Err(e) => {
                                tracing::error!(file = filename, ?e, "Failed to finalize track in DB after move");
                            }
                        }
                    }
                    Err(e) => {
                        tracing::error!(file = filename, ?e, "Failed to move file to storage");
                        db::update_pending_status(&state.pool, pending_id, "error", Some(&e.to_string())).await?;
                    }
                }
            } else {
                tracing::info!(file = filename, confidence, "Sent to review queue (below threshold {})", state.config.confidence_threshold);
            }
        }
        Err(e) => {
            tracing::error!(file = filename, ?e, "LLM normalization failed");
            db::update_pending_status(&state.pool, pending_id, "error", Some(&e.to_string())).await?;
        }
    }

    Ok(true)
}
|
||||
|
||||
/// Process a cover image found in the inbox.
/// Uses path hints (Artist/Album/) to find the matching album in the DB,
/// then copies the image to the album's storage folder.
///
/// Returns Ok(false) for every skip case (duplicate hash, no album folder,
/// no DB match, album has no stored tracks yet); Ok(true) once the image is
/// in storage and recorded via `insert_album_image`.
async fn process_cover_image(state: &Arc<AppState>, image_path: &std::path::Path) -> anyhow::Result<bool> {
    let filename = image_path.file_name().and_then(|n| n.to_str()).unwrap_or("?");

    // Hash for dedup (whole file buffered on a blocking thread).
    let path_clone = image_path.to_path_buf();
    let (hash, file_size) = tokio::task::spawn_blocking(move || -> anyhow::Result<(String, i64)> {
        let data = std::fs::read(&path_clone)?;
        let hash = blake3::hash(&data).to_hex().to_string();
        let size = data.len() as i64;
        Ok((hash, size))
    })
    .await??;

    if db::image_hash_exists(&state.pool, &hash).await? {
        return Ok(false);
    }

    // Derive artist/album from path hints
    let relative = image_path.strip_prefix(&state.config.inbox_dir).unwrap_or(image_path);
    let components: Vec<&str> = relative
        .components()
        .filter_map(|c| c.as_os_str().to_str())
        .collect();

    tracing::info!(file = filename, path = ?relative, components = components.len(), "Processing cover image");

    // Supported structures:
    //   Artist/Album/image.jpg (3+ components)
    //   Album/image.jpg (2 components — album dir + image)
    if components.len() < 2 {
        tracing::info!(file = filename, "Cover image not inside an album folder, skipping");
        return Ok(false);
    }

    // The directory directly containing the image is always the album hint
    let album_raw = components[components.len() - 2];
    let path_artist = if components.len() >= 3 {
        Some(components[components.len() - 3])
    } else {
        None
    };

    // Strip a trailing "(year)" style suffix from the folder name; the year
    // itself is not needed here.
    let (album_name, _) = path_hints::parse_album_year_public(album_raw);

    tracing::info!(
        file = filename,
        path_artist = path_artist.unwrap_or("-"),
        album_hint = %album_name,
        "Looking up album in database..."
    );

    // Try to find album in DB — try with artist if available, then without
    let album_id = if let Some(artist) = path_artist {
        find_album_for_cover(&state.pool, artist, &album_name).await?
    } else {
        None
    };

    // If not found with artist, try fuzzy album name match across all artists
    let album_id = match album_id {
        Some(id) => Some(id),
        None => {
            let similar_albums = db::find_similar_albums(&state.pool, &album_name, 3).await.unwrap_or_default();
            if let Some(best) = similar_albums.first() {
                // 0.5 similarity cutoff for accepting the fuzzy best match.
                if best.similarity > 0.5 {
                    tracing::info!(file = filename, album = %best.name, similarity = best.similarity, "Matched album by fuzzy search");
                    Some(best.id)
                } else {
                    None
                }
            } else {
                None
            }
        }
    };

    let album_id = match album_id {
        Some(id) => id,
        None => {
            tracing::info!(
                file = filename,
                artist = path_artist.unwrap_or("-"),
                album = %album_name,
                "No matching album found in DB, skipping cover"
            );
            return Ok(false);
        }
    };

    // Determine image type and move to storage
    let image_type = classify_image(filename);
    let mime = mime_for_image(filename);

    // Get album's storage path from any track in that album
    let storage_dir_opt: Option<(String,)> = sqlx::query_as(
        "SELECT storage_path FROM tracks WHERE album_id = $1 LIMIT 1"
    )
    .bind(album_id)
    .fetch_optional(&state.pool)
    .await?;

    let album_storage_dir = match storage_dir_opt {
        Some((track_path,)) => {
            let p = std::path::Path::new(&track_path);
            match p.parent() {
                Some(dir) if dir.is_dir() => dir.to_path_buf(),
                _ => {
                    tracing::warn!(file = filename, track_path = %track_path, "Track storage path has no valid parent dir");
                    return Ok(false);
                }
            }
        }
        None => {
            tracing::info!(file = filename, album_id, "Album has no tracks in storage yet, skipping cover");
            return Ok(false);
        }
    };
    tracing::info!(file = filename, dest_dir = ?album_storage_dir, "Will copy cover to album storage dir");

    let dest = album_storage_dir.join(filename);
    if !dest.exists() {
        // Move or copy image — rename fails across filesystems, in which
        // case fall back to copy + delete.
        match tokio::fs::rename(image_path, &dest).await {
            Ok(()) => {}
            Err(_) => {
                tokio::fs::copy(image_path, &dest).await?;
                tokio::fs::remove_file(image_path).await?;
            }
        }
    }

    let dest_str = dest.to_string_lossy().to_string();
    db::insert_album_image(&state.pool, album_id, image_type, &dest_str, &hash, mime, file_size).await?;

    tracing::info!(
        file = filename,
        album_id,
        image_type,
        dest = %dest_str,
        "Album image saved"
    );

    Ok(true)
}
|
||||
|
||||
/// Find an album in DB matching the path-derived artist and album name.
|
||||
/// Tries exact match, then fuzzy artist + exact album, then fuzzy artist + fuzzy album.
|
||||
async fn find_album_for_cover(pool: &sqlx::PgPool, path_artist: &str, album_name: &str) -> anyhow::Result<Option<i64>> {
|
||||
// Try exact match first
|
||||
if let Some(id) = db::find_album_id(pool, path_artist, album_name).await? {
|
||||
return Ok(Some(id));
|
||||
}
|
||||
|
||||
// Try fuzzy artist, then exact or fuzzy album under that artist
|
||||
let similar_artists = db::find_similar_artists(pool, path_artist, 5).await.unwrap_or_default();
|
||||
for artist in &similar_artists {
|
||||
if artist.similarity < 0.3 {
|
||||
continue;
|
||||
}
|
||||
// Exact album under fuzzy artist
|
||||
if let Some(id) = db::find_album_id(pool, &artist.name, album_name).await? {
|
||||
return Ok(Some(id));
|
||||
}
|
||||
// Fuzzy album under this artist
|
||||
let similar_albums = db::find_similar_albums(pool, album_name, 3).await.unwrap_or_default();
|
||||
for album in &similar_albums {
|
||||
if album.artist_id == artist.id && album.similarity > 0.4 {
|
||||
return Ok(Some(album.id));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
/// Remove characters that are unsafe for filenames.
///
/// Replaces path separators, reserved punctuation and NUL with `_`, then
/// trims surrounding whitespace. NUL handling matches `sanitize_dir_name`
/// in the mover module; NUL is invalid in filenames on every supported OS.
fn sanitize_filename(name: &str) -> String {
    name.chars()
        .map(|c| match c {
            '/' | '\\' | ':' | '*' | '?' | '"' | '<' | '>' | '|' | '\0' => '_',
            _ => c,
        })
        .collect::<String>()
        .trim()
        .to_owned()
}
|
||||
67
furumi-agent/src/ingest/mover.rs
Normal file
67
furumi-agent/src/ingest/mover.rs
Normal file
@@ -0,0 +1,67 @@
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
/// Result of `move_to_storage`: what happened to the inbox file.
/// Both variants carry the final path of the file in permanent storage.
pub enum MoveOutcome {
    /// File was moved/renamed to destination.
    Moved(PathBuf),
    /// Destination already existed; inbox duplicate was removed.
    Merged(PathBuf),
}
|
||||
|
||||
/// Move a file from inbox to the permanent storage directory.
|
||||
///
|
||||
/// Creates the directory structure: `storage_dir/artist/album/filename`
|
||||
/// Returns the full path of the moved file.
|
||||
///
|
||||
/// If `rename` fails (cross-device), falls back to copy + remove.
|
||||
/// If the destination already exists the inbox copy is removed and
|
||||
/// `MoveOutcome::Merged` is returned instead of an error.
|
||||
pub async fn move_to_storage(
|
||||
storage_dir: &Path,
|
||||
artist: &str,
|
||||
album: &str,
|
||||
filename: &str,
|
||||
source: &Path,
|
||||
) -> anyhow::Result<MoveOutcome> {
|
||||
let artist_dir = sanitize_dir_name(artist);
|
||||
let album_dir = sanitize_dir_name(album);
|
||||
|
||||
let dest_dir = storage_dir.join(&artist_dir).join(&album_dir);
|
||||
tokio::fs::create_dir_all(&dest_dir).await?;
|
||||
|
||||
let dest = dest_dir.join(filename);
|
||||
|
||||
// File already at destination — remove the inbox duplicate
|
||||
if dest.exists() {
|
||||
if source.exists() {
|
||||
tokio::fs::remove_file(source).await?;
|
||||
tracing::info!(from = ?source, to = ?dest, "merged duplicate into existing storage file");
|
||||
}
|
||||
return Ok(MoveOutcome::Merged(dest));
|
||||
}
|
||||
|
||||
// Try atomic rename first (same filesystem)
|
||||
match tokio::fs::rename(source, &dest).await {
|
||||
Ok(()) => {}
|
||||
Err(_) => {
|
||||
// Cross-device: copy then remove
|
||||
tokio::fs::copy(source, &dest).await?;
|
||||
tokio::fs::remove_file(source).await?;
|
||||
}
|
||||
}
|
||||
|
||||
tracing::info!(from = ?source, to = ?dest, "moved file to storage");
|
||||
Ok(MoveOutcome::Moved(dest))
|
||||
}
|
||||
|
||||
/// Remove characters that are unsafe for directory names.
fn sanitize_dir_name(name: &str) -> String {
    let mut cleaned = String::with_capacity(name.len());
    for ch in name.chars() {
        // Replace path separators, reserved punctuation and NUL with '_'.
        let safe = match ch {
            '/' | '\\' | ':' | '*' | '?' | '"' | '<' | '>' | '|' | '\0' => '_',
            other => other,
        };
        cleaned.push(safe);
    }
    // Strip surrounding whitespace first, then any leading/trailing dots
    // (so "Album." becomes "Album" and ".hidden" becomes "hidden").
    cleaned.trim().trim_matches('.').to_owned()
}
|
||||
245
furumi-agent/src/ingest/normalize.rs
Normal file
245
furumi-agent/src/ingest/normalize.rs
Normal file
@@ -0,0 +1,245 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::db::{NormalizedFields, SimilarAlbum, SimilarArtist};
|
||||
use crate::web::AppState;
|
||||
|
||||
use super::metadata::RawMetadata;
|
||||
|
||||
/// Sibling-file context for the folder an audio file was found in;
/// passed into the LLM prompt so it can reason about the release as a whole.
#[derive(Debug)]
pub struct FolderContext {
    pub folder_path: String, // path relative to inbox_dir (e.g. "Kunteynir/Синглы/Пьюк")
    pub folder_files: Vec<String>, // audio filenames in the same folder
    pub track_count: usize, // number of audio files in folder
}
|
||||
|
||||
/// Build the user message with all context and call Ollama for normalization.
|
||||
pub async fn normalize(
|
||||
state: &Arc<AppState>,
|
||||
raw: &RawMetadata,
|
||||
hints: &crate::db::PathHints,
|
||||
similar_artists: &[SimilarArtist],
|
||||
similar_albums: &[SimilarAlbum],
|
||||
folder_ctx: Option<&FolderContext>,
|
||||
) -> anyhow::Result<NormalizedFields> {
|
||||
let user_message = build_user_message(raw, hints, similar_artists, similar_albums, folder_ctx);
|
||||
|
||||
let response = call_ollama(
|
||||
&state.config.ollama_url,
|
||||
&state.config.ollama_model,
|
||||
&state.system_prompt,
|
||||
&user_message,
|
||||
state.config.ollama_auth.as_deref(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
parse_response(&response)
|
||||
}
|
||||
|
||||
fn build_user_message(
|
||||
raw: &RawMetadata,
|
||||
hints: &crate::db::PathHints,
|
||||
similar_artists: &[SimilarArtist],
|
||||
similar_albums: &[SimilarAlbum],
|
||||
folder_ctx: Option<&FolderContext>,
|
||||
) -> String {
|
||||
let mut msg = String::from("## Raw metadata from file tags\n");
|
||||
|
||||
if let Some(v) = &raw.title {
|
||||
msg.push_str(&format!("Title: \"{}\"\n", v));
|
||||
}
|
||||
if let Some(v) = &raw.artist {
|
||||
msg.push_str(&format!("Artist: \"{}\"\n", v));
|
||||
}
|
||||
if let Some(v) = &raw.album {
|
||||
msg.push_str(&format!("Album: \"{}\"\n", v));
|
||||
}
|
||||
if let Some(v) = raw.year {
|
||||
msg.push_str(&format!("Year: {}\n", v));
|
||||
}
|
||||
if let Some(v) = raw.track_number {
|
||||
msg.push_str(&format!("Track number: {}\n", v));
|
||||
}
|
||||
if let Some(v) = &raw.genre {
|
||||
msg.push_str(&format!("Genre: \"{}\"\n", v));
|
||||
}
|
||||
|
||||
msg.push_str("\n## Hints from file path\n");
|
||||
if let Some(v) = &hints.artist {
|
||||
msg.push_str(&format!("Path artist: \"{}\"\n", v));
|
||||
}
|
||||
if let Some(v) = &hints.album {
|
||||
msg.push_str(&format!("Path album: \"{}\"\n", v));
|
||||
}
|
||||
if let Some(v) = hints.year {
|
||||
msg.push_str(&format!("Path year: {}\n", v));
|
||||
}
|
||||
if let Some(v) = hints.track_number {
|
||||
msg.push_str(&format!("Path track number: {}\n", v));
|
||||
}
|
||||
if let Some(v) = &hints.title {
|
||||
msg.push_str(&format!("Path title: \"{}\"\n", v));
|
||||
}
|
||||
|
||||
if !similar_artists.is_empty() {
|
||||
msg.push_str("\n## Existing artists in database (similar matches)\n");
|
||||
for a in similar_artists {
|
||||
msg.push_str(&format!("- \"{}\" (similarity: {:.2})\n", a.name, a.similarity));
|
||||
}
|
||||
}
|
||||
|
||||
if !similar_albums.is_empty() {
|
||||
msg.push_str("\n## Existing albums in database (similar matches)\n");
|
||||
for a in similar_albums {
|
||||
let year_str = a.year.map(|y| format!(", year: {}", y)).unwrap_or_default();
|
||||
msg.push_str(&format!("- \"{}\" (similarity: {:.2}{})\n", a.name, a.similarity, year_str));
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(ctx) = folder_ctx {
|
||||
msg.push_str("\n## Folder context\n");
|
||||
msg.push_str(&format!("Folder path: \"{}\"\n", ctx.folder_path));
|
||||
msg.push_str(&format!("Track count in folder: {}\n", ctx.track_count));
|
||||
if !ctx.folder_files.is_empty() {
|
||||
msg.push_str("Files in folder:\n");
|
||||
for f in &ctx.folder_files {
|
||||
msg.push_str(&format!(" - {}\n", f));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
msg
|
||||
}
|
||||
|
||||
/// JSON body for Ollama's `/api/chat` endpoint.
#[derive(Serialize)]
struct OllamaRequest {
    model: String,
    messages: Vec<OllamaMessage>,
    format: String, // set to "json" by `call_ollama` to force JSON-only output
    stream: bool, // set to false: a single response object, not an event stream
    options: OllamaOptions,
}

/// One chat message; this module only sends "system" and "user" roles.
#[derive(Serialize)]
struct OllamaMessage {
    role: String,
    content: String,
}

/// Generation options; only temperature is set (kept low by the caller).
#[derive(Serialize)]
struct OllamaOptions {
    temperature: f64,
}

/// Subset of the non-streaming chat response this module reads.
#[derive(Deserialize)]
struct OllamaResponse {
    message: OllamaResponseMessage,
}

/// The assistant message inside an Ollama chat response.
#[derive(Deserialize)]
struct OllamaResponseMessage {
    content: String,
}
|
||||
|
||||
pub async fn call_ollama(
|
||||
base_url: &str,
|
||||
model: &str,
|
||||
system_prompt: &str,
|
||||
user_message: &str,
|
||||
auth: Option<&str>,
|
||||
) -> anyhow::Result<String> {
|
||||
let client = reqwest::Client::builder()
|
||||
.timeout(std::time::Duration::from_secs(120))
|
||||
.build()?;
|
||||
|
||||
let request = OllamaRequest {
|
||||
model: model.to_owned(),
|
||||
messages: vec![
|
||||
OllamaMessage {
|
||||
role: "system".to_owned(),
|
||||
content: system_prompt.to_owned(),
|
||||
},
|
||||
OllamaMessage {
|
||||
role: "user".to_owned(),
|
||||
content: user_message.to_owned(),
|
||||
},
|
||||
],
|
||||
format: "json".to_owned(),
|
||||
stream: false,
|
||||
options: OllamaOptions { temperature: 0.1 },
|
||||
};
|
||||
|
||||
let url = format!("{}/api/chat", base_url.trim_end_matches('/'));
|
||||
tracing::info!(%url, model, prompt_len = user_message.len(), "Calling Ollama API...");
|
||||
|
||||
let start = std::time::Instant::now();
|
||||
let mut req = client.post(&url).json(&request);
|
||||
if let Some(auth_header) = auth {
|
||||
req = req.header("Authorization", auth_header);
|
||||
}
|
||||
let resp = req.send().await?;
|
||||
let elapsed = start.elapsed();
|
||||
|
||||
if !resp.status().is_success() {
|
||||
let status = resp.status();
|
||||
let body = resp.text().await.unwrap_or_default();
|
||||
tracing::error!(%status, body = &body[..body.len().min(500)], "Ollama API error");
|
||||
anyhow::bail!("Ollama returned {}: {}", status, body);
|
||||
}
|
||||
|
||||
let ollama_resp: OllamaResponse = resp.json().await?;
|
||||
tracing::info!(
|
||||
elapsed_ms = elapsed.as_millis() as u64,
|
||||
response_len = ollama_resp.message.content.len(),
|
||||
"Ollama response received"
|
||||
);
|
||||
tracing::debug!(raw_response = %ollama_resp.message.content, "LLM raw output");
|
||||
Ok(ollama_resp.message.content)
|
||||
}
|
||||
|
||||
/// Parse the LLM JSON response into NormalizedFields.
|
||||
/// Handles both clean JSON and JSON wrapped in markdown code fences.
|
||||
fn parse_response(response: &str) -> anyhow::Result<NormalizedFields> {
|
||||
let cleaned = response.trim();
|
||||
|
||||
// Strip markdown code fences if present
|
||||
let json_str = if cleaned.starts_with("```") {
|
||||
let start = cleaned.find('{').unwrap_or(0);
|
||||
let end = cleaned.rfind('}').map(|i| i + 1).unwrap_or(cleaned.len());
|
||||
&cleaned[start..end]
|
||||
} else {
|
||||
cleaned
|
||||
};
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct LlmOutput {
|
||||
artist: Option<String>,
|
||||
album: Option<String>,
|
||||
title: Option<String>,
|
||||
year: Option<i32>,
|
||||
track_number: Option<i32>,
|
||||
genre: Option<String>,
|
||||
#[serde(default)]
|
||||
featured_artists: Vec<String>,
|
||||
release_type: Option<String>,
|
||||
confidence: Option<f64>,
|
||||
notes: Option<String>,
|
||||
}
|
||||
|
||||
let parsed: LlmOutput = serde_json::from_str(json_str)
|
||||
.map_err(|e| anyhow::anyhow!("Failed to parse LLM response as JSON: {} — raw: {}", e, response))?;
|
||||
|
||||
Ok(NormalizedFields {
|
||||
title: parsed.title,
|
||||
artist: parsed.artist,
|
||||
album: parsed.album,
|
||||
year: parsed.year,
|
||||
track_number: parsed.track_number,
|
||||
genre: parsed.genre,
|
||||
featured_artists: parsed.featured_artists,
|
||||
release_type: parsed.release_type,
|
||||
confidence: parsed.confidence,
|
||||
notes: parsed.notes,
|
||||
})
|
||||
}
|
||||
203
furumi-agent/src/ingest/path_hints.rs
Normal file
203
furumi-agent/src/ingest/path_hints.rs
Normal file
@@ -0,0 +1,203 @@
|
||||
use std::path::Path;
|
||||
|
||||
use crate::db::PathHints;
|
||||
|
||||
/// Parse metadata hints from the file path relative to the inbox directory.
|
||||
///
|
||||
/// Recognized patterns:
|
||||
/// Artist/Album/01 - Title.ext
|
||||
/// Artist/Album (Year)/01 - Title.ext
|
||||
/// Artist/(Year) Album/01 - Title.ext
|
||||
/// Artist/Album [Year]/01 - Title.ext
|
||||
/// 01 - Title.ext (flat, no artist/album)
|
||||
pub fn parse(relative_path: &Path) -> PathHints {
|
||||
let components: Vec<&str> = relative_path
|
||||
.components()
|
||||
.filter_map(|c| c.as_os_str().to_str())
|
||||
.collect();
|
||||
|
||||
let mut hints = PathHints::default();
|
||||
|
||||
let filename = components.last().copied().unwrap_or("");
|
||||
let stem = Path::new(filename)
|
||||
.file_stem()
|
||||
.and_then(|s| s.to_str())
|
||||
.unwrap_or("");
|
||||
|
||||
// Parse track number and title from filename
|
||||
parse_filename(stem, &mut hints);
|
||||
|
||||
match components.len() {
|
||||
// Artist/Album/file.ext
|
||||
3.. => {
|
||||
hints.artist = Some(components[0].to_owned());
|
||||
|
||||
let album_raw = components[1];
|
||||
let (album, year) = parse_album_with_year(album_raw);
|
||||
hints.album = Some(album);
|
||||
if year.is_some() {
|
||||
hints.year = year;
|
||||
}
|
||||
}
|
||||
// Album/file.ext (or Artist/file.ext — ambiguous, treat as album)
|
||||
2 => {
|
||||
let dir = components[0];
|
||||
let (name, year) = parse_album_with_year(dir);
|
||||
hints.album = Some(name);
|
||||
if year.is_some() {
|
||||
hints.year = year;
|
||||
}
|
||||
}
|
||||
// Just file.ext
|
||||
_ => {}
|
||||
}
|
||||
|
||||
hints
|
||||
}
|
||||
|
||||
/// Try to extract track number and title from a filename stem.
|
||||
///
|
||||
/// Patterns: "01 - Title", "01. Title", "1 Title", "Title"
|
||||
fn parse_filename(stem: &str, hints: &mut PathHints) {
|
||||
let trimmed = stem.trim();
|
||||
|
||||
// Try "NN - Title" or "NN. Title"
|
||||
if let Some(rest) = try_strip_track_prefix(trimmed) {
|
||||
let (num_str, title) = rest;
|
||||
if let Ok(num) = num_str.parse::<i32>() {
|
||||
hints.track_number = Some(num);
|
||||
if !title.is_empty() {
|
||||
hints.title = Some(title.to_owned());
|
||||
}
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// No track number found, use full stem as title
|
||||
if !trimmed.is_empty() {
|
||||
hints.title = Some(trimmed.to_owned());
|
||||
}
|
||||
}
|
||||
|
||||
/// Try to parse "NN - Rest", "NN. Rest", "NN-Rest" or "NN Rest" from a string.
/// Returns (number_str, rest) if successful; `rest` may be empty when the
/// stem consists only of digits (e.g. a file named "07.flac").
fn try_strip_track_prefix(s: &str) -> Option<(&str, &str)> {
    // Span of leading ASCII digits; an all-digit stem is a bare track number.
    let digit_end = s.find(|c: char| !c.is_ascii_digit()).unwrap_or(s.len());
    if digit_end == 0 {
        return None;
    }
    let num_str = &s[..digit_end];
    let rest = s[digit_end..].trim_start();

    // Strip an optional separator after the number: "- ", ". ", ".", or "-".
    // (A duplicate "- " branch in an earlier version was unreachable dead code;
    // the bare "-" case now handles "01-Title" with no surrounding spaces.)
    let title = if let Some(stripped) = rest.strip_prefix("- ") {
        stripped.trim()
    } else if let Some(stripped) = rest.strip_prefix(". ") {
        stripped.trim()
    } else if let Some(stripped) = rest.strip_prefix('.') {
        stripped.trim()
    } else if let Some(stripped) = rest.strip_prefix('-') {
        stripped.trim()
    } else {
        // Just "01 Title" — digits followed by space then text
        rest
    };

    Some((num_str, title))
}
|
||||
|
||||
/// Public wrapper for cover image processing.
///
/// Exposes `parse_album_with_year` to sibling modules that only need the
/// "Album (YYYY)" → ("Album", Some(YYYY)) split.
pub fn parse_album_year_public(dir: &str) -> (String, Option<i32>) {
    parse_album_with_year(dir)
}
|
||||
|
||||
/// Extract album name and optional year from directory name.
///
/// Patterns: "Album (2001)", "(2001) Album", "Album [2001]", "Album"
/// Years are only accepted in the range 1900..=2100.
fn parse_album_with_year(dir: &str) -> (String, Option<i32>) {
    // Try "Album (YYYY)" or "Album [YYYY]"; rfind means the last bracket wins.
    for (open, close) in [('(', ')'), ('[', ']')] {
        if let Some(start) = dir.rfind(open) {
            if let Some(end) = dir[start..].find(close) {
                let inside = &dir[start + 1..start + end];
                if let Ok(year) = inside.trim().parse::<i32>() {
                    if (1900..=2100).contains(&year) {
                        let prefix = dir[..start].trim();
                        let suffix = dir[start + end + 1..].trim();
                        // Join the pieces around the removed year. A space is
                        // needed when the year sits mid-string
                        // ("Album (1975) Deluxe"); without it the halves
                        // would fuse into "AlbumDeluxe".
                        let album = if prefix.is_empty() || suffix.is_empty() {
                            format!("{}{}", prefix, suffix)
                        } else {
                            format!("{} {}", prefix, suffix)
                        };
                        return (album, Some(year));
                    }
                }
            }
        }
    }

    // Try "(YYYY) Album" for cases the loop above missed (e.g. a later,
    // unclosed '(' made rfind pick the wrong bracket).
    if dir.starts_with('(') {
        if let Some(end) = dir.find(')') {
            let inside = &dir[1..end];
            if let Ok(year) = inside.trim().parse::<i32>() {
                if (1900..=2100).contains(&year) {
                    let album = dir[end + 1..].trim().to_owned();
                    return (album, Some(year));
                }
            }
        }
    }

    (dir.to_owned(), None)
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    // Unit tests for path-hint parsing: depth handling, year extraction in
    // both "(YYYY)" positions and "[YYYY]" brackets, and track-prefix forms.
    use super::*;
    use std::path::PathBuf;

    #[test]
    fn test_artist_album_track() {
        let p = PathBuf::from("Pink Floyd/Wish You Were Here (1975)/03 - Have a Cigar.flac");
        let h = parse(&p);
        assert_eq!(h.artist.as_deref(), Some("Pink Floyd"));
        assert_eq!(h.album.as_deref(), Some("Wish You Were Here"));
        assert_eq!(h.year, Some(1975));
        assert_eq!(h.track_number, Some(3));
        assert_eq!(h.title.as_deref(), Some("Have a Cigar"));
    }

    #[test]
    fn test_year_prefix() {
        let p = PathBuf::from("Artist/(2020) Album Name/01. Song.flac");
        let h = parse(&p);
        assert_eq!(h.artist.as_deref(), Some("Artist"));
        assert_eq!(h.album.as_deref(), Some("Album Name"));
        assert_eq!(h.year, Some(2020));
        assert_eq!(h.track_number, Some(1));
        assert_eq!(h.title.as_deref(), Some("Song"));
    }

    #[test]
    fn test_flat_file() {
        // A bare file yields no artist/album, only filename-derived fields.
        let p = PathBuf::from("05 - Something.mp3");
        let h = parse(&p);
        assert_eq!(h.artist, None);
        assert_eq!(h.album, None);
        assert_eq!(h.track_number, Some(5));
        assert_eq!(h.title.as_deref(), Some("Something"));
    }

    #[test]
    fn test_no_track_number() {
        let p = PathBuf::from("Artist/Album/Song Name.flac");
        let h = parse(&p);
        assert_eq!(h.track_number, None);
        assert_eq!(h.title.as_deref(), Some("Song Name"));
    }

    #[test]
    fn test_square_bracket_year() {
        let p = PathBuf::from("Band/Album [1999]/track.flac");
        let h = parse(&p);
        assert_eq!(h.album.as_deref(), Some("Album"));
        assert_eq!(h.year, Some(1999));
    }
}
|
||||
63
furumi-agent/src/main.rs
Normal file
63
furumi-agent/src/main.rs
Normal file
@@ -0,0 +1,63 @@
|
||||
mod config;
|
||||
mod db;
|
||||
mod ingest;
|
||||
mod merge;
|
||||
mod web;
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use clap::Parser;
|
||||
|
||||
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Startup order is deliberate: logging first, then config validation,
    // prompt files, DB connect + migrations, and only then the background
    // ingest task and the HTTP server.
    tracing_subscriber::fmt::init();

    let args = config::Args::parse();
    args.validate()?;

    // Prefer a build-time override (FURUMI_VERSION) over the crate version.
    let version = option_env!("FURUMI_VERSION").unwrap_or(env!("CARGO_PKG_VERSION"));
    tracing::info!("Furumi Agent v{} starting", version);
    tracing::info!("Inbox directory: {:?}", args.inbox_dir);
    tracing::info!("Storage directory: {:?}", args.storage_dir);
    tracing::info!("Ollama: {} (model: {})", args.ollama_url, args.ollama_model);
    tracing::info!("Confidence threshold: {}", args.confidence_threshold);

    let system_prompt = args.load_system_prompt()?;
    tracing::info!("System prompt loaded: {} chars", system_prompt.len());

    let merge_prompt = args.load_merge_prompt()?;
    tracing::info!("Merge prompt loaded: {} chars", merge_prompt.len());

    tracing::info!("Connecting to database...");
    let pool = db::connect(&args.database_url).await?;
    tracing::info!("Running database migrations...");
    db::migrate(&pool).await?;
    tracing::info!("Database ready");

    // Shared state: pool clone plus Arc-wrapped config and prompts.
    let state = Arc::new(web::AppState {
        pool: pool.clone(),
        config: Arc::new(args),
        system_prompt: Arc::new(system_prompt),
        merge_prompt: Arc::new(merge_prompt),
    });

    // Spawn the ingest pipeline as a background task
    let ingest_state = state.clone();
    tokio::spawn(async move {
        ingest::run(ingest_state).await;
    });

    // Start the admin web UI
    // An unparsable bind address is fatal: report it and exit with code 1.
    let bind_addr: std::net::SocketAddr = state.config.bind.parse().unwrap_or_else(|e| {
        eprintln!("Error: Invalid bind address '{}': {}", state.config.bind, e);
        std::process::exit(1);
    });

    tracing::info!("Admin UI: http://{}", bind_addr);

    let app = web::build_router(state);
    let listener = tokio::net::TcpListener::bind(bind_addr).await?;
    axum::serve(listener, app).await?;

    Ok(())
}
|
||||
355
furumi-agent/src/merge.rs
Normal file
355
furumi-agent/src/merge.rs
Normal file
@@ -0,0 +1,355 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::db;
|
||||
use crate::web::AppState;
|
||||
use crate::ingest::normalize::call_ollama;
|
||||
|
||||
/// LLM-generated plan for merging several artist records into one.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct MergeProposal {
    /// Final display name for the surviving artist.
    pub canonical_artist_name: String,
    /// ID of the artist record that survives the merge.
    pub winner_artist_id: i64,
    /// Per-album instructions (rename / merge-into).
    pub album_mappings: Vec<AlbumMapping>,
    /// Free-form explanation from the LLM.
    pub notes: String,
}
|
||||
|
||||
/// Instruction for a single source album within a merge proposal.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct AlbumMapping {
    /// Album being processed; must belong to one of the source artists.
    pub source_album_id: i64,
    /// Name the album should carry after the merge.
    pub canonical_name: String,
    /// When set, merge the source album into this existing album
    /// instead of keeping it as a separate record.
    pub merge_into_album_id: Option<i64>,
}
|
||||
|
||||
pub async fn propose_merge(state: &Arc<AppState>, merge_id: Uuid) -> anyhow::Result<()> {
|
||||
db::update_merge_status(&state.pool, merge_id, "processing", None).await?;
|
||||
|
||||
let merge = db::get_artist_merge(&state.pool, merge_id).await?
|
||||
.ok_or_else(|| anyhow::anyhow!("Merge not found: {}", merge_id))?;
|
||||
|
||||
let source_ids: Vec<i64> = serde_json::from_str(&merge.source_artist_ids)
|
||||
.map_err(|e| anyhow::anyhow!("Invalid source_artist_ids: {}", e))?;
|
||||
|
||||
let artists_data = db::get_artists_full_data(&state.pool, &source_ids).await?;
|
||||
|
||||
let user_message = build_merge_message(&artists_data);
|
||||
|
||||
let response = call_ollama(
|
||||
&state.config.ollama_url,
|
||||
&state.config.ollama_model,
|
||||
&state.merge_prompt,
|
||||
&user_message,
|
||||
state.config.ollama_auth.as_deref(),
|
||||
).await?;
|
||||
|
||||
let proposal = parse_merge_response(&response)?;
|
||||
let notes = proposal.notes.clone();
|
||||
let proposal_json = serde_json::to_string(&proposal)?;
|
||||
|
||||
db::update_merge_proposal(&state.pool, merge_id, &proposal_json, ¬es).await?;
|
||||
tracing::info!(id = %merge_id, "Merge proposal generated");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn build_merge_message(artists: &[db::ArtistFullData]) -> String {
|
||||
let mut msg = String::from("## Artists to merge\n\n");
|
||||
for artist in artists {
|
||||
msg.push_str(&format!("### Artist ID {}: \"{}\"\n", artist.id, artist.name));
|
||||
if artist.albums.is_empty() {
|
||||
msg.push_str(" (no albums)\n");
|
||||
}
|
||||
for album in &artist.albums {
|
||||
let year_str = album.year.map(|y| format!(" ({})", y)).unwrap_or_default();
|
||||
msg.push_str(&format!(" Album ID {}: \"{}\"{}\n", album.id, album.name, year_str));
|
||||
for track in &album.tracks {
|
||||
let num = track.track_number.map(|n| format!("{:02}. ", n)).unwrap_or_default();
|
||||
msg.push_str(&format!(" - {}\"{}\" [track_id={}]\n", num, track.title, track.id));
|
||||
}
|
||||
}
|
||||
msg.push('\n');
|
||||
}
|
||||
msg
|
||||
}
|
||||
|
||||
fn parse_merge_response(response: &str) -> anyhow::Result<MergeProposal> {
|
||||
let cleaned = response.trim();
|
||||
let json_str = if cleaned.starts_with("```") {
|
||||
let start = cleaned.find('{').unwrap_or(0);
|
||||
let end = cleaned.rfind('}').map(|i| i + 1).unwrap_or(cleaned.len());
|
||||
&cleaned[start..end]
|
||||
} else {
|
||||
cleaned
|
||||
};
|
||||
serde_json::from_str(json_str)
|
||||
.map_err(|e| anyhow::anyhow!("Failed to parse merge LLM response: {} — raw: {}", e, response))
|
||||
}
|
||||
|
||||
pub async fn execute_merge(state: &Arc<AppState>, merge_id: Uuid) -> anyhow::Result<()> {
|
||||
let merge = db::get_artist_merge(&state.pool, merge_id).await?
|
||||
.ok_or_else(|| anyhow::anyhow!("Merge not found"))?;
|
||||
|
||||
let proposal_str = merge.proposal.ok_or_else(|| anyhow::anyhow!("No proposal to execute"))?;
|
||||
let proposal: MergeProposal = serde_json::from_str(&proposal_str)?;
|
||||
|
||||
let source_ids: Vec<i64> = serde_json::from_str(&merge.source_artist_ids)?;
|
||||
let loser_ids: Vec<i64> = source_ids.iter().copied()
|
||||
.filter(|&id| id != proposal.winner_artist_id).collect();
|
||||
|
||||
// Execute all DB mutations in a single atomic transaction.
|
||||
// On error the transaction rolls back automatically (dropped without commit).
|
||||
let mut tx = state.pool.begin().await?;
|
||||
if let Err(e) = merge_db(&mut tx, &proposal, &loser_ids).await {
|
||||
// tx is dropped here → auto-rollback
|
||||
return Err(e);
|
||||
}
|
||||
tx.commit().await?;
|
||||
|
||||
// Move files after commit (best-effort; storage_path updated per file)
|
||||
let tracks = db::get_tracks_with_albums_for_artist(&state.pool, proposal.winner_artist_id).await?;
|
||||
for track in &tracks {
|
||||
let current = std::path::Path::new(&track.storage_path);
|
||||
let filename = match current.file_name() {
|
||||
Some(f) => f.to_string_lossy().to_string(),
|
||||
None => continue,
|
||||
};
|
||||
let album_name = track.album_name.as_deref().unwrap_or("Unknown Album");
|
||||
let new_path = state.config.storage_dir
|
||||
.join(sanitize(&proposal.canonical_artist_name))
|
||||
.join(sanitize(album_name))
|
||||
.join(&filename);
|
||||
|
||||
if current != new_path.as_path() {
|
||||
if current.exists() {
|
||||
if let Some(parent) = new_path.parent() {
|
||||
let _ = tokio::fs::create_dir_all(parent).await;
|
||||
}
|
||||
let moved = tokio::fs::rename(current, &new_path).await;
|
||||
if moved.is_err() {
|
||||
if let Ok(_) = tokio::fs::copy(current, &new_path).await {
|
||||
let _ = tokio::fs::remove_file(current).await;
|
||||
}
|
||||
}
|
||||
}
|
||||
db::update_track_storage_path(&state.pool, track.id, &new_path.to_string_lossy()).await?;
|
||||
}
|
||||
}
|
||||
|
||||
db::update_merge_status(&state.pool, merge_id, "approved", None).await?;
|
||||
tracing::info!(id = %merge_id, "Merge executed successfully");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// All DB mutations for a merge, executed inside a single transaction.
|
||||
/// `tx` is a `Transaction<'_, Postgres>` which derefs to `PgConnection`.
|
||||
async fn merge_db(
|
||||
tx: &mut sqlx::Transaction<'_, sqlx::Postgres>,
|
||||
proposal: &MergeProposal,
|
||||
loser_ids: &[i64],
|
||||
) -> anyhow::Result<()> {
|
||||
// 0. Validate proposal — ensure winner and all album IDs belong to source artists
|
||||
let source_ids: Vec<i64> = loser_ids.iter().copied()
|
||||
.chain(std::iter::once(proposal.winner_artist_id))
|
||||
.collect();
|
||||
|
||||
// Verify winner_artist_id is one of the source artists
|
||||
if !source_ids.contains(&proposal.winner_artist_id) {
|
||||
anyhow::bail!(
|
||||
"winner_artist_id {} is not among source artists {:?}",
|
||||
proposal.winner_artist_id, source_ids
|
||||
);
|
||||
}
|
||||
|
||||
// Build set of valid album IDs (albums that actually belong to source artists)
|
||||
let mut valid_album_ids = std::collections::HashSet::<i64>::new();
|
||||
for &src_id in &source_ids {
|
||||
let rows: Vec<(i64,)> = sqlx::query_as("SELECT id FROM albums WHERE artist_id = $1")
|
||||
.bind(src_id).fetch_all(&mut **tx).await?;
|
||||
for (id,) in rows { valid_album_ids.insert(id); }
|
||||
}
|
||||
|
||||
// 1. Rename winner artist to canonical name
|
||||
sqlx::query("UPDATE artists SET name = $2 WHERE id = $1")
|
||||
.bind(proposal.winner_artist_id)
|
||||
.bind(&proposal.canonical_artist_name)
|
||||
.execute(&mut **tx).await?;
|
||||
|
||||
// 2. Process album mappings from the proposal
|
||||
for mapping in &proposal.album_mappings {
|
||||
// Skip albums that don't belong to any source artist (LLM hallucinated IDs)
|
||||
if !valid_album_ids.contains(&mapping.source_album_id) {
|
||||
tracing::warn!(
|
||||
album_id = mapping.source_album_id,
|
||||
"Skipping album mapping: album does not belong to source artists"
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Skip if source was already processed (idempotent retry support)
|
||||
let src_exists: (bool,) = sqlx::query_as("SELECT EXISTS(SELECT 1 FROM albums WHERE id = $1)")
|
||||
.bind(mapping.source_album_id)
|
||||
.fetch_one(&mut **tx).await?;
|
||||
if !src_exists.0 { continue; }
|
||||
|
||||
if let Some(target_id) = mapping.merge_into_album_id {
|
||||
album_merge_into(tx, mapping.source_album_id, target_id).await?;
|
||||
} else {
|
||||
// Rename first
|
||||
sqlx::query("UPDATE albums SET name = $2 WHERE id = $1")
|
||||
.bind(mapping.source_album_id)
|
||||
.bind(&mapping.canonical_name)
|
||||
.execute(&mut **tx).await?;
|
||||
|
||||
// Check if winner already has an album with this canonical name (excluding self)
|
||||
let conflict: Option<(i64,)> = sqlx::query_as(
|
||||
"SELECT id FROM albums WHERE artist_id = $1 AND name = $2 AND id != $3"
|
||||
)
|
||||
.bind(proposal.winner_artist_id)
|
||||
.bind(&mapping.canonical_name)
|
||||
.bind(mapping.source_album_id)
|
||||
.fetch_optional(&mut **tx).await?;
|
||||
|
||||
if let Some((existing_id,)) = conflict {
|
||||
album_merge_into(tx, mapping.source_album_id, existing_id).await?;
|
||||
} else {
|
||||
// Just move to winner artist (only if not already there)
|
||||
sqlx::query(
|
||||
"UPDATE albums SET artist_id = $2 WHERE id = $1 AND artist_id != $2"
|
||||
)
|
||||
.bind(mapping.source_album_id)
|
||||
.bind(proposal.winner_artist_id)
|
||||
.execute(&mut **tx).await?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Move all remaining albums from each loser to winner, merging name conflicts
|
||||
for &loser_id in loser_ids {
|
||||
loop {
|
||||
// Fetch one album at a time; loop because merging changes the set
|
||||
let album: Option<(i64, String)> = sqlx::query_as(
|
||||
"SELECT id, name FROM albums WHERE artist_id = $1 LIMIT 1"
|
||||
)
|
||||
.bind(loser_id)
|
||||
.fetch_optional(&mut **tx).await?;
|
||||
|
||||
let (album_id, album_name) = match album {
|
||||
Some(a) => a,
|
||||
None => break,
|
||||
};
|
||||
|
||||
let conflict: Option<(i64,)> = sqlx::query_as(
|
||||
"SELECT id FROM albums WHERE artist_id = $1 AND name = $2"
|
||||
)
|
||||
.bind(proposal.winner_artist_id)
|
||||
.bind(&album_name)
|
||||
.fetch_optional(&mut **tx).await?;
|
||||
|
||||
if let Some((existing_id,)) = conflict {
|
||||
// Merge loser album into winner album
|
||||
album_merge_into(tx, album_id, existing_id).await?;
|
||||
} else {
|
||||
sqlx::query("UPDATE albums SET artist_id = $2 WHERE id = $1")
|
||||
.bind(album_id)
|
||||
.bind(proposal.winner_artist_id)
|
||||
.execute(&mut **tx).await?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 4. Move track_artists from losers to winner
|
||||
for &loser_id in loser_ids {
|
||||
// Remove winner's entries that would conflict after the update
|
||||
sqlx::query(
|
||||
r#"DELETE FROM track_artists
|
||||
WHERE artist_id = $2
|
||||
AND (track_id, role) IN (
|
||||
SELECT track_id, role FROM track_artists WHERE artist_id = $1
|
||||
)"#
|
||||
)
|
||||
.bind(loser_id)
|
||||
.bind(proposal.winner_artist_id)
|
||||
.execute(&mut **tx).await?;
|
||||
|
||||
sqlx::query("UPDATE track_artists SET artist_id = $2 WHERE artist_id = $1")
|
||||
.bind(loser_id)
|
||||
.bind(proposal.winner_artist_id)
|
||||
.execute(&mut **tx).await?;
|
||||
}
|
||||
|
||||
// 5. Delete loser artists (should be empty of albums/tracks by now)
|
||||
for &loser_id in loser_ids {
|
||||
sqlx::query("DELETE FROM artists WHERE id = $1")
|
||||
.bind(loser_id)
|
||||
.execute(&mut **tx).await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Merge source album into target within an open transaction:
|
||||
/// deduplicate by file_hash, move the rest, delete source.
|
||||
async fn album_merge_into(
|
||||
tx: &mut sqlx::Transaction<'_, sqlx::Postgres>,
|
||||
source_id: i64,
|
||||
target_id: i64,
|
||||
) -> anyhow::Result<()> {
|
||||
// Verify target exists
|
||||
let target_ok: (bool,) = sqlx::query_as("SELECT EXISTS(SELECT 1 FROM albums WHERE id = $1)")
|
||||
.bind(target_id)
|
||||
.fetch_one(&mut **tx).await?;
|
||||
if !target_ok.0 {
|
||||
anyhow::bail!("Target album {} does not exist", target_id);
|
||||
}
|
||||
|
||||
// Delete duplicate tracks from source (same file_hash already in target)
|
||||
let dups: Vec<(i64,)> = sqlx::query_as(
|
||||
r#"SELECT t1.id FROM tracks t1
|
||||
JOIN tracks t2 ON t1.file_hash = t2.file_hash AND t2.album_id = $2
|
||||
WHERE t1.album_id = $1"#
|
||||
)
|
||||
.bind(source_id)
|
||||
.bind(target_id)
|
||||
.fetch_all(&mut **tx).await?;
|
||||
|
||||
for (dup_id,) in dups {
|
||||
// Retrieve path for later file deletion (non-fatal if missing)
|
||||
let path: Option<(String,)> = sqlx::query_as("SELECT storage_path FROM tracks WHERE id = $1")
|
||||
.bind(dup_id).fetch_optional(&mut **tx).await?;
|
||||
if let Some((p,)) = path {
|
||||
// Schedule physical deletion after commit — store in a side channel;
|
||||
// here we do a best-effort remove outside the tx scope via tokio::spawn.
|
||||
let p = p.clone();
|
||||
tokio::spawn(async move {
|
||||
let _ = tokio::fs::remove_file(&p).await;
|
||||
});
|
||||
}
|
||||
sqlx::query("DELETE FROM track_artists WHERE track_id = $1").bind(dup_id).execute(&mut **tx).await?;
|
||||
sqlx::query("DELETE FROM tracks WHERE id = $1").bind(dup_id).execute(&mut **tx).await?;
|
||||
}
|
||||
|
||||
// Move remaining tracks from source to target
|
||||
sqlx::query("UPDATE tracks SET album_id = $2 WHERE album_id = $1")
|
||||
.bind(source_id)
|
||||
.bind(target_id)
|
||||
.execute(&mut **tx).await?;
|
||||
|
||||
// Delete the now-empty source album
|
||||
sqlx::query("DELETE FROM albums WHERE id = $1")
|
||||
.bind(source_id)
|
||||
.execute(&mut **tx).await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Make a name safe for use as a filesystem path component:
/// filesystem-hostile characters become `_`, then surrounding whitespace
/// and leading/trailing dots are stripped.
fn sanitize(name: &str) -> String {
    // Characters forbidden or problematic on common filesystems.
    const FORBIDDEN: &[char] = &['/', '\\', ':', '*', '?', '"', '<', '>', '|', '\0'];

    let mut cleaned = String::with_capacity(name.len());
    for ch in name.chars() {
        if FORBIDDEN.contains(&ch) {
            cleaned.push('_');
        } else {
            cleaned.push(ch);
        }
    }

    // Trim whitespace first, then dots — same order as a plain
    // `.trim().trim_matches('.')` chain.
    cleaned.trim().trim_matches('.').to_owned()
}
|
||||
1722
furumi-agent/src/web/admin.html
Normal file
1722
furumi-agent/src/web/admin.html
Normal file
File diff suppressed because it is too large
Load Diff
780
furumi-agent/src/web/api.rs
Normal file
780
furumi-agent/src/web/api.rs
Normal file
@@ -0,0 +1,780 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use axum::{
|
||||
extract::{Path, Query, State},
|
||||
http::StatusCode,
|
||||
response::{IntoResponse, Json},
|
||||
};
|
||||
use serde::Deserialize;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::db;
|
||||
use super::AppState;
|
||||
|
||||
type S = Arc<AppState>;
|
||||
|
||||
// --- Stats ---
|
||||
|
||||
pub async fn stats(State(state): State<S>) -> impl IntoResponse {
|
||||
match db::get_stats(&state.pool).await {
|
||||
Ok(stats) => (StatusCode::OK, Json(serde_json::to_value(stats).unwrap())).into_response(),
|
||||
Err(e) => error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
// --- Queue ---
|
||||
|
||||
/// Query parameters for listing the ingest queue.
#[derive(Deserialize)]
pub struct QueueQuery {
    // Optional status filter; None = all statuses.
    #[serde(default)]
    pub status: Option<String>,
    // Page size; defaults to 50 via `default_limit`.
    #[serde(default = "default_limit")]
    pub limit: i64,
    // Pagination offset; defaults to 0.
    #[serde(default)]
    pub offset: i64,
}

// serde default provider for `QueueQuery::limit`.
fn default_limit() -> i64 {
    50
}
|
||||
|
||||
/// List pending-queue items, optionally filtered by status, paginated.
pub async fn list_queue(State(state): State<S>, Query(q): Query<QueueQuery>) -> impl IntoResponse {
    match db::list_pending(&state.pool, q.status.as_deref(), q.limit, q.offset).await {
        Ok(items) => (StatusCode::OK, Json(serde_json::to_value(items).unwrap())).into_response(),
        Err(e) => error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
    }
}

/// Fetch a single queue item by id; 404 when absent.
pub async fn get_queue_item(State(state): State<S>, Path(id): Path<Uuid>) -> impl IntoResponse {
    match db::get_pending(&state.pool, id).await {
        Ok(Some(item)) => (StatusCode::OK, Json(serde_json::to_value(item).unwrap())).into_response(),
        Ok(None) => error_response(StatusCode::NOT_FOUND, "not found"),
        Err(e) => error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
    }
}

/// Delete a queue item; 204 on success, 404 when it did not exist.
pub async fn delete_queue_item(State(state): State<S>, Path(id): Path<Uuid>) -> impl IntoResponse {
    match db::delete_pending(&state.pool, id).await {
        Ok(true) => StatusCode::NO_CONTENT.into_response(),
        Ok(false) => error_response(StatusCode::NOT_FOUND, "not found"),
        Err(e) => error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
    }
}
|
||||
|
||||
/// Approve one queue item: build the destination path from its normalized
/// metadata, move the audio file into storage, then finalize the DB records.
pub async fn approve_queue_item(State(state): State<S>, Path(id): Path<Uuid>) -> impl IntoResponse {
    // Get pending track, move file, finalize in DB
    let pt = match db::get_pending(&state.pool, id).await {
        Ok(Some(pt)) => pt,
        Ok(None) => return error_response(StatusCode::NOT_FOUND, "not found"),
        Err(e) => return error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
    };

    // Fall back to placeholder names where normalization left fields empty.
    let artist = pt.norm_artist.as_deref().unwrap_or("Unknown Artist");
    let album = pt.norm_album.as_deref().unwrap_or("Unknown Album");
    let title = pt.norm_title.as_deref().unwrap_or("Unknown Title");
    let source = std::path::Path::new(&pt.inbox_path);
    // Keep the source file's extension; default to "flac" when missing.
    let ext = source.extension().and_then(|e| e.to_str()).unwrap_or("flac");
    let track_num = pt.norm_track_number.unwrap_or(0);

    // "NN - Title.ext" when a track number is known, else "Title.ext".
    let filename = if track_num > 0 {
        format!("{:02} - {}.{}", track_num, sanitize_filename(title), ext)
    } else {
        format!("{}.{}", sanitize_filename(title), ext)
    };

    let artist_dir = sanitize_filename(artist);
    let album_dir = sanitize_filename(album);
    let dest = state.config.storage_dir.join(&artist_dir).join(&album_dir).join(&filename);

    use crate::ingest::mover::MoveOutcome;
    let (storage_path, was_merged) = if dest.exists() && !source.exists() {
        // File already moved (e.g. auto-approved earlier but DB not finalized)
        (dest.to_string_lossy().to_string(), false)
    } else {
        match crate::ingest::mover::move_to_storage(
            &state.config.storage_dir, artist, album, &filename, source,
        ).await {
            Ok(MoveOutcome::Moved(p)) => (p.to_string_lossy().to_string(), false),
            Ok(MoveOutcome::Merged(p)) => (p.to_string_lossy().to_string(), true),
            Err(e) => return error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
        }
    };

    match db::approve_and_finalize(&state.pool, id, &storage_path).await {
        Ok(track_id) => {
            // Best-effort status note when the file was deduplicated into an
            // existing one; its failure is deliberately ignored.
            if was_merged {
                let _ = db::update_pending_status(&state.pool, id, "merged", None).await;
            }
            (StatusCode::OK, Json(serde_json::json!({"track_id": track_id}))).into_response()
        }
        Err(e) => error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
    }
}
|
||||
|
||||
/// Mark a queue item as "rejected"; 204 on success.
pub async fn reject_queue_item(State(state): State<S>, Path(id): Path<Uuid>) -> impl IntoResponse {
    match db::update_pending_status(&state.pool, id, "rejected", None).await {
        Ok(()) => StatusCode::NO_CONTENT.into_response(),
        Err(e) => error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
    }
}

/// Request body for manually editing a queue item's normalized metadata.
#[derive(Deserialize)]
pub struct UpdateQueueItem {
    pub norm_title: Option<String>,
    pub norm_artist: Option<String>,
    pub norm_album: Option<String>,
    pub norm_year: Option<i32>,
    pub norm_track_number: Option<i32>,
    pub norm_genre: Option<String>,
    // Defaults to an empty list when omitted from the JSON body.
    #[serde(default)]
    pub featured_artists: Vec<String>,
}
|
||||
|
||||
/// Overwrite a queue item's normalized metadata with manually edited values
/// and return the item to "review" status.
pub async fn update_queue_item(
    State(state): State<S>,
    Path(id): Path<Uuid>,
    Json(body): Json<UpdateQueueItem>,
) -> impl IntoResponse {
    // Repack the request body into the DB-layer struct; release_type is
    // never set from this endpoint.
    let norm = db::NormalizedFields {
        title: body.norm_title,
        artist: body.norm_artist,
        album: body.norm_album,
        year: body.norm_year,
        track_number: body.norm_track_number,
        genre: body.norm_genre,
        featured_artists: body.featured_artists,
        release_type: None,
        confidence: Some(1.0), // manual edit = full confidence
        notes: Some("Manually edited".to_owned()),
    };

    match db::update_pending_normalized(&state.pool, id, "review", &norm, None).await {
        Ok(()) => StatusCode::NO_CONTENT.into_response(),
        Err(e) => error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
    }
}
|
||||
|
||||
// --- Retry ---
|
||||
|
||||
/// Re-queue an item for processing by resetting its status to "pending".
pub async fn retry_queue_item(State(state): State<S>, Path(id): Path<Uuid>) -> impl IntoResponse {
    match db::update_pending_status(&state.pool, id, "pending", None).await {
        Ok(()) => StatusCode::NO_CONTENT.into_response(),
        Err(e) => error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
    }
}

// --- Batch operations ---

/// Request body shared by all batch endpoints: the target queue-item ids.
#[derive(Deserialize)]
pub struct BatchIds {
    pub ids: Vec<Uuid>,
}
|
||||
|
||||
/// Approve many queue items in one call. Per-item failures are collected into
/// `errors` instead of aborting the batch.
/// NOTE(review): the per-item logic duplicates `approve_queue_item` — a shared
/// helper would remove the drift risk.
pub async fn batch_approve(State(state): State<S>, Json(body): Json<BatchIds>) -> impl IntoResponse {
    let mut ok = 0u32;
    let mut errors = Vec::new();
    for id in &body.ids {
        let pt = match db::get_pending(&state.pool, *id).await {
            Ok(Some(pt)) => pt,
            Ok(None) => { errors.push(format!("{}: not found", id)); continue; }
            Err(e) => { errors.push(format!("{}: {}", id, e)); continue; }
        };

        // Placeholder names where normalization left fields empty.
        let artist = pt.norm_artist.as_deref().unwrap_or("Unknown Artist");
        let album = pt.norm_album.as_deref().unwrap_or("Unknown Album");
        let title = pt.norm_title.as_deref().unwrap_or("Unknown Title");
        let source = std::path::Path::new(&pt.inbox_path);
        let ext = source.extension().and_then(|e| e.to_str()).unwrap_or("flac");
        let track_num = pt.norm_track_number.unwrap_or(0);

        // "NN - Title.ext" when a track number is known, else "Title.ext".
        let filename = if track_num > 0 {
            format!("{:02} - {}.{}", track_num, sanitize_filename(title), ext)
        } else {
            format!("{}.{}", sanitize_filename(title), ext)
        };

        let artist_dir = sanitize_filename(artist);
        let album_dir = sanitize_filename(album);
        let dest = state.config.storage_dir.join(&artist_dir).join(&album_dir).join(&filename);

        use crate::ingest::mover::MoveOutcome;
        // If the file is already at the destination and gone from the inbox,
        // treat the move as done (idempotent retry support).
        let (rel_path, was_merged) = if dest.exists() && !source.exists() {
            (dest.to_string_lossy().to_string(), false)
        } else {
            match crate::ingest::mover::move_to_storage(
                &state.config.storage_dir, artist, album, &filename, source,
            ).await {
                Ok(MoveOutcome::Moved(p)) => (p.to_string_lossy().to_string(), false),
                Ok(MoveOutcome::Merged(p)) => (p.to_string_lossy().to_string(), true),
                Err(e) => { errors.push(format!("{}: {}", id, e)); continue; }
            }
        };

        match db::approve_and_finalize(&state.pool, *id, &rel_path).await {
            Ok(_) => {
                // Best-effort "merged" note; its failure is ignored.
                if was_merged {
                    let _ = db::update_pending_status(&state.pool, *id, "merged", None).await;
                }
                ok += 1;
            }
            Err(e) => errors.push(format!("{}: {}", id, e)),
        }
    }
    (StatusCode::OK, Json(serde_json::json!({"approved": ok, "errors": errors}))).into_response()
}
|
||||
|
||||
pub async fn batch_reject(State(state): State<S>, Json(body): Json<BatchIds>) -> impl IntoResponse {
|
||||
let mut ok = 0u32;
|
||||
for id in &body.ids {
|
||||
if db::update_pending_status(&state.pool, *id, "rejected", None).await.is_ok() {
|
||||
ok += 1;
|
||||
}
|
||||
}
|
||||
(StatusCode::OK, Json(serde_json::json!({"rejected": ok}))).into_response()
|
||||
}
|
||||
|
||||
pub async fn batch_retry(State(state): State<S>, Json(body): Json<BatchIds>) -> impl IntoResponse {
|
||||
let mut ok = 0u32;
|
||||
for id in &body.ids {
|
||||
if db::update_pending_status(&state.pool, *id, "pending", None).await.is_ok() {
|
||||
ok += 1;
|
||||
}
|
||||
}
|
||||
(StatusCode::OK, Json(serde_json::json!({"retried": ok}))).into_response()
|
||||
}
|
||||
|
||||
pub async fn batch_delete(State(state): State<S>, Json(body): Json<BatchIds>) -> impl IntoResponse {
|
||||
let mut ok = 0u32;
|
||||
for id in &body.ids {
|
||||
if db::delete_pending(&state.pool, *id).await.unwrap_or(false) {
|
||||
ok += 1;
|
||||
}
|
||||
}
|
||||
(StatusCode::OK, Json(serde_json::json!({"deleted": ok}))).into_response()
|
||||
}
|
||||
|
||||
// --- Artists ---
|
||||
|
||||
/// Query parameters for artist similarity search.
#[derive(Deserialize)]
pub struct SearchArtistsQuery {
    // Search term (required).
    pub q: String,
    // Max results; defaults to 10.
    #[serde(default = "default_search_limit")]
    pub limit: i32,
}

// serde default provider for `SearchArtistsQuery::limit`.
fn default_search_limit() -> i32 {
    10
}

/// Fuzzy artist search; an empty query short-circuits to an empty array.
pub async fn search_artists(State(state): State<S>, Query(q): Query<SearchArtistsQuery>) -> impl IntoResponse {
    if q.q.is_empty() {
        return (StatusCode::OK, Json(serde_json::json!([]))).into_response();
    }
    match db::find_similar_artists(&state.pool, &q.q, q.limit).await {
        Ok(artists) => (StatusCode::OK, Json(serde_json::to_value(artists).unwrap())).into_response(),
        Err(e) => error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
    }
}

/// List every artist (no pagination).
pub async fn list_artists(State(state): State<S>) -> impl IntoResponse {
    match db::list_artists_all(&state.pool).await {
        Ok(artists) => (StatusCode::OK, Json(serde_json::to_value(artists).unwrap())).into_response(),
        Err(e) => error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
    }
}
|
||||
|
||||
/// Request body for renaming an artist.
#[derive(Deserialize)]
pub struct UpdateArtistBody {
    pub name: String,
}

/// Rename an artist; 204 on success, 404 when the id does not exist.
pub async fn update_artist(
    State(state): State<S>,
    Path(id): Path<i64>,
    Json(body): Json<UpdateArtistBody>,
) -> impl IntoResponse {
    match db::update_artist_name(&state.pool, id, &body.name).await {
        Ok(true) => StatusCode::NO_CONTENT.into_response(),
        Ok(false) => error_response(StatusCode::NOT_FOUND, "not found"),
        Err(e) => error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
    }
}
|
||||
|
||||
// --- Albums ---
|
||||
|
||||
/// List all albums belonging to one artist.
pub async fn list_albums(State(state): State<S>, Path(artist_id): Path<i64>) -> impl IntoResponse {
    match db::list_albums_by_artist(&state.pool, artist_id).await {
        Ok(albums) => (StatusCode::OK, Json(serde_json::to_value(albums).unwrap())).into_response(),
        Err(e) => error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
    }
}

/// Request body for editing an album's name and (optional) year.
#[derive(Deserialize)]
pub struct UpdateAlbumBody {
    pub name: String,
    pub year: Option<i32>,
}

/// Update an album's name/year; 204 on success, 404 when absent.
pub async fn update_album(
    State(state): State<S>,
    Path(id): Path<i64>,
    Json(body): Json<UpdateAlbumBody>,
) -> impl IntoResponse {
    match db::update_album(&state.pool, id, &body.name, body.year).await {
        Ok(true) => StatusCode::NO_CONTENT.into_response(),
        Ok(false) => error_response(StatusCode::NOT_FOUND, "not found"),
        Err(e) => error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
    }
}
|
||||
|
||||
// --- Merges ---
|
||||
|
||||
/// Request body for proposing an artist merge.
#[derive(Deserialize)]
pub struct CreateMergeBody {
    pub artist_ids: Vec<i64>,
}

/// Create an artist-merge record; requires at least two artist ids.
pub async fn create_merge(State(state): State<S>, Json(body): Json<CreateMergeBody>) -> impl IntoResponse {
    if body.artist_ids.len() < 2 {
        return error_response(StatusCode::BAD_REQUEST, "need at least 2 artists to merge");
    }
    match db::insert_artist_merge(&state.pool, &body.artist_ids).await {
        Ok(id) => (StatusCode::OK, Json(serde_json::json!({"id": id}))).into_response(),
        Err(e) => error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
    }
}

/// List all artist-merge records.
pub async fn list_merges(State(state): State<S>) -> impl IntoResponse {
    match db::list_artist_merges(&state.pool).await {
        Ok(items) => (StatusCode::OK, Json(serde_json::to_value(items).unwrap())).into_response(),
        Err(e) => error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
    }
}
|
||||
|
||||
/// Fetch one merge record plus full data for its source artists and the
/// stored proposal (if any), combined into a single JSON payload.
pub async fn get_merge(State(state): State<S>, Path(id): Path<Uuid>) -> impl IntoResponse {
    let merge = match db::get_artist_merge(&state.pool, id).await {
        Ok(Some(m)) => m,
        Ok(None) => return error_response(StatusCode::NOT_FOUND, "not found"),
        Err(e) => return error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
    };

    // source_artist_ids is stored as a JSON-array string; unparsable data
    // degrades to an empty list rather than an error.
    let source_ids: Vec<i64> = serde_json::from_str(&merge.source_artist_ids).unwrap_or_default();
    let artists = match db::get_artists_full_data(&state.pool, &source_ids).await {
        Ok(a) => a,
        Err(e) => return error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
    };

    // The proposal is optional and may be unparsable; both map to JSON null.
    let proposal: Option<serde_json::Value> = merge.proposal.as_deref()
        .and_then(|p| serde_json::from_str(p).ok());

    (StatusCode::OK, Json(serde_json::json!({
        "merge": {
            "id": merge.id,
            "status": merge.status,
            "source_artist_ids": source_ids,
            "llm_notes": merge.llm_notes,
            "error_message": merge.error_message,
            "created_at": merge.created_at,
            "updated_at": merge.updated_at,
        },
        "artists": artists,
        "proposal": proposal,
    }))).into_response()
}
|
||||
|
||||
/// Request body for saving an edited merge proposal.
#[derive(Deserialize)]
pub struct UpdateMergeBody {
    pub proposal: serde_json::Value,
}

/// Persist a (manually edited) merge proposal. The proposal's optional
/// "notes" string field is extracted and stored alongside it.
pub async fn update_merge(
    State(state): State<S>,
    Path(id): Path<Uuid>,
    Json(body): Json<UpdateMergeBody>,
) -> impl IntoResponse {
    // Missing or non-string "notes" degrades to the empty string.
    let notes = body.proposal.get("notes")
        .and_then(|v| v.as_str())
        .unwrap_or("")
        .to_owned();
    let proposal_json = match serde_json::to_string(&body.proposal) {
        Ok(s) => s,
        Err(e) => return error_response(StatusCode::BAD_REQUEST, &e.to_string()),
    };
    match db::update_merge_proposal(&state.pool, id, &proposal_json, &notes).await {
        Ok(()) => StatusCode::NO_CONTENT.into_response(),
        Err(e) => error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
    }
}
|
||||
|
||||
/// Execute a stored merge proposal. On failure the merge record is flipped to
/// "error" with the message (best-effort) before returning 500.
pub async fn approve_merge(State(state): State<S>, Path(id): Path<Uuid>) -> impl IntoResponse {
    match crate::merge::execute_merge(&state, id).await {
        Ok(()) => StatusCode::NO_CONTENT.into_response(),
        Err(e) => {
            let msg = e.to_string();
            // Status update is best-effort; its own failure is ignored.
            let _ = db::update_merge_status(&state.pool, id, "error", Some(&msg)).await;
            error_response(StatusCode::INTERNAL_SERVER_ERROR, &msg)
        }
    }
}

/// Mark a merge proposal as "rejected".
pub async fn reject_merge(State(state): State<S>, Path(id): Path<Uuid>) -> impl IntoResponse {
    match db::update_merge_status(&state.pool, id, "rejected", None).await {
        Ok(()) => StatusCode::NO_CONTENT.into_response(),
        Err(e) => error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
    }
}

/// Reset a merge proposal to "pending" so it can be processed again.
pub async fn retry_merge(State(state): State<S>, Path(id): Path<Uuid>) -> impl IntoResponse {
    match db::update_merge_status(&state.pool, id, "pending", None).await {
        Ok(()) => StatusCode::NO_CONTENT.into_response(),
        Err(e) => error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
    }
}
|
||||
|
||||
// --- Library search ---
|
||||
|
||||
/// Shared query parameters for the library search endpoints.
#[derive(Deserialize)]
pub struct LibraryQuery {
    // Free-text search term; empty string = no filter.
    #[serde(default)]
    pub q: String,
    // Artist-name filter; empty string = no filter.
    #[serde(default)]
    pub artist: String,
    // Album-name filter; empty string = no filter.
    #[serde(default)]
    pub album: String,
    // Page size; defaults to 50.
    #[serde(default = "default_lib_limit")]
    pub limit: i64,
    // Pagination offset; defaults to 0.
    #[serde(default)]
    pub offset: i64,
}
// serde default provider for `LibraryQuery::limit`.
fn default_lib_limit() -> i64 { 50 }
|
||||
|
||||
/// Search tracks; runs the page query and the total count concurrently and
/// responds with `{"total": n, "items": [...]}`.
pub async fn library_tracks(State(state): State<S>, Query(q): Query<LibraryQuery>) -> impl IntoResponse {
    let (tracks, total) = tokio::join!(
        db::search_tracks(&state.pool, &q.q, &q.artist, &q.album, q.limit, q.offset),
        db::count_tracks(&state.pool, &q.q, &q.artist, &q.album),
    );
    match (tracks, total) {
        (Ok(rows), Ok(n)) => (StatusCode::OK, Json(serde_json::json!({"total": n, "items": rows}))).into_response(),
        (Err(e), _) | (_, Err(e)) => error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
    }
}

/// Search albums; same concurrent page + count pattern as `library_tracks`.
pub async fn library_albums(State(state): State<S>, Query(q): Query<LibraryQuery>) -> impl IntoResponse {
    let (albums, total) = tokio::join!(
        db::search_albums(&state.pool, &q.q, &q.artist, q.limit, q.offset),
        db::count_albums(&state.pool, &q.q, &q.artist),
    );
    match (albums, total) {
        (Ok(rows), Ok(n)) => (StatusCode::OK, Json(serde_json::json!({"total": n, "items": rows}))).into_response(),
        (Err(e), _) | (_, Err(e)) => error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
    }
}

/// Search artists; same concurrent page + count pattern as `library_tracks`.
pub async fn library_artists(State(state): State<S>, Query(q): Query<LibraryQuery>) -> impl IntoResponse {
    let (artists, total) = tokio::join!(
        db::search_artists_lib(&state.pool, &q.q, q.limit, q.offset),
        db::count_artists_lib(&state.pool, &q.q),
    );
    match (artists, total) {
        (Ok(rows), Ok(n)) => (StatusCode::OK, Json(serde_json::json!({"total": n, "items": rows}))).into_response(),
        (Err(e), _) | (_, Err(e)) => error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
    }
}
|
||||
|
||||
// --- Track / Album detail & edit ---
|
||||
|
||||
/// Fetch full details for one track; 404 when absent.
pub async fn get_track(State(state): State<S>, Path(id): Path<i64>) -> impl IntoResponse {
    match db::get_track_full(&state.pool, id).await {
        Ok(Some(t)) => (StatusCode::OK, Json(serde_json::to_value(t).unwrap())).into_response(),
        Ok(None) => error_response(StatusCode::NOT_FOUND, "not found"),
        Err(e) => error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
    }
}

/// Update a track's metadata; the body is the DB layer's field struct.
pub async fn update_track(
    State(state): State<S>,
    Path(id): Path<i64>,
    Json(body): Json<db::TrackUpdateFields>,
) -> impl IntoResponse {
    match db::update_track_metadata(&state.pool, id, &body).await {
        Ok(()) => StatusCode::NO_CONTENT.into_response(),
        Err(e) => error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
    }
}

/// Fetch full details for one album; 404 when absent.
pub async fn get_album_full(State(state): State<S>, Path(id): Path<i64>) -> impl IntoResponse {
    match db::get_album_details(&state.pool, id).await {
        Ok(Some(a)) => (StatusCode::OK, Json(serde_json::to_value(a).unwrap())).into_response(),
        Ok(None) => error_response(StatusCode::NOT_FOUND, "not found"),
        Err(e) => error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
    }
}
|
||||
|
||||
/// Request body for the full album edit (name, year, owning artist).
#[derive(Deserialize)]
pub struct AlbumUpdateBody {
    pub name: String,
    pub year: Option<i32>,
    // Allows re-assigning the album to a different artist.
    pub artist_id: i64,
}

/// Update an album's name, year, and owning artist in one call.
pub async fn update_album_full(
    State(state): State<S>,
    Path(id): Path<i64>,
    Json(body): Json<AlbumUpdateBody>,
) -> impl IntoResponse {
    match db::update_album_full(&state.pool, id, &body.name, body.year, body.artist_id).await {
        Ok(()) => StatusCode::NO_CONTENT.into_response(),
        Err(e) => error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
    }
}

/// Request body carrying the genre to apply.
#[derive(Deserialize)]
pub struct SetGenreBody { pub genre: String }

/// Set the genre on every track of an album.
pub async fn set_album_tracks_genre(
    State(state): State<S>,
    Path(id): Path<i64>,
    Json(body): Json<SetGenreBody>,
) -> impl IntoResponse {
    match db::set_album_tracks_genre(&state.pool, id, &body.genre).await {
        Ok(()) => StatusCode::NO_CONTENT.into_response(),
        Err(e) => error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
    }
}
|
||||
|
||||
/// Request body: `(track_id, new_track_number)` pairs to apply.
#[derive(Deserialize)]
pub struct ReorderBody {
    pub orders: Vec<(i64, i32)>,
}

/// Apply new track numbers to a set of tracks.
/// NOTE(review): the album id from the path is ignored (`_id`); the pairs are
/// applied to whatever track ids the client sends — confirm that is intended.
pub async fn reorder_album_tracks(
    State(state): State<S>,
    Path(_id): Path<i64>,
    Json(body): Json<ReorderBody>,
) -> impl IntoResponse {
    match db::reorder_tracks(&state.pool, &body.orders).await {
        Ok(()) => StatusCode::NO_CONTENT.into_response(),
        Err(e) => error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
    }
}
|
||||
|
||||
/// Cover by artist+album name — used for queue items that may not have an album_id yet.
|
||||
/// Query parameters for cover lookup by artist + album name.
#[derive(Deserialize)]
pub struct CoverByNameQuery {
    #[serde(default)] pub artist: String,
    #[serde(default)] pub name: String,
}
/// Cover by artist+album name — used for queue items that may not have an
/// album_id yet; resolves the id first, then delegates to `album_cover_by_id`.
pub async fn album_cover_by_name(State(state): State<S>, Query(q): Query<CoverByNameQuery>) -> impl IntoResponse {
    let album_id = match db::find_album_id(&state.pool, &q.artist, &q.name).await {
        Ok(Some(id)) => id,
        // Both "no match" and a DB error map to 404 here.
        _ => return StatusCode::NOT_FOUND.into_response(),
    };
    album_cover_by_id(&state, album_id).await
}

/// Cover by album id — thin wrapper over `album_cover_by_id`.
pub async fn album_cover(State(state): State<S>, Path(id): Path<i64>) -> impl IntoResponse {
    album_cover_by_id(&state, id).await
}
|
||||
|
||||
/// Resolve an album cover image: first the `album_images` table, then a
/// fallback to artwork embedded in the album's first track file; 404 when
/// neither yields bytes.
async fn album_cover_by_id(state: &super::AppState, id: i64) -> axum::response::Response {
    // 1. Try album_images table
    if let Ok(Some((file_path, mime_type))) = db::get_album_cover(&state.pool, id).await {
        // Unreadable file falls through to the embedded-art fallback.
        if let Ok(bytes) = tokio::fs::read(&file_path).await {
            return ([(axum::http::header::CONTENT_TYPE, mime_type)], bytes).into_response();
        }
    }

    // 2. Fallback: extract embedded cover from first track in album
    if let Ok(Some(track_path)) = db::get_album_first_track_path(&state.pool, id).await {
        let path = std::path::PathBuf::from(track_path);
        if path.exists() {
            // Decoding is CPU-bound; run it off the async worker threads.
            let result = tokio::task::spawn_blocking(move || extract_embedded_cover(&path)).await;
            if let Ok(Some((bytes, mime))) = result {
                return ([(axum::http::header::CONTENT_TYPE, mime)], bytes).into_response();
            }
        }
    }

    StatusCode::NOT_FOUND.into_response()
}
|
||||
|
||||
/// Extract the first embedded cover image (raw bytes + MIME type) from an
/// audio file via symphonia's probe. Checks probe-level metadata first, then
/// container-level metadata. Returns None on any open/probe failure or when
/// no visual is present.
fn extract_embedded_cover(path: &std::path::Path) -> Option<(Vec<u8>, String)> {
    use symphonia::core::{
        formats::FormatOptions,
        io::MediaSourceStream,
        meta::MetadataOptions,
        probe::Hint,
    };

    let file = std::fs::File::open(path).ok()?;
    let mss = MediaSourceStream::new(Box::new(file), Default::default());

    // Give the probe a format hint from the file extension when available.
    let mut hint = Hint::new();
    if let Some(ext) = path.extension().and_then(|e| e.to_str()) {
        hint.with_extension(ext);
    }

    let mut probed = symphonia::default::get_probe()
        .format(
            &hint,
            mss,
            // Gapless decoding is irrelevant for metadata-only access.
            &FormatOptions { enable_gapless: false, ..Default::default() },
            &MetadataOptions::default(),
        )
        .ok()?;

    // Probe-level metadata (e.g. ID3 read ahead of the container).
    if let Some(rev) = probed.metadata.get().as_ref().and_then(|m| m.current()) {
        if let Some(v) = rev.visuals().first() {
            return Some((v.data.to_vec(), v.media_type.clone()));
        }
    }
    // Container-level metadata (e.g. FLAC picture blocks).
    if let Some(rev) = probed.format.metadata().current() {
        if let Some(v) = rev.visuals().first() {
            return Some((v.data.to_vec(), v.media_type.clone()));
        }
    }
    None
}
|
||||
|
||||
/// Query parameters for the album picker (optional artist scope).
#[derive(Deserialize)]
pub struct AlbumSearchQuery {
    #[serde(default)]
    pub q: String,
    // When set, restricts results to this artist's albums.
    pub artist_id: Option<i64>,
}

/// Album search for a (possibly scoped) artist; returns `[{"id", "name"}]`.
pub async fn search_albums_for_artist(State(state): State<S>, Query(q): Query<AlbumSearchQuery>) -> impl IntoResponse {
    match db::search_albums_for_artist(&state.pool, &q.q, q.artist_id).await {
        // Repack (id, name) tuples into JSON objects for the client.
        Ok(items) => (StatusCode::OK, Json(serde_json::to_value(
            items.iter().map(|(id, name)| serde_json::json!({"id": id, "name": name})).collect::<Vec<_>>()
        ).unwrap())).into_response(),
        Err(e) => error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
    }
}
|
||||
|
||||
// --- Artist full admin form ---
|
||||
|
||||
/// Full admin view of an artist: the artist record, each album with its
/// tracks, and the artist's guest appearances, combined into one JSON payload.
pub async fn get_artist_full(State(state): State<S>, Path(id): Path<i64>) -> impl IntoResponse {
    let artist = match db::get_artist_by_id(&state.pool, id).await {
        Ok(Some(a)) => a,
        Ok(None) => return error_response(StatusCode::NOT_FOUND, "not found"),
        Err(e) => return error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
    };
    // Albums and appearances are loaded concurrently.
    let (albums, appearances) = tokio::join!(
        db::get_artist_albums(&state.pool, id),
        db::get_artist_appearances(&state.pool, id),
    );
    // For each album, load tracks
    let albums = match albums {
        Ok(a) => a,
        Err(e) => return error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
    };
    let mut albums_with_tracks = Vec::new();
    // NOTE(review): one track query per album (N+1); a per-album track-load
    // failure is silently replaced with an empty list.
    for album in albums {
        let tracks = db::get_album_tracks_admin(&state.pool, album.id).await.unwrap_or_default();
        albums_with_tracks.push(serde_json::json!({
            "id": album.id, "name": album.name, "year": album.year,
            "release_type": album.release_type, "hidden": album.hidden,
            "track_count": album.track_count, "tracks": tracks,
        }));
    }
    (StatusCode::OK, Json(serde_json::json!({
        "artist": artist,
        "albums": albums_with_tracks,
        // An appearances query error also degrades to an empty list.
        "appearances": appearances.unwrap_or_default(),
    }))).into_response()
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct SetHiddenBody { pub hidden: bool }
|
||||
|
||||
pub async fn set_track_hidden(State(state): State<S>, Path(id): Path<i64>, Json(b): Json<SetHiddenBody>) -> impl IntoResponse {
|
||||
match db::set_track_hidden(&state.pool, id, b.hidden).await {
|
||||
Ok(()) => StatusCode::NO_CONTENT.into_response(),
|
||||
Err(e) => error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn set_album_hidden(State(state): State<S>, Path(id): Path<i64>, Json(b): Json<SetHiddenBody>) -> impl IntoResponse {
|
||||
match db::set_album_hidden(&state.pool, id, b.hidden).await {
|
||||
Ok(()) => StatusCode::NO_CONTENT.into_response(),
|
||||
Err(e) => error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn set_artist_hidden(State(state): State<S>, Path(id): Path<i64>, Json(b): Json<SetHiddenBody>) -> impl IntoResponse {
|
||||
match db::set_artist_hidden(&state.pool, id, b.hidden).await {
|
||||
Ok(()) => StatusCode::NO_CONTENT.into_response(),
|
||||
Err(e) => error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct SetReleaseTypeBody { pub release_type: String }
|
||||
|
||||
pub async fn set_album_release_type(State(state): State<S>, Path(id): Path<i64>, Json(b): Json<SetReleaseTypeBody>) -> impl IntoResponse {
|
||||
let valid = ["album","single","ep","compilation","live"];
|
||||
if !valid.contains(&b.release_type.as_str()) {
|
||||
return error_response(StatusCode::BAD_REQUEST, "invalid release_type");
|
||||
}
|
||||
match db::set_album_release_type(&state.pool, id, &b.release_type).await {
|
||||
Ok(()) => StatusCode::NO_CONTENT.into_response(),
|
||||
Err(e) => error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct RenameArtistBody { pub name: String }
|
||||
|
||||
pub async fn rename_artist_api(State(state): State<S>, Path(id): Path<i64>, Json(b): Json<RenameArtistBody>) -> impl IntoResponse {
|
||||
match db::rename_artist_name(&state.pool, id, &b.name).await {
|
||||
Ok(()) => StatusCode::NO_CONTENT.into_response(),
|
||||
Err(e) => error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct AddAppearanceBody { pub track_id: i64 }
|
||||
|
||||
pub async fn add_appearance(State(state): State<S>, Path(artist_id): Path<i64>, Json(b): Json<AddAppearanceBody>) -> impl IntoResponse {
|
||||
match db::add_track_appearance(&state.pool, b.track_id, artist_id).await {
|
||||
Ok(()) => StatusCode::NO_CONTENT.into_response(),
|
||||
Err(e) => error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn remove_appearance(State(state): State<S>, Path((artist_id, track_id)): Path<(i64, i64)>) -> impl IntoResponse {
|
||||
match db::remove_track_appearance(&state.pool, track_id, artist_id).await {
|
||||
Ok(()) => StatusCode::NO_CONTENT.into_response(),
|
||||
Err(e) => error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct SearchTracksQuery { #[serde(default)] pub q: String }
|
||||
|
||||
pub async fn search_tracks_feat(State(state): State<S>, Query(q): Query<SearchTracksQuery>) -> impl IntoResponse {
|
||||
match db::search_tracks_for_feat(&state.pool, &q.q).await {
|
||||
Ok(rows) => (StatusCode::OK, Json(serde_json::to_value(
|
||||
rows.iter().map(|(id, title, artist)| serde_json::json!({"id": id, "title": title, "artist_name": artist})).collect::<Vec<_>>()
|
||||
).unwrap())).into_response(),
|
||||
Err(e) => error_response(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
// --- Helpers ---
|
||||
|
||||
fn error_response(status: StatusCode, message: &str) -> axum::response::Response {
|
||||
(status, Json(serde_json::json!({"error": message}))).into_response()
|
||||
}
|
||||
|
||||
fn sanitize_filename(name: &str) -> String {
|
||||
name.chars()
|
||||
.map(|c| match c {
|
||||
'/' | '\\' | ':' | '*' | '?' | '"' | '<' | '>' | '|' => '_',
|
||||
_ => c,
|
||||
})
|
||||
.collect::<String>()
|
||||
.trim()
|
||||
.to_owned()
|
||||
}
|
||||
70
furumi-agent/src/web/mod.rs
Normal file
70
furumi-agent/src/web/mod.rs
Normal file
@@ -0,0 +1,70 @@
|
||||
pub mod api;
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use axum::{Router, routing::{delete, get, post, put}};
|
||||
use sqlx::PgPool;
|
||||
|
||||
use crate::config::Args;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct AppState {
|
||||
pub pool: PgPool,
|
||||
pub config: Arc<Args>,
|
||||
pub system_prompt: Arc<String>,
|
||||
pub merge_prompt: Arc<String>,
|
||||
}
|
||||
|
||||
pub fn build_router(state: Arc<AppState>) -> Router {
|
||||
let api = Router::new()
|
||||
.route("/stats", get(api::stats))
|
||||
.route("/queue", get(api::list_queue))
|
||||
.route("/queue/:id", get(api::get_queue_item).delete(api::delete_queue_item))
|
||||
.route("/queue/:id/approve", post(api::approve_queue_item))
|
||||
.route("/queue/:id/reject", post(api::reject_queue_item))
|
||||
.route("/queue/:id/retry", post(api::retry_queue_item))
|
||||
.route("/queue/:id/update", put(api::update_queue_item))
|
||||
.route("/queue/batch/approve", post(api::batch_approve))
|
||||
.route("/queue/batch/reject", post(api::batch_reject))
|
||||
.route("/queue/batch/retry", post(api::batch_retry))
|
||||
.route("/queue/batch/delete", post(api::batch_delete))
|
||||
.route("/artists/search", get(api::search_artists))
|
||||
.route("/artists", get(api::list_artists))
|
||||
.route("/artists/:id", put(api::update_artist))
|
||||
.route("/artists/:id/albums", get(api::list_albums))
|
||||
.route("/artists/:id/full", get(api::get_artist_full))
|
||||
.route("/artists/:id/hidden", put(api::set_artist_hidden))
|
||||
.route("/artists/:id/rename", put(api::rename_artist_api))
|
||||
.route("/artists/:id/appearances", post(api::add_appearance))
|
||||
.route("/artists/:id/appearances/:track_id", delete(api::remove_appearance))
|
||||
.route("/tracks/search", get(api::search_tracks_feat))
|
||||
.route("/tracks/:id", get(api::get_track).put(api::update_track))
|
||||
.route("/tracks/:id/hidden", put(api::set_track_hidden))
|
||||
.route("/albums/search", get(api::search_albums_for_artist))
|
||||
.route("/albums/cover-by-name", get(api::album_cover_by_name))
|
||||
.route("/albums/:id/cover", get(api::album_cover))
|
||||
.route("/albums/:id/full", get(api::get_album_full))
|
||||
.route("/albums/:id/reorder", put(api::reorder_album_tracks))
|
||||
.route("/albums/:id/edit", put(api::update_album_full))
|
||||
.route("/albums/:id/genre", put(api::set_album_tracks_genre))
|
||||
.route("/albums/:id/hidden", put(api::set_album_hidden))
|
||||
.route("/albums/:id/release_type", put(api::set_album_release_type))
|
||||
.route("/albums/:id", put(api::update_album))
|
||||
.route("/merges", get(api::list_merges).post(api::create_merge))
|
||||
.route("/merges/:id", get(api::get_merge).put(api::update_merge))
|
||||
.route("/merges/:id/approve", post(api::approve_merge))
|
||||
.route("/merges/:id/reject", post(api::reject_merge))
|
||||
.route("/merges/:id/retry", post(api::retry_merge))
|
||||
.route("/library/tracks", get(api::library_tracks))
|
||||
.route("/library/albums", get(api::library_albums))
|
||||
.route("/library/artists", get(api::library_artists));
|
||||
|
||||
Router::new()
|
||||
.route("/", get(admin_html))
|
||||
.nest("/api", api)
|
||||
.with_state(state)
|
||||
}
|
||||
|
||||
async fn admin_html() -> axum::response::Html<&'static str> {
|
||||
axum::response::Html(include_str!("admin.html"))
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "furumi-client-core"
|
||||
version = "0.2.1"
|
||||
version = "0.3.4"
|
||||
edition = "2024"
|
||||
|
||||
[dependencies]
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "furumi-common"
|
||||
version = "0.2.1"
|
||||
version = "0.3.4"
|
||||
edition = "2024"
|
||||
|
||||
[dependencies]
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "furumi-mount-linux"
|
||||
version = "0.2.1"
|
||||
version = "0.3.4"
|
||||
edition = "2024"
|
||||
|
||||
[dependencies]
|
||||
@@ -15,3 +15,16 @@ tracing-subscriber = { version = "0.3.22", features = ["env-filter"] }
|
||||
tokio = { version = "1.50.0", features = ["full"] }
|
||||
tokio-stream = "0.1.18"
|
||||
ctrlc = "3.5.2"
|
||||
|
||||
[package.metadata.deb]
|
||||
maintainer = "Furumi"
|
||||
copyright = "Furumi contributors"
|
||||
extended-description = "Furumi-ng: mount remote filesystem via encrypted gRPC + FUSE"
|
||||
depends = "fuse3"
|
||||
section = "utils"
|
||||
priority = "optional"
|
||||
maintainer-scripts = "debian/"
|
||||
assets = [
|
||||
{ source = "target/release/furumi-mount-linux", dest = "usr/bin/furumi-mount-linux", mode = "755" },
|
||||
{ source = "debian/furumi-mount.service", dest = "usr/lib/systemd/user/furumi-mount.service", mode = "644" },
|
||||
]
|
||||
|
||||
15
furumi-mount-linux/debian/furumi-mount.service
Normal file
15
furumi-mount-linux/debian/furumi-mount.service
Normal file
@@ -0,0 +1,15 @@
|
||||
[Unit]
|
||||
Description=Furumi remote filesystem mount
|
||||
After=network-online.target
|
||||
Wants=network-online.target
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
EnvironmentFile=%h/.config/furumi/config
|
||||
ExecStart=/usr/bin/furumi-mount-linux
|
||||
ExecStopPost=fusermount3 -uz ${FURUMI_MOUNT}
|
||||
Restart=on-failure
|
||||
RestartSec=5
|
||||
|
||||
[Install]
|
||||
WantedBy=default.target
|
||||
36
furumi-mount-linux/debian/postinst
Normal file
36
furumi-mount-linux/debian/postinst
Normal file
@@ -0,0 +1,36 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
if [ "$1" = "configure" ]; then
|
||||
if [ -n "$SUDO_USER" ] && [ "$SUDO_USER" != "root" ]; then
|
||||
REAL_USER="$SUDO_USER"
|
||||
REAL_HOME=$(getent passwd "$SUDO_USER" | cut -d: -f6)
|
||||
|
||||
CONFIG_DIR="$REAL_HOME/.config/furumi"
|
||||
CONFIG_FILE="$CONFIG_DIR/config"
|
||||
|
||||
if [ ! -f "$CONFIG_FILE" ]; then
|
||||
mkdir -p "$CONFIG_DIR"
|
||||
cat > "$CONFIG_FILE" << 'EOF'
|
||||
# Furumi mount configuration
|
||||
# Edit the values below, then enable and start the service:
|
||||
#
|
||||
# systemctl --user enable --now furumi-mount.service
|
||||
#
|
||||
# To apply changes after editing this file:
|
||||
#
|
||||
# systemctl --user restart furumi-mount.service
|
||||
|
||||
FURUMI_SERVER=your-server:50051
|
||||
FURUMI_TOKEN=your-token-here
|
||||
FURUMI_MOUNT=/path/to/mountpoint
|
||||
EOF
|
||||
chown -R "$REAL_USER:$REAL_USER" "$CONFIG_DIR"
|
||||
echo ""
|
||||
echo "furumi-mount: config created at $CONFIG_FILE"
|
||||
echo "furumi-mount: edit the file, then run:"
|
||||
echo " systemctl --user enable --now furumi-mount.service"
|
||||
echo ""
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
@@ -57,7 +57,14 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
};
|
||||
|
||||
let client = rt.block_on(async {
|
||||
FurumiClient::connect(&full_addr, &args.token).await
|
||||
let c = FurumiClient::connect(&full_addr, &args.token).await?;
|
||||
|
||||
// Ping the server to verify connection and authentication token
|
||||
if let Err(e) = c.get_attr("/").await {
|
||||
return Err(format!("Failed to authenticate or connect to server: {}", e).into());
|
||||
}
|
||||
|
||||
Ok::<_, Box<dyn std::error::Error>>(c)
|
||||
})?;
|
||||
|
||||
let fuse_fs = fs::FurumiFuse::new(client, rt.handle().clone());
|
||||
@@ -68,7 +75,7 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
MountOption::NoExec, // Better security for media mount
|
||||
];
|
||||
|
||||
println!("Mounting Furumi-ng to {:?}", args.mount);
|
||||
println!("Mounting Furumi-ng v{} to {:?}", option_env!("FURUMI_VERSION").unwrap_or(env!("CARGO_PKG_VERSION")), args.mount);
|
||||
|
||||
// Use Session + BackgroundSession for graceful unmount on exit
|
||||
let session = Session::new(fuse_fs, &args.mount, &options)?;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "furumi-mount-macos"
|
||||
version = "0.2.1"
|
||||
version = "0.3.4"
|
||||
edition = "2024"
|
||||
|
||||
[dependencies]
|
||||
|
||||
@@ -108,7 +108,7 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
std::process::exit(1);
|
||||
}
|
||||
|
||||
println!("Mounted Furumi-ng to {:?}", mount_path);
|
||||
println!("Mounted Furumi-ng v{} to {:?}", option_env!("FURUMI_VERSION").unwrap_or(env!("CARGO_PKG_VERSION")), mount_path);
|
||||
|
||||
// Wait for shutdown signal
|
||||
while running.load(Ordering::SeqCst) {
|
||||
@@ -116,7 +116,12 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
}
|
||||
|
||||
// Unmount
|
||||
let _ = Command::new("umount").arg(mount_point_umount.to_string_lossy().as_ref()).status();
|
||||
let _ = Command::new("diskutil")
|
||||
.arg("unmount")
|
||||
.arg("force")
|
||||
.arg(mount_point_umount.to_string_lossy().as_ref())
|
||||
.status();
|
||||
|
||||
handle.abort();
|
||||
println!("Unmounted successfully.");
|
||||
});
|
||||
|
||||
19
furumi-node-player/README.md
Normal file
19
furumi-node-player/README.md
Normal file
@@ -0,0 +1,19 @@
|
||||
# auth-app
|
||||
|
||||
Проект состоит из двух частей:
|
||||
|
||||
- `client` - Vite + React + TypeScript
|
||||
- `server` - Express + TypeScript + OIDC авторизация
|
||||
|
||||
## Запуск
|
||||
|
||||
1. Скопируй `server/.env.example` в `server/.env` и заполни OIDC параметры.
|
||||
- Если нужно запустить без авторизации, поставь `DISABLE_AUTH=true` (OIDC параметры тогда не требуются).
|
||||
2. В одном терминале:
|
||||
- `cd server`
|
||||
- `npm run dev`
|
||||
3. В другом терминале:
|
||||
- `cd client`
|
||||
- `npm run dev`
|
||||
|
||||
Клиент откроется на `http://localhost:5173`, сервер на `http://localhost:3001`.
|
||||
24
furumi-node-player/client/.gitignore
vendored
Normal file
24
furumi-node-player/client/.gitignore
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
# Logs
|
||||
logs
|
||||
*.log
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
pnpm-debug.log*
|
||||
lerna-debug.log*
|
||||
|
||||
node_modules
|
||||
dist
|
||||
dist-ssr
|
||||
*.local
|
||||
|
||||
# Editor directories and files
|
||||
.vscode/*
|
||||
!.vscode/extensions.json
|
||||
.idea
|
||||
.DS_Store
|
||||
*.suo
|
||||
*.ntvs*
|
||||
*.njsproj
|
||||
*.sln
|
||||
*.sw?
|
||||
73
furumi-node-player/client/README.md
Normal file
73
furumi-node-player/client/README.md
Normal file
@@ -0,0 +1,73 @@
|
||||
# React + TypeScript + Vite
|
||||
|
||||
This template provides a minimal setup to get React working in Vite with HMR and some ESLint rules.
|
||||
|
||||
Currently, two official plugins are available:
|
||||
|
||||
- [@vitejs/plugin-react](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react) uses [Oxc](https://oxc.rs)
|
||||
- [@vitejs/plugin-react-swc](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react-swc) uses [SWC](https://swc.rs/)
|
||||
|
||||
## React Compiler
|
||||
|
||||
The React Compiler is not enabled on this template because of its impact on dev & build performances. To add it, see [this documentation](https://react.dev/learn/react-compiler/installation).
|
||||
|
||||
## Expanding the ESLint configuration
|
||||
|
||||
If you are developing a production application, we recommend updating the configuration to enable type-aware lint rules:
|
||||
|
||||
```js
|
||||
export default defineConfig([
|
||||
globalIgnores(['dist']),
|
||||
{
|
||||
files: ['**/*.{ts,tsx}'],
|
||||
extends: [
|
||||
// Other configs...
|
||||
|
||||
// Remove tseslint.configs.recommended and replace with this
|
||||
tseslint.configs.recommendedTypeChecked,
|
||||
// Alternatively, use this for stricter rules
|
||||
tseslint.configs.strictTypeChecked,
|
||||
// Optionally, add this for stylistic rules
|
||||
tseslint.configs.stylisticTypeChecked,
|
||||
|
||||
// Other configs...
|
||||
],
|
||||
languageOptions: {
|
||||
parserOptions: {
|
||||
project: ['./tsconfig.node.json', './tsconfig.app.json'],
|
||||
tsconfigRootDir: import.meta.dirname,
|
||||
},
|
||||
// other options...
|
||||
},
|
||||
},
|
||||
])
|
||||
```
|
||||
|
||||
You can also install [eslint-plugin-react-x](https://github.com/Rel1cx/eslint-react/tree/main/packages/plugins/eslint-plugin-react-x) and [eslint-plugin-react-dom](https://github.com/Rel1cx/eslint-react/tree/main/packages/plugins/eslint-plugin-react-dom) for React-specific lint rules:
|
||||
|
||||
```js
|
||||
// eslint.config.js
|
||||
import reactX from 'eslint-plugin-react-x'
|
||||
import reactDom from 'eslint-plugin-react-dom'
|
||||
|
||||
export default defineConfig([
|
||||
globalIgnores(['dist']),
|
||||
{
|
||||
files: ['**/*.{ts,tsx}'],
|
||||
extends: [
|
||||
// Other configs...
|
||||
// Enable lint rules for React
|
||||
reactX.configs['recommended-typescript'],
|
||||
// Enable lint rules for React DOM
|
||||
reactDom.configs.recommended,
|
||||
],
|
||||
languageOptions: {
|
||||
parserOptions: {
|
||||
project: ['./tsconfig.node.json', './tsconfig.app.json'],
|
||||
tsconfigRootDir: import.meta.dirname,
|
||||
},
|
||||
// other options...
|
||||
},
|
||||
},
|
||||
])
|
||||
```
|
||||
23
furumi-node-player/client/eslint.config.js
Normal file
23
furumi-node-player/client/eslint.config.js
Normal file
@@ -0,0 +1,23 @@
|
||||
import js from '@eslint/js'
|
||||
import globals from 'globals'
|
||||
import reactHooks from 'eslint-plugin-react-hooks'
|
||||
import reactRefresh from 'eslint-plugin-react-refresh'
|
||||
import tseslint from 'typescript-eslint'
|
||||
import { defineConfig, globalIgnores } from 'eslint/config'
|
||||
|
||||
export default defineConfig([
|
||||
globalIgnores(['dist']),
|
||||
{
|
||||
files: ['**/*.{ts,tsx}'],
|
||||
extends: [
|
||||
js.configs.recommended,
|
||||
tseslint.configs.recommended,
|
||||
reactHooks.configs.flat.recommended,
|
||||
reactRefresh.configs.vite,
|
||||
],
|
||||
languageOptions: {
|
||||
ecmaVersion: 2020,
|
||||
globals: globals.browser,
|
||||
},
|
||||
},
|
||||
])
|
||||
13
furumi-node-player/client/index.html
Normal file
13
furumi-node-player/client/index.html
Normal file
@@ -0,0 +1,13 @@
|
||||
<!doctype html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<link rel="icon" type="image/svg+xml" href="/favicon.svg" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>client</title>
|
||||
</head>
|
||||
<body>
|
||||
<div id="root"></div>
|
||||
<script type="module" src="/src/main.tsx"></script>
|
||||
</body>
|
||||
</html>
|
||||
2965
furumi-node-player/client/package-lock.json
generated
Normal file
2965
furumi-node-player/client/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
30
furumi-node-player/client/package.json
Normal file
30
furumi-node-player/client/package.json
Normal file
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"name": "client",
|
||||
"private": true,
|
||||
"version": "0.0.0",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
"build": "tsc -b && vite build",
|
||||
"lint": "eslint .",
|
||||
"preview": "vite preview"
|
||||
},
|
||||
"dependencies": {
|
||||
"react": "^19.2.4",
|
||||
"react-dom": "^19.2.4"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@eslint/js": "^9.39.4",
|
||||
"@types/node": "^24.12.0",
|
||||
"@types/react": "^19.2.14",
|
||||
"@types/react-dom": "^19.2.3",
|
||||
"@vitejs/plugin-react": "^6.0.1",
|
||||
"eslint": "^9.39.4",
|
||||
"eslint-plugin-react-hooks": "^7.0.1",
|
||||
"eslint-plugin-react-refresh": "^0.5.2",
|
||||
"globals": "^17.4.0",
|
||||
"typescript": "~5.9.3",
|
||||
"typescript-eslint": "^8.57.0",
|
||||
"vite": "^8.0.1"
|
||||
}
|
||||
}
|
||||
1
furumi-node-player/client/public/favicon.svg
Normal file
1
furumi-node-player/client/public/favicon.svg
Normal file
File diff suppressed because one or more lines are too long
|
After Width: | Height: | Size: 9.3 KiB |
24
furumi-node-player/client/public/icons.svg
Normal file
24
furumi-node-player/client/public/icons.svg
Normal file
@@ -0,0 +1,24 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg">
|
||||
<symbol id="bluesky-icon" viewBox="0 0 16 17">
|
||||
<g clip-path="url(#bluesky-clip)"><path fill="#08060d" d="M7.75 7.735c-.693-1.348-2.58-3.86-4.334-5.097-1.68-1.187-2.32-.981-2.74-.79C.188 2.065.1 2.812.1 3.251s.241 3.602.398 4.13c.52 1.744 2.367 2.333 4.07 2.145-2.495.37-4.71 1.278-1.805 4.512 3.196 3.309 4.38-.71 4.987-2.746.608 2.036 1.307 5.91 4.93 2.746 2.72-2.746.747-4.143-1.747-4.512 1.702.189 3.55-.4 4.07-2.145.156-.528.397-3.691.397-4.13s-.088-1.186-.575-1.406c-.42-.19-1.06-.395-2.741.79-1.755 1.24-3.64 3.752-4.334 5.099"/></g>
|
||||
<defs><clipPath id="bluesky-clip"><path fill="#fff" d="M.1.85h15.3v15.3H.1z"/></clipPath></defs>
|
||||
</symbol>
|
||||
<symbol id="discord-icon" viewBox="0 0 20 19">
|
||||
<path fill="#08060d" d="M16.224 3.768a14.5 14.5 0 0 0-3.67-1.153c-.158.286-.343.67-.47.976a13.5 13.5 0 0 0-4.067 0c-.128-.306-.317-.69-.476-.976A14.4 14.4 0 0 0 3.868 3.77C1.546 7.28.916 10.703 1.231 14.077a14.7 14.7 0 0 0 4.5 2.306q.545-.748.965-1.587a9.5 9.5 0 0 1-1.518-.74q.191-.14.372-.293c2.927 1.369 6.107 1.369 8.999 0q.183.152.372.294-.723.437-1.52.74.418.838.963 1.588a14.6 14.6 0 0 0 4.504-2.308c.37-3.911-.63-7.302-2.644-10.309m-9.13 8.234c-.878 0-1.599-.82-1.599-1.82 0-.998.705-1.82 1.6-1.82.894 0 1.614.82 1.599 1.82.001 1-.705 1.82-1.6 1.82m5.91 0c-.878 0-1.599-.82-1.599-1.82 0-.998.705-1.82 1.6-1.82.893 0 1.614.82 1.599 1.82 0 1-.706 1.82-1.6 1.82"/>
|
||||
</symbol>
|
||||
<symbol id="documentation-icon" viewBox="0 0 21 20">
|
||||
<path fill="none" stroke="#aa3bff" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.35" d="m15.5 13.333 1.533 1.322c.645.555.967.833.967 1.178s-.322.623-.967 1.179L15.5 18.333m-3.333-5-1.534 1.322c-.644.555-.966.833-.966 1.178s.322.623.966 1.179l1.534 1.321"/>
|
||||
<path fill="none" stroke="#aa3bff" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.35" d="M17.167 10.836v-4.32c0-1.41 0-2.117-.224-2.68-.359-.906-1.118-1.621-2.08-1.96-.599-.21-1.349-.21-2.848-.21-2.623 0-3.935 0-4.983.369-1.684.591-3.013 1.842-3.641 3.428C3 6.449 3 7.684 3 10.154v2.122c0 2.558 0 3.838.706 4.726q.306.383.713.671c.76.536 1.79.64 3.581.66"/>
|
||||
<path fill="none" stroke="#aa3bff" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.35" d="M3 10a2.78 2.78 0 0 1 2.778-2.778c.555 0 1.209.097 1.748-.047.48-.129.854-.503.982-.982.145-.54.048-1.194.048-1.749a2.78 2.78 0 0 1 2.777-2.777"/>
|
||||
</symbol>
|
||||
<symbol id="github-icon" viewBox="0 0 19 19">
|
||||
<path fill="#08060d" fill-rule="evenodd" d="M9.356 1.85C5.05 1.85 1.57 5.356 1.57 9.694a7.84 7.84 0 0 0 5.324 7.44c.387.079.528-.168.528-.376 0-.182-.013-.805-.013-1.454-2.165.467-2.616-.935-2.616-.935-.349-.91-.864-1.143-.864-1.143-.71-.48.051-.48.051-.48.787.051 1.2.805 1.2.805.695 1.194 1.817.857 2.268.649.064-.507.27-.857.49-1.052-1.728-.182-3.545-.857-3.545-3.87 0-.857.31-1.558.8-2.104-.078-.195-.349-1 .077-2.078 0 0 .657-.208 2.14.805a7.5 7.5 0 0 1 1.946-.26c.657 0 1.328.092 1.946.26 1.483-1.013 2.14-.805 2.14-.805.426 1.078.155 1.883.078 2.078.502.546.799 1.247.799 2.104 0 3.013-1.818 3.675-3.558 3.87.284.247.528.714.528 1.454 0 1.052-.012 1.896-.012 2.156 0 .208.142.455.528.377a7.84 7.84 0 0 0 5.324-7.441c.013-4.338-3.48-7.844-7.773-7.844" clip-rule="evenodd"/>
|
||||
</symbol>
|
||||
<symbol id="social-icon" viewBox="0 0 20 20">
|
||||
<path fill="none" stroke="#aa3bff" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.35" d="M12.5 6.667a4.167 4.167 0 1 0-8.334 0 4.167 4.167 0 0 0 8.334 0"/>
|
||||
<path fill="none" stroke="#aa3bff" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.35" d="M2.5 16.667a5.833 5.833 0 0 1 8.75-5.053m3.837.474.513 1.035c.07.144.257.282.414.309l.93.155c.596.1.736.536.307.965l-.723.73a.64.64 0 0 0-.152.531l.207.903c.164.715-.213.991-.84.618l-.872-.52a.63.63 0 0 0-.577 0l-.872.52c-.624.373-1.003.094-.84-.618l.207-.903a.64.64 0 0 0-.152-.532l-.723-.729c-.426-.43-.289-.864.306-.964l.93-.156a.64.64 0 0 0 .412-.31l.513-1.034c.28-.562.735-.562 1.012 0"/>
|
||||
</symbol>
|
||||
<symbol id="x-icon" viewBox="0 0 19 19">
|
||||
<path fill="#08060d" fill-rule="evenodd" d="M1.893 1.98c.052.072 1.245 1.769 2.653 3.77l2.892 4.114c.183.261.333.48.333.486s-.068.089-.152.183l-.522.593-.765.867-3.597 4.087c-.375.426-.734.834-.798.905a1 1 0 0 0-.118.148c0 .01.236.017.664.017h.663l.729-.83c.4-.457.796-.906.879-.999a692 692 0 0 0 1.794-2.038c.034-.037.301-.34.594-.675l.551-.624.345-.392a7 7 0 0 1 .34-.374c.006 0 .93 1.306 2.052 2.903l2.084 2.965.045.063h2.275c1.87 0 2.273-.003 2.266-.021-.008-.02-1.098-1.572-3.894-5.547-2.013-2.862-2.28-3.246-2.273-3.266.008-.019.282-.332 2.085-2.38l2-2.274 1.567-1.782c.022-.028-.016-.03-.65-.03h-.674l-.3.342a871 871 0 0 1-1.782 2.025c-.067.075-.405.458-.75.852a100 100 0 0 1-.803.91c-.148.172-.299.344-.99 1.127-.304.343-.32.358-.345.327-.015-.019-.904-1.282-1.976-2.808L6.365 1.85H1.8zm1.782.91 8.078 11.294c.772 1.08 1.413 1.973 1.425 1.984.016.017.241.02 1.05.017l1.03-.004-2.694-3.766L7.796 5.75 5.722 2.852l-1.039-.004-1.039-.004z" clip-rule="evenodd"/>
|
||||
</symbol>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 4.9 KiB |
71
furumi-node-player/client/src/App.css
Normal file
71
furumi-node-player/client/src/App.css
Normal file
@@ -0,0 +1,71 @@
|
||||
.page {
|
||||
min-height: 100vh;
|
||||
display: grid;
|
||||
place-items: center;
|
||||
padding: 24px;
|
||||
}
|
||||
|
||||
.card {
|
||||
width: min(520px, 100%);
|
||||
border: 1px solid #d8dde6;
|
||||
border-radius: 14px;
|
||||
padding: 24px;
|
||||
background-color: #ffffff;
|
||||
box-shadow: 0 12px 30px rgba(0, 0, 0, 0.08);
|
||||
}
|
||||
|
||||
.subtitle {
|
||||
margin-top: 0;
|
||||
margin-bottom: 20px;
|
||||
color: #5a6475;
|
||||
}
|
||||
|
||||
.settings {
|
||||
margin-bottom: 16px;
|
||||
padding: 12px;
|
||||
border: 1px solid #e6eaf2;
|
||||
border-radius: 10px;
|
||||
background: #f8fafc;
|
||||
}
|
||||
|
||||
.toggle {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 10px;
|
||||
color: #0f172a;
|
||||
font-weight: 600;
|
||||
}
|
||||
|
||||
.toggle input {
|
||||
width: 18px;
|
||||
height: 18px;
|
||||
}
|
||||
|
||||
.hint {
|
||||
margin: 10px 0 0;
|
||||
color: #5a6475;
|
||||
}
|
||||
|
||||
.btn {
|
||||
display: inline-block;
|
||||
text-decoration: none;
|
||||
background: #2251ff;
|
||||
color: #ffffff;
|
||||
padding: 10px 16px;
|
||||
border-radius: 8px;
|
||||
font-weight: 600;
|
||||
}
|
||||
|
||||
.btn.ghost {
|
||||
background: #edf1ff;
|
||||
color: #1e3fc4;
|
||||
margin-top: 10px;
|
||||
}
|
||||
|
||||
.profile p {
|
||||
margin: 8px 0;
|
||||
}
|
||||
|
||||
.error {
|
||||
color: #cc1e1e;
|
||||
}
|
||||
142
furumi-node-player/client/src/App.tsx
Normal file
142
furumi-node-player/client/src/App.tsx
Normal file
@@ -0,0 +1,142 @@
|
||||
import { useEffect, useMemo, useState } from 'react'
|
||||
import { FurumiPlayer } from './FurumiPlayer'
|
||||
import './App.css'
|
||||
|
||||
type UserProfile = {
|
||||
sub: string
|
||||
name?: string
|
||||
email?: string
|
||||
}
|
||||
|
||||
const NO_AUTH_STORAGE_KEY = 'furumiNodePlayer.runWithoutAuth'
|
||||
|
||||
function App() {
|
||||
const [loading, setLoading] = useState(true)
|
||||
const [user, setUser] = useState<UserProfile | null>(null)
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
const [runWithoutAuth, setRunWithoutAuth] = useState(() => {
|
||||
try {
|
||||
return window.localStorage.getItem(NO_AUTH_STORAGE_KEY) === '1'
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
})
|
||||
|
||||
const apiBase = useMemo(() => import.meta.env.VITE_API_BASE_URL ?? '', [])
|
||||
|
||||
useEffect(() => {
|
||||
if (runWithoutAuth) {
|
||||
setError(null)
|
||||
setUser({ sub: 'noauth', name: 'No Auth' })
|
||||
setLoading(false)
|
||||
return
|
||||
}
|
||||
|
||||
const loadMe = async () => {
|
||||
try {
|
||||
const response = await fetch(`${apiBase}/api/me`, {
|
||||
credentials: 'include',
|
||||
})
|
||||
|
||||
if (response.status === 401) {
|
||||
setUser(null)
|
||||
return
|
||||
}
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Request failed with status ${response.status}`)
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
setUser(data.user ?? null)
|
||||
} catch (err) {
|
||||
setError(err instanceof Error ? err.message : 'Failed to load session')
|
||||
} finally {
|
||||
setLoading(false)
|
||||
}
|
||||
}
|
||||
|
||||
void loadMe()
|
||||
}, [apiBase, runWithoutAuth])
|
||||
|
||||
const loginUrl = `${apiBase}/api/login`
|
||||
const logoutUrl = `${apiBase}/api/logout`
|
||||
const playerApiRoot = `${apiBase}/api`
|
||||
|
||||
return (
|
||||
<>
|
||||
{!loading && (user || runWithoutAuth) ? (
|
||||
<FurumiPlayer apiRoot={playerApiRoot} />
|
||||
) : (
|
||||
<main className="page">
|
||||
<section className="card">
|
||||
<h1>OIDC Login</h1>
|
||||
<p className="subtitle">Авторизация обрабатывается на Express сервере.</p>
|
||||
|
||||
<div className="settings">
|
||||
<label className="toggle">
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={runWithoutAuth}
|
||||
onChange={(e) => {
|
||||
const next = e.target.checked
|
||||
setRunWithoutAuth(next)
|
||||
try {
|
||||
if (next) window.localStorage.setItem(NO_AUTH_STORAGE_KEY, '1')
|
||||
else window.localStorage.removeItem(NO_AUTH_STORAGE_KEY)
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
setLoading(true)
|
||||
setUser(null)
|
||||
}}
|
||||
/>
|
||||
<span>Запускать без авторизации</span>
|
||||
</label>
|
||||
</div>
|
||||
|
||||
{loading && <p>Проверяю сессию...</p>}
|
||||
{error && <p className="error">Ошибка: {error}</p>}
|
||||
|
||||
{!loading && runWithoutAuth && (
|
||||
<p className="hint">
|
||||
Режим без авторизации включён. Для входа отключи настройку выше.
|
||||
</p>
|
||||
)}
|
||||
|
||||
{!loading && !user && (
|
||||
<a className="btn" href={loginUrl}>
|
||||
Войти через OIDC
|
||||
</a>
|
||||
)}
|
||||
|
||||
{!loading && user && (
|
||||
<div className="profile">
|
||||
<p>
|
||||
<strong>ID:</strong> {user.sub}
|
||||
</p>
|
||||
{user.name && (
|
||||
<p>
|
||||
<strong>Имя:</strong> {user.name}
|
||||
</p>
|
||||
)}
|
||||
{user.email && (
|
||||
<p>
|
||||
<strong>Email:</strong> {user.email}
|
||||
</p>
|
||||
)}
|
||||
{!runWithoutAuth && (
|
||||
<a className="btn ghost" href={logoutUrl}>
|
||||
Выйти
|
||||
</a>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</section>
|
||||
</main>
|
||||
)}
|
||||
</>
|
||||
)
|
||||
}
|
||||
|
||||
export default App
|
||||
767
furumi-node-player/client/src/FurumiPlayer.tsx
Normal file
767
furumi-node-player/client/src/FurumiPlayer.tsx
Normal file
@@ -0,0 +1,767 @@
|
||||
import { useEffect, useRef, useState, type MouseEvent as ReactMouseEvent } from 'react'
|
||||
import './furumi-player.css'
|
||||
import { createFurumiApiClient } from './furumiApi'
|
||||
import { SearchDropdown } from './components/SearchDropdown'
|
||||
import { Breadcrumbs } from './components/Breadcrumbs'
|
||||
import { LibraryList } from './components/LibraryList'
|
||||
import { QueueList, type QueueItem } from './components/QueueList'
|
||||
import { NowPlaying } from './components/NowPlaying'
|
||||
|
||||
type FurumiPlayerProps = {
|
||||
apiRoot: string
|
||||
}
|
||||
|
||||
type Crumb = { label: string; action?: () => void }
|
||||
|
||||
export function FurumiPlayer({ apiRoot }: FurumiPlayerProps) {
|
||||
const [breadcrumbs, setBreadcrumbs] = useState<Array<{ label: string; action?: () => void }>>(
|
||||
[],
|
||||
)
|
||||
const [libraryLoading, setLibraryLoading] = useState(false)
|
||||
const [libraryError, setLibraryError] = useState<string | null>(null)
|
||||
const [libraryItems, setLibraryItems] = useState<
|
||||
Array<{
|
||||
key: string
|
||||
className: string
|
||||
icon: string
|
||||
name: string
|
||||
detail?: string
|
||||
nameClassName?: string
|
||||
onClick: () => void
|
||||
button?: { title: string; onClick: (ev: ReactMouseEvent<HTMLButtonElement>) => void }
|
||||
}>
|
||||
>([])
|
||||
const [searchResults, setSearchResults] = useState<
|
||||
Array<{ result_type: string; slug: string; name: string; detail?: string }>
|
||||
>([])
|
||||
const [searchOpen, setSearchOpen] = useState(false)
|
||||
const searchSelectRef = useRef<(type: string, slug: string) => void>(() => {})
|
||||
|
||||
const [nowPlayingTrack, setNowPlayingTrack] = useState<QueueItem | null>(null)
|
||||
const [queueItemsView, setQueueItemsView] = useState<QueueItem[]>([])
|
||||
const [queueOrderView, setQueueOrderView] = useState<number[]>([])
|
||||
const [queuePlayingOrigIdxView, setQueuePlayingOrigIdxView] = useState<number>(-1)
|
||||
const [queueScrollSignal, setQueueScrollSignal] = useState(0)
|
||||
|
||||
const queueActionsRef = useRef<{
|
||||
playIndex: (i: number) => void
|
||||
removeFromQueue: (idx: number) => void
|
||||
moveQueueItem: (fromPos: number, toPos: number) => void
|
||||
} | null>(null)
|
||||
|
||||
useEffect(() => {
|
||||
// --- Original player script adapted for React environment ---
|
||||
const audio = document.getElementById('audioEl') as HTMLAudioElement
|
||||
if (!audio) return
|
||||
|
||||
let queue: QueueItem[] = []
|
||||
let queueIndex = -1
|
||||
let shuffle = false
|
||||
let repeatAll = true
|
||||
let shuffleOrder: number[] = []
|
||||
let searchTimer: number | null = null
|
||||
let toastTimer: number | null = null
|
||||
let muted = false
|
||||
|
||||
// Restore prefs
|
||||
try {
|
||||
const v = window.localStorage.getItem('furumi_vol')
|
||||
const volSlider = document.getElementById('volSlider') as HTMLInputElement | null
|
||||
if (v !== null && volSlider) {
|
||||
audio.volume = Number(v) / 100
|
||||
volSlider.value = v
|
||||
}
|
||||
const btnShuffle = document.getElementById('btnShuffle')
|
||||
const btnRepeat = document.getElementById('btnRepeat')
|
||||
shuffle = window.localStorage.getItem('furumi_shuffle') === '1'
|
||||
repeatAll = window.localStorage.getItem('furumi_repeat') !== '0'
|
||||
btnShuffle?.classList.toggle('active', shuffle)
|
||||
btnRepeat?.classList.toggle('active', repeatAll)
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
|
||||
// --- Audio events ---
|
||||
audio.addEventListener('timeupdate', () => {
|
||||
if (audio.duration) {
|
||||
const fill = document.getElementById('progressFill')
|
||||
const timeElapsed = document.getElementById('timeElapsed')
|
||||
const timeDuration = document.getElementById('timeDuration')
|
||||
if (fill) fill.style.width = `${(audio.currentTime / audio.duration) * 100}%`
|
||||
if (timeElapsed) timeElapsed.textContent = fmt(audio.currentTime)
|
||||
if (timeDuration) timeDuration.textContent = fmt(audio.duration)
|
||||
}
|
||||
})
|
||||
audio.addEventListener('ended', () => nextTrack())
|
||||
audio.addEventListener('play', () => {
|
||||
const btn = document.getElementById('btnPlayPause')
|
||||
if (btn) btn.innerHTML = '⏸'
|
||||
})
|
||||
audio.addEventListener('pause', () => {
|
||||
const btn = document.getElementById('btnPlayPause')
|
||||
if (btn) btn.innerHTML = '▶'
|
||||
})
|
||||
audio.addEventListener('error', () => {
|
||||
showToast('Playback error')
|
||||
nextTrack()
|
||||
})
|
||||
|
||||
// --- API helper ---
|
||||
const API = apiRoot
|
||||
const api = createFurumiApiClient(API)
|
||||
|
||||
// --- Library navigation ---
|
||||
async function showArtists() {
|
||||
setBreadcrumb([{ label: 'Artists', action: showArtists }])
|
||||
setLibraryLoading(true)
|
||||
setLibraryError(null)
|
||||
const artists = await api('/artists')
|
||||
if (!artists) {
|
||||
setLibraryLoading(false)
|
||||
setLibraryError('Error')
|
||||
return
|
||||
}
|
||||
setLibraryLoading(false)
|
||||
setLibraryItems(
|
||||
(artists as any[]).map((a) => ({
|
||||
key: `artist:${a.slug}`,
|
||||
className: 'file-item dir',
|
||||
icon: '👤',
|
||||
name: a.name,
|
||||
detail: `${a.album_count} albums`,
|
||||
onClick: () => void showArtistAlbums(a.slug, a.name),
|
||||
})),
|
||||
)
|
||||
}
|
||||
|
||||
async function showArtistAlbums(artistSlug: string, artistName: string) {
|
||||
setBreadcrumb([
|
||||
{ label: 'Artists', action: showArtists },
|
||||
{ label: artistName, action: () => showArtistAlbums(artistSlug, artistName) },
|
||||
])
|
||||
setLibraryLoading(true)
|
||||
setLibraryError(null)
|
||||
const albums = await api('/artists/' + artistSlug + '/albums')
|
||||
if (!albums) {
|
||||
setLibraryLoading(false)
|
||||
setLibraryError('Error')
|
||||
return
|
||||
}
|
||||
setLibraryLoading(false)
|
||||
const allTracksItem = {
|
||||
key: `artist-all:${artistSlug}`,
|
||||
className: 'file-item',
|
||||
icon: '▶',
|
||||
name: 'Play all tracks',
|
||||
nameClassName: 'name',
|
||||
onClick: () => void playAllArtistTracks(artistSlug),
|
||||
}
|
||||
const albumItems = (albums as any[]).map((a) => {
|
||||
const year = a.year ? ` (${a.year})` : ''
|
||||
return {
|
||||
key: `album:${a.slug}`,
|
||||
className: 'file-item dir',
|
||||
icon: '💿',
|
||||
name: `${a.name}${year}`,
|
||||
detail: `${a.track_count} tracks`,
|
||||
onClick: () => void showAlbumTracks(a.slug, a.name, artistSlug, artistName),
|
||||
button: {
|
||||
title: 'Add album to queue',
|
||||
onClick: (ev: ReactMouseEvent<HTMLButtonElement>) => {
|
||||
ev.stopPropagation()
|
||||
void addAlbumToQueue(a.slug)
|
||||
},
|
||||
},
|
||||
}
|
||||
})
|
||||
setLibraryItems([allTracksItem, ...albumItems])
|
||||
}
|
||||
|
||||
async function showAlbumTracks(
|
||||
albumSlug: string,
|
||||
albumName: string,
|
||||
artistSlug: string,
|
||||
artistName: string,
|
||||
) {
|
||||
setBreadcrumb([
|
||||
{ label: 'Artists', action: showArtists },
|
||||
{ label: artistName, action: () => showArtistAlbums(artistSlug, artistName) },
|
||||
{ label: albumName },
|
||||
])
|
||||
setLibraryLoading(true)
|
||||
setLibraryError(null)
|
||||
const tracks = await api('/albums/' + albumSlug)
|
||||
if (!tracks) {
|
||||
setLibraryLoading(false)
|
||||
setLibraryError('Error')
|
||||
return
|
||||
}
|
||||
setLibraryLoading(false)
|
||||
const playAlbumItem = {
|
||||
key: `album-play:${albumSlug}`,
|
||||
className: 'file-item',
|
||||
icon: '▶',
|
||||
name: 'Play album',
|
||||
onClick: () => {
|
||||
void addAlbumToQueue(albumSlug, true)
|
||||
},
|
||||
}
|
||||
const trackItems = (tracks as any[]).map((t) => {
|
||||
const num = t.track_number ? `${t.track_number}. ` : ''
|
||||
const dur = t.duration_secs ? fmt(t.duration_secs) : ''
|
||||
return {
|
||||
key: `track:${t.slug}`,
|
||||
className: 'file-item',
|
||||
icon: '🎵',
|
||||
name: `${num}${t.title}`,
|
||||
detail: dur,
|
||||
onClick: () => {
|
||||
addTrackToQueue(
|
||||
{
|
||||
slug: t.slug,
|
||||
title: t.title,
|
||||
artist: t.artist_name,
|
||||
album_slug: albumSlug,
|
||||
duration: t.duration_secs,
|
||||
},
|
||||
true,
|
||||
)
|
||||
},
|
||||
}
|
||||
})
|
||||
setLibraryItems([playAlbumItem, ...trackItems])
|
||||
}
|
||||
|
||||
function setBreadcrumb(parts: Crumb[]) {
|
||||
setBreadcrumbs(parts)
|
||||
}
|
||||
|
||||
// --- Queue management ---
|
||||
function addTrackToQueue(
|
||||
track: {
|
||||
slug: string
|
||||
title: string
|
||||
artist: string
|
||||
album_slug: string | null
|
||||
duration: number | null
|
||||
},
|
||||
playNow?: boolean,
|
||||
) {
|
||||
const existing = queue.findIndex((t) => t.slug === track.slug)
|
||||
if (existing !== -1) {
|
||||
if (playNow) playIndex(existing)
|
||||
return
|
||||
}
|
||||
queue.push(track)
|
||||
updateQueueModel()
|
||||
if (playNow || (queueIndex === -1 && queue.length === 1)) {
|
||||
playIndex(queue.length - 1)
|
||||
}
|
||||
}
|
||||
|
||||
async function addAlbumToQueue(albumSlug: string, playFirst?: boolean) {
|
||||
const tracks = await api('/albums/' + albumSlug)
|
||||
if (!tracks || !(tracks as any[]).length) return
|
||||
const list = tracks as any[]
|
||||
let firstIdx = queue.length
|
||||
list.forEach((t) => {
|
||||
if (queue.find((q) => q.slug === t.slug)) return
|
||||
queue.push({
|
||||
slug: t.slug,
|
||||
title: t.title,
|
||||
artist: t.artist_name,
|
||||
album_slug: albumSlug,
|
||||
duration: t.duration_secs,
|
||||
})
|
||||
})
|
||||
updateQueueModel()
|
||||
if (playFirst || queueIndex === -1) playIndex(firstIdx)
|
||||
showToast(`Added ${list.length} tracks`)
|
||||
}
|
||||
|
||||
async function playAllArtistTracks(artistSlug: string) {
|
||||
const tracks = await api('/artists/' + artistSlug + '/tracks')
|
||||
if (!tracks || !(tracks as any[]).length) return
|
||||
const list = tracks as any[]
|
||||
clearQueue()
|
||||
list.forEach((t) => {
|
||||
queue.push({
|
||||
slug: t.slug,
|
||||
title: t.title,
|
||||
artist: t.artist_name,
|
||||
album_slug: t.album_slug,
|
||||
duration: t.duration_secs,
|
||||
})
|
||||
})
|
||||
updateQueueModel()
|
||||
playIndex(0)
|
||||
showToast(`Added ${list.length} tracks`)
|
||||
}
|
||||
|
||||
function playIndex(i: number) {
|
||||
if (i < 0 || i >= queue.length) return
|
||||
queueIndex = i
|
||||
const track = queue[i]
|
||||
audio.src = `${API}/stream/${track.slug}`
|
||||
void audio.play().catch(() => {})
|
||||
updateNowPlaying(track)
|
||||
updateQueueModel()
|
||||
setQueueScrollSignal((s) => s + 1)
|
||||
if (window.history && window.history.replaceState) {
|
||||
const url = new URL(window.location.href)
|
||||
url.searchParams.set('t', track.slug)
|
||||
window.history.replaceState(null, '', url.toString())
|
||||
}
|
||||
}
|
||||
|
||||
function updateNowPlaying(track: QueueItem | null) {
|
||||
setNowPlayingTrack(track)
|
||||
if (!track) return
|
||||
|
||||
document.title = `${track.title} — Furumi`
|
||||
|
||||
const coverUrl = `${API}/tracks/${track.slug}/cover`
|
||||
if ('mediaSession' in navigator) {
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
navigator.mediaSession.metadata = new window.MediaMetadata({
|
||||
title: track.title,
|
||||
artist: track.artist || '',
|
||||
album: '',
|
||||
artwork: [{ src: coverUrl, sizes: '512x512' }],
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
function currentOrder() {
|
||||
if (!shuffle) return [...Array(queue.length).keys()]
|
||||
if (shuffleOrder.length !== queue.length) buildShuffleOrder()
|
||||
return shuffleOrder
|
||||
}
|
||||
|
||||
function buildShuffleOrder() {
|
||||
shuffleOrder = [...Array(queue.length).keys()]
|
||||
for (let i = shuffleOrder.length - 1; i > 0; i--) {
|
||||
const j = Math.floor(Math.random() * (i + 1))
|
||||
;[shuffleOrder[i], shuffleOrder[j]] = [shuffleOrder[j], shuffleOrder[i]]
|
||||
}
|
||||
if (queueIndex !== -1) {
|
||||
const ci = shuffleOrder.indexOf(queueIndex)
|
||||
if (ci > 0) {
|
||||
shuffleOrder.splice(ci, 1)
|
||||
shuffleOrder.unshift(queueIndex)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function updateQueueModel() {
|
||||
const order = currentOrder()
|
||||
setQueueItemsView(queue.slice())
|
||||
setQueueOrderView(order.slice())
|
||||
setQueuePlayingOrigIdxView(queueIndex)
|
||||
}
|
||||
|
||||
function removeFromQueue(idx: number) {
|
||||
if (idx === queueIndex) {
|
||||
queueIndex = -1
|
||||
audio.pause()
|
||||
audio.src = ''
|
||||
updateNowPlaying(null)
|
||||
} else if (queueIndex > idx) {
|
||||
queueIndex--
|
||||
}
|
||||
queue.splice(idx, 1)
|
||||
if (shuffle) {
|
||||
const si = shuffleOrder.indexOf(idx)
|
||||
if (si !== -1) shuffleOrder.splice(si, 1)
|
||||
for (let i = 0; i < shuffleOrder.length; i++) {
|
||||
if (shuffleOrder[i] > idx) shuffleOrder[i]--
|
||||
}
|
||||
}
|
||||
updateQueueModel()
|
||||
}
|
||||
|
||||
function moveQueueItem(from: number, to: number) {
|
||||
if (from === to) return
|
||||
if (shuffle) {
|
||||
const item = shuffleOrder.splice(from, 1)[0]
|
||||
shuffleOrder.splice(to, 0, item)
|
||||
} else {
|
||||
const item = queue.splice(from, 1)[0]
|
||||
queue.splice(to, 0, item)
|
||||
if (queueIndex === from) queueIndex = to
|
||||
else if (from < queueIndex && to >= queueIndex) queueIndex--
|
||||
else if (from > queueIndex && to <= queueIndex) queueIndex++
|
||||
}
|
||||
updateQueueModel()
|
||||
}
|
||||
|
||||
queueActionsRef.current = {
|
||||
playIndex,
|
||||
removeFromQueue,
|
||||
moveQueueItem,
|
||||
}
|
||||
|
||||
function clearQueue() {
|
||||
queue = []
|
||||
queueIndex = -1
|
||||
shuffleOrder = []
|
||||
audio.pause()
|
||||
audio.src = ''
|
||||
updateNowPlaying(null)
|
||||
document.title = 'Furumi Player'
|
||||
updateQueueModel()
|
||||
}
|
||||
|
||||
// --- Playback controls ---
|
||||
function togglePlay() {
|
||||
if (!audio.src && queue.length) {
|
||||
playIndex(queueIndex === -1 ? 0 : queueIndex)
|
||||
return
|
||||
}
|
||||
if (audio.paused) void audio.play()
|
||||
else audio.pause()
|
||||
}
|
||||
|
||||
function nextTrack() {
|
||||
if (!queue.length) return
|
||||
const order = currentOrder()
|
||||
const pos = order.indexOf(queueIndex)
|
||||
if (pos < order.length - 1) playIndex(order[pos + 1])
|
||||
else if (repeatAll) {
|
||||
if (shuffle) buildShuffleOrder()
|
||||
playIndex(currentOrder()[0])
|
||||
}
|
||||
}
|
||||
|
||||
function prevTrack() {
|
||||
if (!queue.length) return
|
||||
if (audio.currentTime > 3) {
|
||||
audio.currentTime = 0
|
||||
return
|
||||
}
|
||||
const order = currentOrder()
|
||||
const pos = order.indexOf(queueIndex)
|
||||
if (pos > 0) playIndex(order[pos - 1])
|
||||
else if (repeatAll) playIndex(order[order.length - 1])
|
||||
}
|
||||
|
||||
function toggleShuffle() {
|
||||
shuffle = !shuffle
|
||||
if (shuffle) buildShuffleOrder()
|
||||
const btn = document.getElementById('btnShuffle')
|
||||
btn?.classList.toggle('active', shuffle)
|
||||
window.localStorage.setItem('furumi_shuffle', shuffle ? '1' : '0')
|
||||
updateQueueModel()
|
||||
}
|
||||
|
||||
function toggleRepeat() {
|
||||
repeatAll = !repeatAll
|
||||
const btn = document.getElementById('btnRepeat')
|
||||
btn?.classList.toggle('active', repeatAll)
|
||||
window.localStorage.setItem('furumi_repeat', repeatAll ? '1' : '0')
|
||||
}
|
||||
|
||||
// --- Seek & Volume ---
|
||||
function seekTo(e: MouseEvent) {
|
||||
if (!audio.duration) return
|
||||
const bar = document.getElementById('progressBar') as HTMLDivElement | null
|
||||
if (!bar) return
|
||||
const rect = bar.getBoundingClientRect()
|
||||
const pct = (e.clientX - rect.left) / rect.width
|
||||
audio.currentTime = pct * audio.duration
|
||||
}
|
||||
|
||||
function toggleMute() {
|
||||
muted = !muted
|
||||
audio.muted = muted
|
||||
const volIcon = document.getElementById('volIcon')
|
||||
if (volIcon) volIcon.innerHTML = muted ? '🔇' : '🔊'
|
||||
}
|
||||
|
||||
function setVolume(v: number) {
|
||||
audio.volume = v / 100
|
||||
const volIcon = document.getElementById('volIcon')
|
||||
if (volIcon) volIcon.innerHTML = v === 0 ? '🔇' : '🔊'
|
||||
window.localStorage.setItem('furumi_vol', String(v))
|
||||
}
|
||||
|
||||
// --- Search ---
|
||||
function onSearch(q: string) {
|
||||
if (searchTimer) {
|
||||
window.clearTimeout(searchTimer)
|
||||
}
|
||||
if (q.length < 2) {
|
||||
closeSearch()
|
||||
return
|
||||
}
|
||||
searchTimer = window.setTimeout(async () => {
|
||||
const results = await api('/search?q=' + encodeURIComponent(q))
|
||||
if (!results || !(results as any[]).length) {
|
||||
closeSearch()
|
||||
return
|
||||
}
|
||||
setSearchResults(results as any[])
|
||||
setSearchOpen(true)
|
||||
}, 250)
|
||||
}
|
||||
|
||||
function closeSearch() {
|
||||
setSearchOpen(false)
|
||||
setSearchResults([])
|
||||
}
|
||||
|
||||
function onSearchSelect(type: string, slug: string) {
|
||||
closeSearch()
|
||||
if (type === 'artist') void showArtistAlbums(slug, '')
|
||||
else if (type === 'album') void addAlbumToQueue(slug, true)
|
||||
else if (type === 'track') {
|
||||
addTrackToQueue(
|
||||
{ slug, title: '', artist: '', album_slug: null, duration: null },
|
||||
true,
|
||||
)
|
||||
void api('/stream/' + slug).catch(() => null)
|
||||
}
|
||||
}
|
||||
searchSelectRef.current = onSearchSelect
|
||||
|
||||
// --- Helpers ---
|
||||
function fmt(secs: number) {
|
||||
if (!secs || Number.isNaN(secs)) return '0:00'
|
||||
const s = Math.floor(secs)
|
||||
const m = Math.floor(s / 60)
|
||||
const h = Math.floor(m / 60)
|
||||
if (h > 0) {
|
||||
return `${h}:${pad(m % 60)}:${pad(s % 60)}`
|
||||
}
|
||||
return `${m}:${pad(s % 60)}`
|
||||
}
|
||||
|
||||
function pad(n: number) {
|
||||
return String(n).padStart(2, '0')
|
||||
}
|
||||
|
||||
function showToast(msg: string) {
|
||||
const t = document.getElementById('toast')
|
||||
if (!t) return
|
||||
t.textContent = msg
|
||||
t.classList.add('show')
|
||||
if (toastTimer) window.clearTimeout(toastTimer)
|
||||
toastTimer = window.setTimeout(() => t.classList.remove('show'), 2500)
|
||||
}
|
||||
|
||||
function toggleSidebar() {
|
||||
const sidebar = document.getElementById('sidebar')
|
||||
const overlay = document.getElementById('sidebarOverlay')
|
||||
sidebar?.classList.toggle('open')
|
||||
overlay?.classList.toggle('show')
|
||||
}
|
||||
|
||||
// --- MediaSession ---
|
||||
if ('mediaSession' in navigator) {
|
||||
try {
|
||||
navigator.mediaSession.setActionHandler('play', togglePlay)
|
||||
navigator.mediaSession.setActionHandler('pause', togglePlay)
|
||||
navigator.mediaSession.setActionHandler('previoustrack', prevTrack)
|
||||
navigator.mediaSession.setActionHandler('nexttrack', nextTrack)
|
||||
navigator.mediaSession.setActionHandler('seekto', (d: any) => {
|
||||
if (typeof d.seekTime === 'number') {
|
||||
audio.currentTime = d.seekTime
|
||||
}
|
||||
})
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
|
||||
// --- Wire DOM events that were inline in HTML ---
|
||||
const btnMenu = document.querySelector('.btn-menu')
|
||||
btnMenu?.addEventListener('click', () => toggleSidebar())
|
||||
|
||||
const sidebarOverlay = document.getElementById('sidebarOverlay')
|
||||
sidebarOverlay?.addEventListener('click', () => toggleSidebar())
|
||||
|
||||
const searchInput = document.getElementById('searchInput') as HTMLInputElement | null
|
||||
if (searchInput) {
|
||||
searchInput.addEventListener('input', (e) => {
|
||||
onSearch((e.target as HTMLInputElement).value)
|
||||
})
|
||||
searchInput.addEventListener('keydown', (e: KeyboardEvent) => {
|
||||
if (e.key === 'Escape') closeSearch()
|
||||
})
|
||||
}
|
||||
|
||||
const btnShuffle = document.getElementById('btnShuffle')
|
||||
btnShuffle?.addEventListener('click', () => toggleShuffle())
|
||||
const btnRepeat = document.getElementById('btnRepeat')
|
||||
btnRepeat?.addEventListener('click', () => toggleRepeat())
|
||||
const btnClear = document.getElementById('btnClearQueue')
|
||||
btnClear?.addEventListener('click', () => clearQueue())
|
||||
|
||||
const btnPrev = document.getElementById('btnPrev')
|
||||
btnPrev?.addEventListener('click', () => prevTrack())
|
||||
const btnPlay = document.getElementById('btnPlayPause')
|
||||
btnPlay?.addEventListener('click', () => togglePlay())
|
||||
const btnNext = document.getElementById('btnNext')
|
||||
btnNext?.addEventListener('click', () => nextTrack())
|
||||
|
||||
const progressBar = document.getElementById('progressBar')
|
||||
progressBar?.addEventListener('click', (e) => seekTo(e as MouseEvent))
|
||||
|
||||
const volIcon = document.getElementById('volIcon')
|
||||
volIcon?.addEventListener('click', () => toggleMute())
|
||||
const volSlider = document.getElementById('volSlider') as HTMLInputElement | null
|
||||
if (volSlider) {
|
||||
volSlider.addEventListener('input', (e) => {
|
||||
const v = Number((e.target as HTMLInputElement).value)
|
||||
setVolume(v)
|
||||
})
|
||||
}
|
||||
|
||||
const clearQueueBtn = document.getElementById('btnClearQueue')
|
||||
clearQueueBtn?.addEventListener('click', () => clearQueue())
|
||||
|
||||
// --- Init ---
|
||||
;(async () => {
|
||||
const url = new URL(window.location.href)
|
||||
const urlSlug = url.searchParams.get('t')
|
||||
if (urlSlug) {
|
||||
const info = await api('/tracks/' + urlSlug)
|
||||
if (info) {
|
||||
addTrackToQueue(
|
||||
{
|
||||
slug: (info as any).slug,
|
||||
title: (info as any).title,
|
||||
artist: (info as any).artist_name,
|
||||
album_slug: (info as any).album_slug,
|
||||
duration: (info as any).duration_secs,
|
||||
},
|
||||
true,
|
||||
)
|
||||
}
|
||||
}
|
||||
void showArtists()
|
||||
})()
|
||||
|
||||
// Cleanup: best-effort remove listeners on unmount
|
||||
return () => {
|
||||
queueActionsRef.current = null
|
||||
audio.pause()
|
||||
}
|
||||
}, [apiRoot])
|
||||
|
||||
return (
|
||||
<div className="furumi-root">
|
||||
<header className="header">
|
||||
<div className="header-logo">
|
||||
<button className="btn-menu">☰</button>
|
||||
<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
|
||||
<circle cx="9" cy="18" r="3" />
|
||||
<circle cx="18" cy="15" r="3" />
|
||||
<path d="M12 18V6l9-3v3" />
|
||||
</svg>
|
||||
Furumi
|
||||
<span className="header-version">v</span>
|
||||
</div>
|
||||
<div style={{ display: 'flex', alignItems: 'center', gap: '1rem' }}>
|
||||
<div className="search-wrap">
|
||||
<input id="searchInput" placeholder="Search..." />
|
||||
<SearchDropdown
|
||||
isOpen={searchOpen}
|
||||
results={searchResults}
|
||||
onSelect={(type, slug) => searchSelectRef.current(type, slug)}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</header>
|
||||
|
||||
<div className="main">
|
||||
<div className="sidebar-overlay" id="sidebarOverlay" />
|
||||
<aside className="sidebar" id="sidebar">
|
||||
<div className="sidebar-header">Library</div>
|
||||
<Breadcrumbs items={breadcrumbs} />
|
||||
<div className="file-list" id="fileList">
|
||||
<LibraryList loading={libraryLoading} error={libraryError} items={libraryItems} />
|
||||
</div>
|
||||
</aside>
|
||||
|
||||
<section className="queue-panel">
|
||||
<div className="queue-header">
|
||||
<span>Queue</span>
|
||||
<div className="queue-actions">
|
||||
<button className="queue-btn active" id="btnShuffle">
|
||||
Shuffle
|
||||
</button>
|
||||
<button className="queue-btn active" id="btnRepeat">
|
||||
Repeat
|
||||
</button>
|
||||
<button className="queue-btn" id="btnClearQueue">
|
||||
Clear
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
<div className="queue-list" id="queueList">
|
||||
<QueueList
|
||||
apiRoot={apiRoot}
|
||||
queue={queueItemsView}
|
||||
order={queueOrderView}
|
||||
playingOrigIdx={queuePlayingOrigIdxView}
|
||||
scrollSignal={queueScrollSignal}
|
||||
onPlay={(origIdx) => queueActionsRef.current?.playIndex(origIdx)}
|
||||
onRemove={(origIdx) =>
|
||||
queueActionsRef.current?.removeFromQueue(origIdx)
|
||||
}
|
||||
onMove={(fromPos, toPos) =>
|
||||
queueActionsRef.current?.moveQueueItem(fromPos, toPos)
|
||||
}
|
||||
/>
|
||||
</div>
|
||||
</section>
|
||||
</div>
|
||||
|
||||
<div className="player-bar">
|
||||
<NowPlaying apiRoot={apiRoot} track={nowPlayingTrack} />
|
||||
<div className="controls">
|
||||
<div className="ctrl-btns">
|
||||
<button className="ctrl-btn" id="btnPrev">
|
||||
⏮
|
||||
</button>
|
||||
<button className="ctrl-btn ctrl-btn-main" id="btnPlayPause">
|
||||
▶
|
||||
</button>
|
||||
<button className="ctrl-btn" id="btnNext">
|
||||
⏭
|
||||
</button>
|
||||
</div>
|
||||
<div className="progress-row">
|
||||
<span className="time" id="timeElapsed">
|
||||
0:00
|
||||
</span>
|
||||
<div className="progress-bar" id="progressBar">
|
||||
<div className="progress-fill" id="progressFill" style={{ width: '0%' }} />
|
||||
</div>
|
||||
<span className="time" id="timeDuration">
|
||||
0:00
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
<div className="volume-row">
|
||||
<span className="vol-icon" id="volIcon">
|
||||
🔊
|
||||
</span>
|
||||
<input
|
||||
type="range"
|
||||
className="volume-slider"
|
||||
id="volSlider"
|
||||
min={0}
|
||||
max={100}
|
||||
defaultValue={80}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="toast" id="toast" />
|
||||
<audio id="audioEl" />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
BIN
furumi-node-player/client/src/assets/hero.png
Normal file
BIN
furumi-node-player/client/src/assets/hero.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 44 KiB |
1
furumi-node-player/client/src/assets/react.svg
Normal file
1
furumi-node-player/client/src/assets/react.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" class="iconify iconify--logos" width="35.93" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 228"><path fill="#00D8FF" d="M210.483 73.824a171.49 171.49 0 0 0-8.24-2.597c.465-1.9.893-3.777 1.273-5.621c6.238-30.281 2.16-54.676-11.769-62.708c-13.355-7.7-35.196.329-57.254 19.526a171.23 171.23 0 0 0-6.375 5.848a155.866 155.866 0 0 0-4.241-3.917C100.759 3.829 77.587-4.822 63.673 3.233C50.33 10.957 46.379 33.89 51.995 62.588a170.974 170.974 0 0 0 1.892 8.48c-3.28.932-6.445 1.924-9.474 2.98C17.309 83.498 0 98.307 0 113.668c0 15.865 18.582 31.778 46.812 41.427a145.52 145.52 0 0 0 6.921 2.165a167.467 167.467 0 0 0-2.01 9.138c-5.354 28.2-1.173 50.591 12.134 58.266c13.744 7.926 36.812-.22 59.273-19.855a145.567 145.567 0 0 0 5.342-4.923a168.064 168.064 0 0 0 6.92 6.314c21.758 18.722 43.246 26.282 56.54 18.586c13.731-7.949 18.194-32.003 12.4-61.268a145.016 145.016 0 0 0-1.535-6.842c1.62-.48 3.21-.974 4.76-1.488c29.348-9.723 48.443-25.443 48.443-41.52c0-15.417-17.868-30.326-45.517-39.844Zm-6.365 70.984c-1.4.463-2.836.91-4.3 1.345c-3.24-10.257-7.612-21.163-12.963-32.432c5.106-11 9.31-21.767 12.459-31.957c2.619.758 5.16 1.557 7.61 2.4c23.69 8.156 38.14 20.213 38.14 29.504c0 9.896-15.606 22.743-40.946 31.14Zm-10.514 20.834c2.562 12.94 2.927 24.64 1.23 33.787c-1.524 8.219-4.59 13.698-8.382 15.893c-8.067 4.67-25.32-1.4-43.927-17.412a156.726 156.726 0 0 1-6.437-5.87c7.214-7.889 14.423-17.06 21.459-27.246c12.376-1.098 24.068-2.894 34.671-5.345a134.17 134.17 0 0 1 1.386 6.193ZM87.276 214.515c-7.882 2.783-14.16 2.863-17.955.675c-8.075-4.657-11.432-22.636-6.853-46.752a156.923 156.923 0 0 1 1.869-8.499c10.486 2.32 22.093 3.988 34.498 4.994c7.084 9.967 14.501 19.128 21.976 27.15a134.668 134.668 0 0 1-4.877 4.492c-9.933 8.682-19.886 14.842-28.658 17.94ZM50.35 144.747c-12.483-4.267-22.792-9.812-29.858-15.863c-6.35-5.437-9.555-10.836-9.555-15.216c0-9.322 
13.897-21.212 37.076-29.293c2.813-.98 5.757-1.905 8.812-2.773c3.204 10.42 7.406 21.315 12.477 32.332c-5.137 11.18-9.399 22.249-12.634 32.792a134.718 134.718 0 0 1-6.318-1.979Zm12.378-84.26c-4.811-24.587-1.616-43.134 6.425-47.789c8.564-4.958 27.502 2.111 47.463 19.835a144.318 144.318 0 0 1 3.841 3.545c-7.438 7.987-14.787 17.08-21.808 26.988c-12.04 1.116-23.565 2.908-34.161 5.309a160.342 160.342 0 0 1-1.76-7.887Zm110.427 27.268a347.8 347.8 0 0 0-7.785-12.803c8.168 1.033 15.994 2.404 23.343 4.08c-2.206 7.072-4.956 14.465-8.193 22.045a381.151 381.151 0 0 0-7.365-13.322Zm-45.032-43.861c5.044 5.465 10.096 11.566 15.065 18.186a322.04 322.04 0 0 0-30.257-.006c4.974-6.559 10.069-12.652 15.192-18.18ZM82.802 87.83a323.167 323.167 0 0 0-7.227 13.238c-3.184-7.553-5.909-14.98-8.134-22.152c7.304-1.634 15.093-2.97 23.209-3.984a321.524 321.524 0 0 0-7.848 12.897Zm8.081 65.352c-8.385-.936-16.291-2.203-23.593-3.793c2.26-7.3 5.045-14.885 8.298-22.6a321.187 321.187 0 0 0 7.257 13.246c2.594 4.48 5.28 8.868 8.038 13.147Zm37.542 31.03c-5.184-5.592-10.354-11.779-15.403-18.433c4.902.192 9.899.29 14.978.29c5.218 0 10.376-.117 15.453-.343c-4.985 6.774-10.018 12.97-15.028 18.486Zm52.198-57.817c3.422 7.8 6.306 15.345 8.596 22.52c-7.422 1.694-15.436 3.058-23.88 4.071a382.417 382.417 0 0 0 7.859-13.026a347.403 347.403 0 0 0 7.425-13.565Zm-16.898 8.101a358.557 358.557 0 0 1-12.281 19.815a329.4 329.4 0 0 1-23.444.823c-7.967 0-15.716-.248-23.178-.732a310.202 310.202 0 0 1-12.513-19.846h.001a307.41 307.41 0 0 1-10.923-20.627a310.278 310.278 0 0 1 10.89-20.637l-.001.001a307.318 307.318 0 0 1 12.413-19.761c7.613-.576 15.42-.876 23.31-.876H128c7.926 0 15.743.303 23.354.883a329.357 329.357 0 0 1 12.335 19.695a358.489 358.489 0 0 1 11.036 20.54a329.472 329.472 0 0 1-11 20.722Zm22.56-122.124c8.572 4.944 11.906 24.881 6.52 51.026c-.344 1.668-.73 3.367-1.15 5.09c-10.622-2.452-22.155-4.275-34.23-5.408c-7.034-10.017-14.323-19.124-21.64-27.008a160.789 160.789 0 0 1 5.888-5.4c18.9-16.447 36.564-22.941 
44.612-18.3ZM128 90.808c12.625 0 22.86 10.235 22.86 22.86s-10.235 22.86-22.86 22.86s-22.86-10.235-22.86-22.86s10.235-22.86 22.86-22.86Z"></path></svg>
|
||||
|
After Width: | Height: | Size: 4.0 KiB |
1
furumi-node-player/client/src/assets/vite.svg
Normal file
1
furumi-node-player/client/src/assets/vite.svg
Normal file
File diff suppressed because one or more lines are too long
|
After Width: | Height: | Size: 8.5 KiB |
30
furumi-node-player/client/src/components/Breadcrumbs.tsx
Normal file
30
furumi-node-player/client/src/components/Breadcrumbs.tsx
Normal file
@@ -0,0 +1,30 @@
|
||||
type Crumb = {
|
||||
label: string
|
||||
action?: () => void
|
||||
}
|
||||
|
||||
type BreadcrumbsProps = {
|
||||
items: Crumb[]
|
||||
}
|
||||
|
||||
export function Breadcrumbs({ items }: BreadcrumbsProps) {
|
||||
if (!items.length) return null
|
||||
|
||||
return (
|
||||
<div className="breadcrumb">
|
||||
{items.map((item, index) => {
|
||||
const isLast = index === items.length - 1
|
||||
return (
|
||||
<span key={`${item.label}-${index}`}>
|
||||
{!isLast && item.action ? (
|
||||
<span onClick={item.action}>{item.label}</span>
|
||||
) : (
|
||||
<span>{item.label}</span>
|
||||
)}
|
||||
{!isLast ? ' / ' : ''}
|
||||
</span>
|
||||
)
|
||||
})}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
54
furumi-node-player/client/src/components/LibraryList.tsx
Normal file
54
furumi-node-player/client/src/components/LibraryList.tsx
Normal file
@@ -0,0 +1,54 @@
|
||||
import type { MouseEvent } from 'react'
|
||||
|
||||
type LibraryListButton = {
|
||||
title: string
|
||||
onClick: (ev: MouseEvent<HTMLButtonElement>) => void
|
||||
}
|
||||
|
||||
type LibraryListItem = {
|
||||
key: string
|
||||
className: string
|
||||
icon: string
|
||||
name: string
|
||||
detail?: string
|
||||
nameClassName?: string
|
||||
onClick: () => void
|
||||
button?: LibraryListButton
|
||||
}
|
||||
|
||||
type LibraryListProps = {
|
||||
loading: boolean
|
||||
error: string | null
|
||||
items: LibraryListItem[]
|
||||
}
|
||||
|
||||
export function LibraryList({ loading, error, items }: LibraryListProps) {
|
||||
if (loading) {
|
||||
return (
|
||||
<div style={{ padding: '2rem', textAlign: 'center' }}>
|
||||
<div className="spinner" />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
if (error) {
|
||||
return <div style={{ padding: '1rem', color: 'var(--danger)' }}>{error}</div>
|
||||
}
|
||||
|
||||
return (
|
||||
<>
|
||||
{items.map((item) => (
|
||||
<div key={item.key} className={item.className} onClick={item.onClick}>
|
||||
<span className="icon">{item.icon}</span>
|
||||
<span className={item.nameClassName ?? 'name'}>{item.name}</span>
|
||||
{item.detail ? <span className="detail">{item.detail}</span> : null}
|
||||
{item.button ? (
|
||||
<button className="add-btn" title={item.button.title} onClick={item.button.onClick}>
|
||||
➕
|
||||
</button>
|
||||
) : null}
|
||||
</div>
|
||||
))}
|
||||
</>
|
||||
)
|
||||
}
|
||||
52
furumi-node-player/client/src/components/NowPlaying.tsx
Normal file
52
furumi-node-player/client/src/components/NowPlaying.tsx
Normal file
@@ -0,0 +1,52 @@
|
||||
import { useEffect, useState } from 'react'
|
||||
import type { QueueItem } from './QueueList'
|
||||
|
||||
function Cover({ src }: { src: string }) {
|
||||
const [errored, setErrored] = useState(false)
|
||||
|
||||
useEffect(() => {
|
||||
setErrored(false)
|
||||
}, [src])
|
||||
|
||||
if (errored) return <>🎵</>
|
||||
return <img src={src} alt="" onError={() => setErrored(true)} />
|
||||
}
|
||||
|
||||
export function NowPlaying({ apiRoot, track }: { apiRoot: string; track: QueueItem | null }) {
|
||||
if (!track) {
|
||||
return (
|
||||
<div className="np-info">
|
||||
<div className="np-cover" id="npCover">
|
||||
🎵
|
||||
</div>
|
||||
<div className="np-text">
|
||||
<div className="np-title" id="npTitle">
|
||||
Nothing playing
|
||||
</div>
|
||||
<div className="np-artist" id="npArtist">
|
||||
—
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
const coverUrl = `${apiRoot}/tracks/${track.slug}/cover`
|
||||
|
||||
return (
|
||||
<div className="np-info">
|
||||
<div className="np-cover" id="npCover">
|
||||
<Cover src={coverUrl} />
|
||||
</div>
|
||||
<div className="np-text">
|
||||
<div className="np-title" id="npTitle">
|
||||
{track.title}
|
||||
</div>
|
||||
<div className="np-artist" id="npArtist">
|
||||
{track.artist || '—'}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
144
furumi-node-player/client/src/components/QueueList.tsx
Normal file
144
furumi-node-player/client/src/components/QueueList.tsx
Normal file
@@ -0,0 +1,144 @@
|
||||
import { useEffect, useRef, useState } from 'react'
|
||||
|
||||
export type QueueItem = {
|
||||
slug: string
|
||||
title: string
|
||||
artist: string
|
||||
album_slug: string | null
|
||||
duration: number | null
|
||||
}
|
||||
|
||||
type QueueListProps = {
|
||||
apiRoot: string
|
||||
queue: QueueItem[]
|
||||
order: number[]
|
||||
playingOrigIdx: number
|
||||
scrollSignal: number
|
||||
onPlay: (origIdx: number) => void
|
||||
onRemove: (origIdx: number) => void
|
||||
onMove: (fromPos: number, toPos: number) => void
|
||||
}
|
||||
|
||||
function pad(n: number) {
|
||||
return String(n).padStart(2, '0')
|
||||
}
|
||||
|
||||
function fmt(secs: number) {
|
||||
if (!secs || Number.isNaN(secs)) return '0:00'
|
||||
const s = Math.floor(secs)
|
||||
const m = Math.floor(s / 60)
|
||||
const h = Math.floor(m / 60)
|
||||
if (h > 0) return `${h}:${pad(m % 60)}:${pad(s % 60)}`
|
||||
return `${m}:${pad(s % 60)}`
|
||||
}
|
||||
|
||||
function Cover({ src }: { src: string }) {
|
||||
const [errored, setErrored] = useState(false)
|
||||
useEffect(() => {
|
||||
setErrored(false)
|
||||
}, [src])
|
||||
|
||||
if (errored) return <>🎵</>
|
||||
return <img src={src} alt="" onError={() => setErrored(true)} />
|
||||
}
|
||||
|
||||
export function QueueList({
|
||||
apiRoot,
|
||||
queue,
|
||||
order,
|
||||
playingOrigIdx,
|
||||
scrollSignal,
|
||||
onPlay,
|
||||
onRemove,
|
||||
onMove,
|
||||
}: QueueListProps) {
|
||||
const playingRef = useRef<HTMLDivElement | null>(null)
|
||||
const [draggingPos, setDraggingPos] = useState<number | null>(null)
|
||||
const [dragOverPos, setDragOverPos] = useState<number | null>(null)
|
||||
|
||||
useEffect(() => {
|
||||
if (playingRef.current) {
|
||||
playingRef.current.scrollIntoView({ behavior: 'smooth', block: 'nearest' })
|
||||
}
|
||||
}, [playingOrigIdx, scrollSignal])
|
||||
|
||||
if (!queue.length) {
|
||||
return (
|
||||
<div className="queue-empty">
|
||||
<div className="empty-icon">🎵</div>
|
||||
<div>Select an album to start</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<>
|
||||
{order.map((origIdx, pos) => {
|
||||
const t = queue[origIdx]
|
||||
if (!t) return null
|
||||
|
||||
const isPlaying = origIdx === playingOrigIdx
|
||||
const coverSrc = t.album_slug ? `${apiRoot}/tracks/${t.slug}/cover` : ''
|
||||
const dur = t.duration ? fmt(t.duration) : ''
|
||||
const isDragging = draggingPos === pos
|
||||
const isDragOver = dragOverPos === pos
|
||||
|
||||
return (
|
||||
<div
|
||||
key={`${t.slug}:${pos}`}
|
||||
ref={isPlaying ? playingRef : null}
|
||||
className={`queue-item${isPlaying ? ' playing' : ''}${isDragging ? ' dragging' : ''}${
|
||||
isDragOver ? ' drag-over' : ''
|
||||
}`}
|
||||
draggable
|
||||
onClick={() => onPlay(origIdx)}
|
||||
onDragStart={(e) => {
|
||||
setDraggingPos(pos)
|
||||
e.dataTransfer?.setData('text/plain', String(pos))
|
||||
}}
|
||||
onDragEnd={() => {
|
||||
setDraggingPos(null)
|
||||
setDragOverPos(null)
|
||||
}}
|
||||
onDragOver={(e) => {
|
||||
e.preventDefault()
|
||||
}}
|
||||
onDragEnter={() => {
|
||||
setDragOverPos(pos)
|
||||
}}
|
||||
onDragLeave={() => {
|
||||
setDragOverPos((cur) => (cur === pos ? null : cur))
|
||||
}}
|
||||
onDrop={(e) => {
|
||||
e.preventDefault()
|
||||
setDragOverPos(null)
|
||||
const from = parseInt(e.dataTransfer?.getData('text/plain') ?? '', 10)
|
||||
if (!Number.isNaN(from)) onMove(from, pos)
|
||||
setDraggingPos(null)
|
||||
}}
|
||||
>
|
||||
<span className="qi-index">{isPlaying ? '' : pos + 1}</span>
|
||||
<div className="qi-cover">
|
||||
{coverSrc ? <Cover src={coverSrc} /> : <>🎵</>}
|
||||
</div>
|
||||
<div className="qi-info">
|
||||
<div className="qi-title">{t.title}</div>
|
||||
<div className="qi-artist">{t.artist || ''}</div>
|
||||
</div>
|
||||
<span className="qi-dur">{dur}</span>
|
||||
<button
|
||||
className="qi-remove"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation()
|
||||
onRemove(origIdx)
|
||||
}}
|
||||
>
|
||||
✕
|
||||
</button>
|
||||
</div>
|
||||
)
|
||||
})}
|
||||
</>
|
||||
)
|
||||
}
|
||||
|
||||
30
furumi-node-player/client/src/components/SearchDropdown.tsx
Normal file
30
furumi-node-player/client/src/components/SearchDropdown.tsx
Normal file
@@ -0,0 +1,30 @@
|
||||
type SearchResultItem = {
|
||||
result_type: string
|
||||
slug: string
|
||||
name: string
|
||||
detail?: string
|
||||
}
|
||||
|
||||
type SearchDropdownProps = {
|
||||
isOpen: boolean
|
||||
results: SearchResultItem[]
|
||||
onSelect: (type: string, slug: string) => void
|
||||
}
|
||||
|
||||
export function SearchDropdown({ isOpen, results, onSelect }: SearchDropdownProps) {
|
||||
return (
|
||||
<div className={`search-dropdown${isOpen ? ' open' : ''}`}>
|
||||
{results.map((r) => (
|
||||
<div
|
||||
key={`${r.result_type}:${r.slug}`}
|
||||
className="search-result"
|
||||
onClick={() => onSelect(r.result_type, r.slug)}
|
||||
>
|
||||
<span className="sr-type">{r.result_type}</span>
|
||||
{r.name}
|
||||
{r.detail ? <span className="sr-detail">{r.detail}</span> : null}
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
754
furumi-node-player/client/src/furumi-player.css
Normal file
754
furumi-node-player/client/src/furumi-player.css
Normal file
@@ -0,0 +1,754 @@
|
||||
@import url('https://fonts.googleapis.com/css2?family=Inter:wght@300;400;500;600;700&display=swap');
|
||||
|
||||
.furumi-root,
|
||||
.furumi-root * {
|
||||
box-sizing: border-box;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
.furumi-root {
|
||||
height: 100%;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
font-family: 'Inter', system-ui, sans-serif;
|
||||
}
|
||||
|
||||
:root {
|
||||
--bg-base: #0a0c12;
|
||||
--bg-panel: #111520;
|
||||
--bg-card: #161d2e;
|
||||
--bg-hover: #1e2740;
|
||||
--bg-active: #252f4a;
|
||||
--border: #1f2c45;
|
||||
--accent: #7c6af7;
|
||||
--accent-dim: #5a4fcf;
|
||||
--accent-glow: rgba(124, 106, 247, 0.3);
|
||||
--text: #e2e8f0;
|
||||
--text-muted: #64748b;
|
||||
--text-dim: #94a3b8;
|
||||
--success: #34d399;
|
||||
--danger: #f87171;
|
||||
}
|
||||
|
||||
.header {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
padding: 0.75rem 1.5rem;
|
||||
background: var(--bg-panel);
|
||||
border-bottom: 1px solid var(--border);
|
||||
flex-shrink: 0;
|
||||
z-index: 10;
|
||||
}
|
||||
|
||||
.header-logo {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 0.75rem;
|
||||
font-weight: 700;
|
||||
font-size: 1.1rem;
|
||||
}
|
||||
|
||||
.header-logo svg {
|
||||
width: 22px;
|
||||
height: 22px;
|
||||
}
|
||||
|
||||
.header-version {
|
||||
font-size: 0.7rem;
|
||||
color: var(--text-muted);
|
||||
background: rgba(255, 255, 255, 0.05);
|
||||
padding: 0.1rem 0.4rem;
|
||||
border-radius: 4px;
|
||||
margin-left: 0.25rem;
|
||||
font-weight: 500;
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
.btn-menu {
|
||||
display: none;
|
||||
background: none;
|
||||
border: none;
|
||||
color: var(--text);
|
||||
font-size: 1.2rem;
|
||||
cursor: pointer;
|
||||
padding: 0.1rem 0.5rem;
|
||||
margin-right: 0.2rem;
|
||||
border-radius: 4px;
|
||||
}
|
||||
|
||||
.search-wrap {
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.search-wrap input {
|
||||
background: var(--bg-card);
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 6px;
|
||||
padding: 6px 12px 6px 30px;
|
||||
color: var(--text);
|
||||
font-size: 13px;
|
||||
width: 220px;
|
||||
font-family: inherit;
|
||||
}
|
||||
|
||||
.search-wrap::before {
|
||||
content: '🔍';
|
||||
position: absolute;
|
||||
left: 8px;
|
||||
top: 50%;
|
||||
transform: translateY(-50%);
|
||||
font-size: 12px;
|
||||
}
|
||||
|
||||
.search-dropdown {
|
||||
position: absolute;
|
||||
top: 100%;
|
||||
left: 0;
|
||||
right: 0;
|
||||
background: var(--bg-card);
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 0 0 6px 6px;
|
||||
max-height: 300px;
|
||||
overflow-y: auto;
|
||||
z-index: 50;
|
||||
display: none;
|
||||
}
|
||||
|
||||
.search-dropdown.open {
|
||||
display: block;
|
||||
}
|
||||
|
||||
.search-result {
|
||||
padding: 8px 12px;
|
||||
cursor: pointer;
|
||||
font-size: 13px;
|
||||
border-bottom: 1px solid var(--border);
|
||||
}
|
||||
|
||||
.search-result:hover {
|
||||
background: var(--bg-hover);
|
||||
}
|
||||
|
||||
.search-result .sr-type {
|
||||
font-size: 10px;
|
||||
color: var(--text-muted);
|
||||
text-transform: uppercase;
|
||||
margin-right: 6px;
|
||||
}
|
||||
|
||||
.search-result .sr-detail {
|
||||
font-size: 11px;
|
||||
color: var(--text-muted);
|
||||
margin-left: 4px;
|
||||
}
|
||||
|
||||
.main {
|
||||
display: flex;
|
||||
flex: 1;
|
||||
overflow: hidden;
|
||||
position: relative;
|
||||
background: var(--bg-base);
|
||||
color: var(--text);
|
||||
}
|
||||
|
||||
.sidebar-overlay {
|
||||
display: none;
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
background: rgba(0, 0, 0, 0.6);
|
||||
z-index: 20;
|
||||
}
|
||||
|
||||
.sidebar-overlay.show {
|
||||
display: block;
|
||||
}
|
||||
|
||||
.sidebar {
|
||||
width: 280px;
|
||||
min-width: 200px;
|
||||
max-width: 400px;
|
||||
flex-shrink: 0;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
background: var(--bg-panel);
|
||||
border-right: 1px solid var(--border);
|
||||
overflow: hidden;
|
||||
resize: horizontal;
|
||||
}
|
||||
|
||||
.sidebar-header {
|
||||
padding: 0.85rem 1rem 0.6rem;
|
||||
font-size: 0.7rem;
|
||||
font-weight: 600;
|
||||
letter-spacing: 0.08em;
|
||||
text-transform: uppercase;
|
||||
color: var(--text-muted);
|
||||
border-bottom: 1px solid var(--border);
|
||||
flex-shrink: 0;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 0.5rem;
|
||||
}
|
||||
|
||||
.breadcrumb {
|
||||
padding: 0.5rem 1rem;
|
||||
font-size: 0.78rem;
|
||||
color: var(--text-muted);
|
||||
white-space: nowrap;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
border-bottom: 1px solid var(--border);
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.breadcrumb span {
|
||||
color: var(--accent);
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.breadcrumb span:hover {
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
.file-list {
|
||||
flex: 1;
|
||||
overflow-y: auto;
|
||||
padding: 0.3rem 0;
|
||||
}
|
||||
|
||||
.file-list::-webkit-scrollbar {
|
||||
width: 4px;
|
||||
}
|
||||
|
||||
.file-list::-webkit-scrollbar-thumb {
|
||||
background: var(--border);
|
||||
border-radius: 4px;
|
||||
}
|
||||
|
||||
.file-item {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 0.6rem;
|
||||
padding: 0.45rem 1rem;
|
||||
cursor: pointer;
|
||||
font-size: 0.875rem;
|
||||
color: var(--text-dim);
|
||||
user-select: none;
|
||||
transition: background 0.12s;
|
||||
}
|
||||
|
||||
.file-item:hover {
|
||||
background: var(--bg-hover);
|
||||
color: var(--text);
|
||||
}
|
||||
|
||||
.file-item.dir {
|
||||
color: var(--accent);
|
||||
}
|
||||
|
||||
.file-item .icon {
|
||||
font-size: 0.95rem;
|
||||
flex-shrink: 0;
|
||||
opacity: 0.8;
|
||||
}
|
||||
|
||||
.file-item .name {
|
||||
flex: 1;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.file-item .detail {
|
||||
font-size: 0.7rem;
|
||||
color: var(--text-muted);
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.file-item .add-btn {
|
||||
opacity: 0;
|
||||
font-size: 0.75rem;
|
||||
background: var(--bg-hover);
|
||||
color: var(--text);
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 4px;
|
||||
padding: 0.2rem 0.4rem;
|
||||
cursor: pointer;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.file-item:hover .add-btn {
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
.file-item .add-btn:hover {
|
||||
background: var(--accent);
|
||||
color: #fff;
|
||||
border-color: var(--accent);
|
||||
}
|
||||
|
||||
.queue-panel {
|
||||
flex: 1;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
overflow: hidden;
|
||||
background: var(--bg-base);
|
||||
}
|
||||
|
||||
.queue-header {
|
||||
padding: 0.85rem 1.25rem 0.6rem;
|
||||
font-size: 0.7rem;
|
||||
font-weight: 600;
|
||||
letter-spacing: 0.08em;
|
||||
text-transform: uppercase;
|
||||
color: var(--text-muted);
|
||||
border-bottom: 1px solid var(--border);
|
||||
flex-shrink: 0;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
}
|
||||
|
||||
.queue-actions {
|
||||
display: flex;
|
||||
gap: 0.5rem;
|
||||
}
|
||||
|
||||
.queue-btn {
|
||||
font-size: 0.7rem;
|
||||
padding: 0.2rem 0.55rem;
|
||||
background: none;
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 5px;
|
||||
color: var(--text-muted);
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.queue-btn:hover {
|
||||
border-color: var(--accent);
|
||||
color: var(--accent);
|
||||
}
|
||||
|
||||
.queue-btn.active {
|
||||
background: var(--accent);
|
||||
border-color: var(--accent);
|
||||
color: #fff;
|
||||
}
|
||||
|
||||
.queue-list {
|
||||
flex: 1;
|
||||
overflow-y: auto;
|
||||
padding: 0.3rem 0;
|
||||
}
|
||||
|
||||
.queue-list::-webkit-scrollbar {
|
||||
width: 4px;
|
||||
}
|
||||
|
||||
.queue-list::-webkit-scrollbar-thumb {
|
||||
background: var(--border);
|
||||
border-radius: 4px;
|
||||
}
|
||||
|
||||
.queue-item {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 0.75rem;
|
||||
padding: 0.55rem 1.25rem;
|
||||
cursor: pointer;
|
||||
border-left: 2px solid transparent;
|
||||
transition: background 0.12s;
|
||||
}
|
||||
|
||||
.queue-item:hover {
|
||||
background: var(--bg-hover);
|
||||
}
|
||||
|
||||
.queue-item.playing {
|
||||
background: var(--bg-active);
|
||||
border-left-color: var(--accent);
|
||||
}
|
||||
|
||||
.queue-item.playing .qi-title {
|
||||
color: var(--accent);
|
||||
}
|
||||
|
||||
.queue-item .qi-index {
|
||||
font-size: 0.75rem;
|
||||
color: var(--text-muted);
|
||||
width: 1.5rem;
|
||||
text-align: right;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.queue-item.playing .qi-index::before {
|
||||
content: '▶';
|
||||
font-size: 0.6rem;
|
||||
color: var(--accent);
|
||||
}
|
||||
|
||||
.queue-item .qi-cover {
|
||||
width: 36px;
|
||||
height: 36px;
|
||||
border-radius: 5px;
|
||||
background: var(--bg-card);
|
||||
flex-shrink: 0;
|
||||
overflow: hidden;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
font-size: 1.1rem;
|
||||
}
|
||||
|
||||
.queue-item .qi-cover img {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
object-fit: cover;
|
||||
}
|
||||
|
||||
.queue-item .qi-info {
|
||||
flex: 1;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.queue-item .qi-title {
|
||||
font-size: 0.875rem;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.queue-item .qi-artist {
|
||||
font-size: 0.75rem;
|
||||
color: var(--text-muted);
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.queue-item .qi-dur {
|
||||
font-size: 0.75rem;
|
||||
color: var(--text-muted);
|
||||
margin-left: auto;
|
||||
margin-right: 0.5rem;
|
||||
}
|
||||
|
||||
.qi-remove {
|
||||
background: none;
|
||||
border: none;
|
||||
font-size: 0.9rem;
|
||||
color: var(--text-muted);
|
||||
cursor: pointer;
|
||||
padding: 0.3rem;
|
||||
border-radius: 4px;
|
||||
opacity: 0;
|
||||
}
|
||||
|
||||
.queue-item:hover .qi-remove {
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
.qi-remove:hover {
|
||||
background: rgba(248, 113, 113, 0.15);
|
||||
color: var(--danger);
|
||||
}
|
||||
|
||||
.queue-item.dragging {
|
||||
opacity: 0.5;
|
||||
}
|
||||
|
||||
.queue-item.drag-over {
|
||||
border-top: 2px solid var(--accent);
|
||||
margin-top: -2px;
|
||||
}
|
||||
|
||||
.queue-empty {
|
||||
flex: 1;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
color: var(--text-muted);
|
||||
font-size: 0.875rem;
|
||||
gap: 0.5rem;
|
||||
padding: 2rem;
|
||||
}
|
||||
|
||||
.queue-empty .empty-icon {
|
||||
font-size: 2.5rem;
|
||||
opacity: 0.3;
|
||||
}
|
||||
|
||||
.player-bar {
|
||||
background: var(--bg-panel);
|
||||
border-top: 1px solid var(--border);
|
||||
padding: 0.9rem 1.5rem;
|
||||
flex-shrink: 0;
|
||||
display: grid;
|
||||
grid-template-columns: 1fr 2fr 1fr;
|
||||
align-items: center;
|
||||
gap: 1rem;
|
||||
}
|
||||
|
||||
.np-info {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 0.75rem;
|
||||
min-width: 0;
|
||||
}
|
||||
|
||||
.np-cover {
|
||||
width: 44px;
|
||||
height: 44px;
|
||||
border-radius: 6px;
|
||||
background: var(--bg-card);
|
||||
flex-shrink: 0;
|
||||
overflow: hidden;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
font-size: 1.3rem;
|
||||
}
|
||||
|
||||
.np-cover img {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
object-fit: cover;
|
||||
}
|
||||
|
||||
.np-text {
|
||||
min-width: 0;
|
||||
}
|
||||
|
||||
.np-title {
|
||||
font-size: 0.875rem;
|
||||
font-weight: 500;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.np-artist {
|
||||
font-size: 0.75rem;
|
||||
color: var(--text-muted);
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.controls {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
gap: 0.5rem;
|
||||
}
|
||||
|
||||
.ctrl-btns {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 0.5rem;
|
||||
}
|
||||
|
||||
.ctrl-btn {
|
||||
background: none;
|
||||
border: none;
|
||||
color: var(--text-dim);
|
||||
cursor: pointer;
|
||||
padding: 0.35rem;
|
||||
border-radius: 50%;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
font-size: 1rem;
|
||||
}
|
||||
|
||||
.ctrl-btn:hover {
|
||||
color: var(--text);
|
||||
background: var(--bg-hover);
|
||||
}
|
||||
|
||||
.ctrl-btn.active {
|
||||
color: var(--accent);
|
||||
}
|
||||
|
||||
.ctrl-btn-main {
|
||||
width: 38px;
|
||||
height: 38px;
|
||||
background: var(--accent);
|
||||
color: #fff !important;
|
||||
font-size: 1.1rem;
|
||||
box-shadow: 0 0 14px var(--accent-glow);
|
||||
}
|
||||
|
||||
.ctrl-btn-main:hover {
|
||||
background: var(--accent-dim) !important;
|
||||
}
|
||||
|
||||
.progress-row {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 0.6rem;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.time {
|
||||
font-size: 0.7rem;
|
||||
color: var(--text-muted);
|
||||
flex-shrink: 0;
|
||||
font-variant-numeric: tabular-nums;
|
||||
min-width: 2.5rem;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.progress-bar {
|
||||
flex: 1;
|
||||
height: 4px;
|
||||
background: var(--bg-hover);
|
||||
border-radius: 2px;
|
||||
cursor: pointer;
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.progress-fill {
|
||||
height: 100%;
|
||||
background: var(--accent);
|
||||
border-radius: 2px;
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
.progress-fill::after {
|
||||
content: '';
|
||||
position: absolute;
|
||||
right: -5px;
|
||||
top: 50%;
|
||||
transform: translateY(-50%);
|
||||
width: 10px;
|
||||
height: 10px;
|
||||
border-radius: 50%;
|
||||
background: var(--accent);
|
||||
box-shadow: 0 0 6px var(--accent-glow);
|
||||
opacity: 0;
|
||||
transition: opacity 0.15s;
|
||||
}
|
||||
|
||||
.progress-bar:hover .progress-fill::after {
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
.volume-row {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 0.5rem;
|
||||
justify-content: flex-end;
|
||||
}
|
||||
|
||||
.vol-icon {
|
||||
font-size: 0.9rem;
|
||||
color: var(--text-muted);
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.volume-slider {
|
||||
-webkit-appearance: none;
|
||||
appearance: none;
|
||||
width: 80px;
|
||||
height: 4px;
|
||||
border-radius: 2px;
|
||||
background: var(--bg-hover);
|
||||
cursor: pointer;
|
||||
outline: none;
|
||||
}
|
||||
|
||||
.volume-slider::-webkit-slider-thumb {
|
||||
-webkit-appearance: none;
|
||||
width: 12px;
|
||||
height: 12px;
|
||||
border-radius: 50%;
|
||||
background: var(--accent);
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.spinner {
|
||||
display: inline-block;
|
||||
width: 14px;
|
||||
height: 14px;
|
||||
border: 2px solid var(--border);
|
||||
border-top-color: var(--accent);
|
||||
border-radius: 50%;
|
||||
animation: spin 0.7s linear infinite;
|
||||
}
|
||||
|
||||
.toast {
|
||||
position: fixed;
|
||||
bottom: 90px;
|
||||
right: 1.5rem;
|
||||
background: var(--bg-card);
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 8px;
|
||||
padding: 0.6rem 1rem;
|
||||
font-size: 0.8rem;
|
||||
color: var(--text-dim);
|
||||
box-shadow: 0 8px 24px rgba(0, 0, 0, 0.4);
|
||||
opacity: 0;
|
||||
transform: translateY(8px);
|
||||
transition: all 0.25s;
|
||||
pointer-events: none;
|
||||
z-index: 100;
|
||||
}
|
||||
|
||||
.toast.show {
|
||||
opacity: 1;
|
||||
transform: translateY(0);
|
||||
}
|
||||
|
||||
@keyframes spin {
|
||||
to {
|
||||
transform: rotate(360deg);
|
||||
}
|
||||
}
|
||||
|
||||
@media (max-width: 768px) {
|
||||
.btn-menu {
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
.header {
|
||||
padding: 0.75rem 1rem;
|
||||
}
|
||||
|
||||
.sidebar {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
left: -100%;
|
||||
width: 85%;
|
||||
max-width: 320px;
|
||||
z-index: 30;
|
||||
transition: left 0.3s;
|
||||
box-shadow: 4px 0 20px rgba(0, 0, 0, 0.6);
|
||||
}
|
||||
|
||||
.sidebar.open {
|
||||
left: 0;
|
||||
}
|
||||
|
||||
.player-bar {
|
||||
grid-template-columns: 1fr;
|
||||
gap: 0.75rem;
|
||||
padding: 0.75rem 1rem;
|
||||
}
|
||||
|
||||
.volume-row {
|
||||
display: none;
|
||||
}
|
||||
|
||||
.search-wrap input {
|
||||
width: 140px;
|
||||
}
|
||||
}
|
||||
|
||||
12
furumi-node-player/client/src/furumiApi.ts
Normal file
12
furumi-node-player/client/src/furumiApi.ts
Normal file
@@ -0,0 +1,12 @@
|
||||
export type FurumiApiClient = (path: string) => Promise<unknown | null>
|
||||
|
||||
export function createFurumiApiClient(apiRoot: string): FurumiApiClient {
|
||||
const API = apiRoot
|
||||
|
||||
return async function api(path: string) {
|
||||
const r = await fetch(API + path)
|
||||
if (!r.ok) return null
|
||||
return r.json()
|
||||
}
|
||||
}
|
||||
|
||||
15
furumi-node-player/client/src/index.css
Normal file
15
furumi-node-player/client/src/index.css
Normal file
@@ -0,0 +1,15 @@
|
||||
body {
|
||||
margin: 0;
|
||||
font-family: Inter, -apple-system, BlinkMacSystemFont, 'Segoe UI', sans-serif;
|
||||
color: #0f172a;
|
||||
background-color: #f3f6fb;
|
||||
}
|
||||
|
||||
* {
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
h1 {
|
||||
margin-top: 0;
|
||||
margin-bottom: 10px;
|
||||
}
|
||||
10
furumi-node-player/client/src/main.tsx
Normal file
10
furumi-node-player/client/src/main.tsx
Normal file
@@ -0,0 +1,10 @@
|
||||
import { StrictMode } from 'react'
|
||||
import { createRoot } from 'react-dom/client'
|
||||
import './index.css'
|
||||
import App from './App.tsx'
|
||||
|
||||
createRoot(document.getElementById('root')!).render(
|
||||
<StrictMode>
|
||||
<App />
|
||||
</StrictMode>,
|
||||
)
|
||||
28
furumi-node-player/client/tsconfig.app.json
Normal file
28
furumi-node-player/client/tsconfig.app.json
Normal file
@@ -0,0 +1,28 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"tsBuildInfoFile": "./node_modules/.tmp/tsconfig.app.tsbuildinfo",
|
||||
"target": "ES2023",
|
||||
"useDefineForClassFields": true,
|
||||
"lib": ["ES2023", "DOM", "DOM.Iterable"],
|
||||
"module": "ESNext",
|
||||
"types": ["vite/client"],
|
||||
"skipLibCheck": true,
|
||||
|
||||
/* Bundler mode */
|
||||
"moduleResolution": "bundler",
|
||||
"allowImportingTsExtensions": true,
|
||||
"verbatimModuleSyntax": true,
|
||||
"moduleDetection": "force",
|
||||
"noEmit": true,
|
||||
"jsx": "react-jsx",
|
||||
|
||||
/* Linting */
|
||||
"strict": true,
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"erasableSyntaxOnly": true,
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"noUncheckedSideEffectImports": true
|
||||
},
|
||||
"include": ["src"]
|
||||
}
|
||||
7
furumi-node-player/client/tsconfig.json
Normal file
7
furumi-node-player/client/tsconfig.json
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"files": [],
|
||||
"references": [
|
||||
{ "path": "./tsconfig.app.json" },
|
||||
{ "path": "./tsconfig.node.json" }
|
||||
]
|
||||
}
|
||||
26
furumi-node-player/client/tsconfig.node.json
Normal file
26
furumi-node-player/client/tsconfig.node.json
Normal file
@@ -0,0 +1,26 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"tsBuildInfoFile": "./node_modules/.tmp/tsconfig.node.tsbuildinfo",
|
||||
"target": "ES2023",
|
||||
"lib": ["ES2023"],
|
||||
"module": "ESNext",
|
||||
"types": ["node"],
|
||||
"skipLibCheck": true,
|
||||
|
||||
/* Bundler mode */
|
||||
"moduleResolution": "bundler",
|
||||
"allowImportingTsExtensions": true,
|
||||
"verbatimModuleSyntax": true,
|
||||
"moduleDetection": "force",
|
||||
"noEmit": true,
|
||||
|
||||
/* Linting */
|
||||
"strict": true,
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"erasableSyntaxOnly": true,
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"noUncheckedSideEffectImports": true
|
||||
},
|
||||
"include": ["vite.config.ts"]
|
||||
}
|
||||
15
furumi-node-player/client/vite.config.ts
Normal file
15
furumi-node-player/client/vite.config.ts
Normal file
@@ -0,0 +1,15 @@
|
||||
import { defineConfig } from 'vite'
|
||||
import react from '@vitejs/plugin-react'
|
||||
|
||||
// https://vite.dev/config/
|
||||
export default defineConfig({
|
||||
plugins: [react()],
|
||||
server: {
|
||||
proxy: {
|
||||
'/api': {
|
||||
target: 'http://localhost:3001',
|
||||
changeOrigin: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
12
furumi-node-player/server/.env.example
Normal file
12
furumi-node-player/server/.env.example
Normal file
@@ -0,0 +1,12 @@
|
||||
PORT=3001
|
||||
BASE_URL=http://localhost:3001
|
||||
FRONTEND_ORIGIN=http://localhost:5173
|
||||
SESSION_SECRET=super-long-random-secret
|
||||
|
||||
# Если true/1/on/yes — сервер стартует без OIDC и не требует авторизации.
|
||||
DISABLE_AUTH=false
|
||||
|
||||
OIDC_ISSUER_BASE_URL=https://your-issuer.example.com
|
||||
OIDC_CLIENT_ID=your-client-id
|
||||
OIDC_CLIENT_SECRET=your-client-secret
|
||||
OIDC_SCOPE="openid profile email"
|
||||
24
furumi-node-player/server/.gitignore
vendored
Normal file
24
furumi-node-player/server/.gitignore
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
# Logs
|
||||
logs
|
||||
*.log
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
pnpm-debug.log*
|
||||
lerna-debug.log*
|
||||
|
||||
node_modules
|
||||
dist
|
||||
dist-ssr
|
||||
*.local
|
||||
|
||||
# Editor directories and files
|
||||
.vscode/*
|
||||
!.vscode/extensions.json
|
||||
.idea
|
||||
.DS_Store
|
||||
*.suo
|
||||
*.ntvs*
|
||||
*.njsproj
|
||||
*.sln
|
||||
*.sw?
|
||||
3355
furumi-node-player/server/package-lock.json
generated
Normal file
3355
furumi-node-player/server/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
28
furumi-node-player/server/package.json
Normal file
28
furumi-node-player/server/package.json
Normal file
@@ -0,0 +1,28 @@
|
||||
{
|
||||
"name": "server",
|
||||
"version": "1.0.0",
|
||||
"type": "module",
|
||||
"main": "dist/index.js",
|
||||
"scripts": {
|
||||
"dev": "tsx watch src/index.ts",
|
||||
"build": "tsc -p tsconfig.json",
|
||||
"start": "node dist/index.js"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"description": "",
|
||||
"dependencies": {
|
||||
"cors": "^2.8.6",
|
||||
"dotenv": "^17.3.1",
|
||||
"express": "^5.2.1",
|
||||
"express-openid-connect": "^2.19.4"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/cors": "^2.8.19",
|
||||
"@types/express": "^5.0.6",
|
||||
"@types/node": "^25.5.0",
|
||||
"tsx": "^4.21.0",
|
||||
"typescript": "^5.9.3"
|
||||
}
|
||||
}
|
||||
103
furumi-node-player/server/src/index.ts
Normal file
103
furumi-node-player/server/src/index.ts
Normal file
@@ -0,0 +1,103 @@
|
||||
import 'dotenv/config';
|
||||
|
||||
import cors from 'cors';
|
||||
import express from 'express';
|
||||
import { auth } from 'express-openid-connect';
|
||||
|
||||
const app = express();
|
||||
|
||||
const port = Number(process.env.PORT ?? 3001);
|
||||
const frontendOrigin = process.env.FRONTEND_ORIGIN ?? 'http://localhost:5173';
|
||||
|
||||
const disableAuth = ['1', 'true', 'yes', 'on'].includes(
|
||||
String(process.env.DISABLE_AUTH ?? '').trim().toLowerCase(),
|
||||
);
|
||||
|
||||
const oidcConfig = {
|
||||
authRequired: false,
|
||||
auth0Logout: false,
|
||||
secret: process.env.SESSION_SECRET ?? 'change-me-in-env',
|
||||
baseURL: process.env.BASE_URL ?? `http://localhost:${port}`,
|
||||
clientID: process.env.OIDC_CLIENT_ID ?? '',
|
||||
issuerBaseURL: process.env.OIDC_ISSUER_BASE_URL ?? '',
|
||||
clientSecret: process.env.OIDC_CLIENT_SECRET ?? '',
|
||||
authorizationParams: {
|
||||
response_type: 'code',
|
||||
scope: process.env.OIDC_SCOPE ?? 'openid profile email',
|
||||
},
|
||||
};
|
||||
|
||||
if (!disableAuth && (!oidcConfig.clientID || !oidcConfig.issuerBaseURL || !oidcConfig.clientSecret)) {
|
||||
// Keep a clear startup failure if OIDC is not configured.
|
||||
throw new Error(
|
||||
'OIDC config is missing. Set OIDC_ISSUER_BASE_URL, OIDC_CLIENT_ID, OIDC_CLIENT_SECRET in server/.env (or set DISABLE_AUTH=true)',
|
||||
);
|
||||
}
|
||||
|
||||
app.use(
|
||||
cors({
|
||||
origin: frontendOrigin,
|
||||
credentials: true,
|
||||
}),
|
||||
);
|
||||
app.use(express.json());
|
||||
|
||||
if (!disableAuth) {
|
||||
app.use(auth(oidcConfig));
|
||||
}
|
||||
|
||||
app.get('/api/health', (_req, res) => {
|
||||
res.json({ ok: true });
|
||||
});
|
||||
|
||||
app.get('/api/me', (req, res) => {
|
||||
if (disableAuth) {
|
||||
res.json({
|
||||
authenticated: false,
|
||||
bypassAuth: true,
|
||||
user: {
|
||||
sub: 'noauth',
|
||||
name: 'No Auth',
|
||||
},
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
if (!req.oidc.isAuthenticated()) {
|
||||
res.status(401).json({ authenticated: false });
|
||||
return;
|
||||
}
|
||||
|
||||
res.json({
|
||||
authenticated: true,
|
||||
user: req.oidc.user,
|
||||
});
|
||||
});
|
||||
|
||||
app.get('/api/login', (req, res) => {
|
||||
if (disableAuth) {
|
||||
res.status(204).end();
|
||||
return;
|
||||
}
|
||||
|
||||
res.oidc.login({
|
||||
returnTo: frontendOrigin,
|
||||
});
|
||||
});
|
||||
|
||||
app.get('/api/logout', (req, res) => {
|
||||
if (disableAuth) {
|
||||
res.status(204).end();
|
||||
return;
|
||||
}
|
||||
|
||||
res.oidc.logout({
|
||||
returnTo: frontendOrigin,
|
||||
});
|
||||
});
|
||||
|
||||
app.listen(port, () => {
|
||||
console.log(
|
||||
`${disableAuth ? 'NO-AUTH' : 'OIDC auth'} server listening on http://localhost:${port}`,
|
||||
);
|
||||
});
|
||||
14
furumi-node-player/server/tsconfig.json
Normal file
14
furumi-node-player/server/tsconfig.json
Normal file
@@ -0,0 +1,14 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2022",
|
||||
"module": "NodeNext",
|
||||
"moduleResolution": "NodeNext",
|
||||
"strict": true,
|
||||
"esModuleInterop": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"skipLibCheck": true,
|
||||
"outDir": "dist",
|
||||
"rootDir": "src"
|
||||
},
|
||||
"include": ["src"]
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "furumi-server"
|
||||
version = "0.2.1"
|
||||
version = "0.3.4"
|
||||
edition = "2024"
|
||||
|
||||
[dependencies]
|
||||
@@ -18,15 +18,31 @@ rustls = { version = "0.23.37", features = ["ring"] }
|
||||
thiserror = "2.0.18"
|
||||
tokio = { version = "1.50.0", features = ["full"] }
|
||||
tokio-stream = "0.1.18"
|
||||
tokio-util = { version = "0.7", features = ["io"] }
|
||||
tonic = { version = "0.12.3", features = ["tls"] }
|
||||
tracing = "0.1.44"
|
||||
tracing-subscriber = { version = "0.3.22", features = ["env-filter"] }
|
||||
async-stream = "0.3.6"
|
||||
async-trait = "0.1.89"
|
||||
prometheus = { version = "0.14.0", features = ["process"] }
|
||||
axum = { version = "0.7", features = ["tokio"] }
|
||||
axum = { version = "0.7", features = ["tokio", "macros"] }
|
||||
once_cell = "1.21.3"
|
||||
rcgen = { version = "0.14.7", features = ["pem"] }
|
||||
symphonia = { version = "0.5", default-features = false, features = ["mp3", "aac", "flac", "vorbis", "wav", "alac", "adpcm", "pcm", "mpa", "isomp4", "ogg", "aiff", "mkv"] }
|
||||
opus = "0.3"
|
||||
ogg = "0.9"
|
||||
mime_guess = "2.0"
|
||||
tower = { version = "0.4", features = ["util"] }
|
||||
sha2 = "0.10"
|
||||
base64 = "0.22"
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
serde_json = "1"
|
||||
openidconnect = "3.4"
|
||||
reqwest = { version = "0.12", default-features = false, features = ["rustls-tls"] }
|
||||
hmac = "0.12"
|
||||
rand = "0.8"
|
||||
encoding_rs = "0.8"
|
||||
urlencoding = "2.1.3"
|
||||
|
||||
[dev-dependencies]
|
||||
tempfile = "3.26.0"
|
||||
|
||||
@@ -2,6 +2,7 @@ pub mod vfs;
|
||||
pub mod security;
|
||||
pub mod server;
|
||||
pub mod metrics;
|
||||
pub mod web;
|
||||
|
||||
use std::net::SocketAddr;
|
||||
use std::path::PathBuf;
|
||||
@@ -33,9 +34,37 @@ struct Args {
|
||||
#[arg(long, env = "FURUMI_METRICS_BIND", default_value = "0.0.0.0:9090")]
|
||||
metrics_bind: String,
|
||||
|
||||
/// IP address and port for the web music player
|
||||
#[arg(long, env = "FURUMI_WEB_BIND", default_value = "0.0.0.0:8080")]
|
||||
web_bind: String,
|
||||
|
||||
/// Disable the web music player UI
|
||||
#[arg(long, default_value_t = false)]
|
||||
no_web: bool,
|
||||
|
||||
/// Disable TLS encryption (not recommended, use only for debugging)
|
||||
#[arg(long, default_value_t = false)]
|
||||
no_tls: bool,
|
||||
|
||||
/// OIDC Issuer URL (e.g. https://auth.example.com/application/o/furumi/)
|
||||
#[arg(long, env = "FURUMI_OIDC_ISSUER_URL")]
|
||||
oidc_issuer_url: Option<String>,
|
||||
|
||||
/// OIDC Client ID
|
||||
#[arg(long, env = "FURUMI_OIDC_CLIENT_ID")]
|
||||
oidc_client_id: Option<String>,
|
||||
|
||||
/// OIDC Client Secret
|
||||
#[arg(long, env = "FURUMI_OIDC_CLIENT_SECRET")]
|
||||
oidc_client_secret: Option<String>,
|
||||
|
||||
/// OIDC Redirect URL (e.g. https://music.example.com/auth/callback)
|
||||
#[arg(long, env = "FURUMI_OIDC_REDIRECT_URL")]
|
||||
oidc_redirect_url: Option<String>,
|
||||
|
||||
/// OIDC Session Secret (32+ chars, for HMAC). If not provided, a random one is generated on startup.
|
||||
#[arg(long, env = "FURUMI_OIDC_SESSION_SECRET")]
|
||||
oidc_session_secret: Option<String>,
|
||||
}
|
||||
|
||||
async fn metrics_handler() -> String {
|
||||
@@ -79,7 +108,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
let svc = RemoteFileSystemServer::with_interceptor(remote_fs, auth.clone());
|
||||
|
||||
// Print startup info
|
||||
println!("Furumi-ng Server listening on {}", addr);
|
||||
println!("Furumi-ng Server v{} listening on {}", option_env!("FURUMI_VERSION").unwrap_or(env!("CARGO_PKG_VERSION")), addr);
|
||||
if args.no_tls {
|
||||
println!("WARNING: TLS is DISABLED — traffic is unencrypted");
|
||||
} else {
|
||||
@@ -91,7 +120,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
println!("Authentication: enabled (Bearer token)");
|
||||
}
|
||||
println!("Document Root: {:?}", root_path);
|
||||
println!("Metrics: http://{}/metrics", metrics_addr);
|
||||
println!("Metrics: http://{}/metrics", metrics_addr);
|
||||
|
||||
// Spawn the Prometheus metrics HTTP server on a separate port
|
||||
let metrics_app = Router::new().route("/metrics", get(metrics_handler));
|
||||
@@ -100,6 +129,40 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
axum::serve(metrics_listener, metrics_app).await.unwrap();
|
||||
});
|
||||
|
||||
// Spawn the web music player on its own port
|
||||
if !args.no_web {
|
||||
let web_addr: SocketAddr = args.web_bind.parse().unwrap_or_else(|e| {
|
||||
eprintln!("Error: Invalid web bind address '{}': {}", args.web_bind, e);
|
||||
std::process::exit(1);
|
||||
});
|
||||
|
||||
// Initialize OIDC State if provided
|
||||
let oidc_state = if let (Some(issuer), Some(client_id), Some(secret), Some(redirect)) = (
|
||||
args.oidc_issuer_url,
|
||||
args.oidc_client_id,
|
||||
args.oidc_client_secret,
|
||||
args.oidc_redirect_url,
|
||||
) {
|
||||
println!("OIDC (SSO): enabled for web UI (issuer: {})", issuer);
|
||||
match web::auth::oidc_init(issuer, client_id, secret, redirect, args.oidc_session_secret).await {
|
||||
Ok(state) => Some(Arc::new(state)),
|
||||
Err(e) => {
|
||||
eprintln!("Error initializing OIDC client: {}", e);
|
||||
std::process::exit(1);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let web_app = web::build_router(root_path.clone(), args.token.clone(), oidc_state);
|
||||
let web_listener = tokio::net::TcpListener::bind(web_addr).await?;
|
||||
println!("Web player: http://{}", web_addr);
|
||||
tokio::spawn(async move {
|
||||
axum::serve(web_listener, web_app).await.unwrap();
|
||||
});
|
||||
}
|
||||
|
||||
let mut builder = Server::builder()
|
||||
.tcp_keepalive(Some(std::time::Duration::from_secs(60)))
|
||||
.http2_keepalive_interval(Some(std::time::Duration::from_secs(60)));
|
||||
|
||||
521
furumi-server/src/web/auth.rs
Normal file
521
furumi-server/src/web/auth.rs
Normal file
@@ -0,0 +1,521 @@
|
||||
use axum::{
|
||||
body::Body,
|
||||
extract::{Form, Request, State},
|
||||
http::{header, HeaderMap, StatusCode},
|
||||
middleware::Next,
|
||||
response::{Html, IntoResponse, Redirect, Response},
|
||||
};
|
||||
use openidconnect::{
|
||||
core::{CoreClient, CoreProviderMetadata, CoreResponseType},
|
||||
reqwest::async_http_client,
|
||||
AuthenticationFlow, AuthorizationCode, ClientId, ClientSecret, CsrfToken, IssuerUrl, Nonce,
|
||||
PkceCodeChallenge, PkceCodeVerifier, RedirectUrl, Scope, TokenResponse,
|
||||
};
|
||||
use rand::RngCore;
|
||||
use serde::Deserialize;
|
||||
use sha2::{Digest, Sha256};
|
||||
|
||||
use base64::Engine;
|
||||
use hmac::{Hmac, Mac};
|
||||
|
||||
use super::{OidcState, WebState};
|
||||
|
||||
/// Cookie name used to store the session token.
|
||||
const SESSION_COOKIE: &str = "furumi_session";
|
||||
|
||||
fn esc(s: &str) -> String {
|
||||
s.replace('&', "&")
|
||||
.replace('<', "<")
|
||||
.replace('>', ">")
|
||||
.replace('"', """)
|
||||
.replace('\'', "'")
|
||||
}
|
||||
|
||||
/// Compute SHA-256 of the token as hex string (stored in cookie, not raw token).
|
||||
pub fn token_hash(token: &str) -> String {
|
||||
let mut h = Sha256::new();
|
||||
h.update(token.as_bytes());
|
||||
format!("{:x}", h.finalize())
|
||||
}
|
||||
|
||||
pub async fn require_auth(
|
||||
State(state): State<WebState>,
|
||||
mut req: Request,
|
||||
next: Next,
|
||||
) -> Response {
|
||||
// Auth disabled when token is empty
|
||||
if state.token.is_empty() {
|
||||
req.extensions_mut().insert(super::AuthUserInfo("Unauthenticated".to_string()));
|
||||
return next.run(req).await;
|
||||
}
|
||||
|
||||
let cookies = req
|
||||
.headers()
|
||||
.get(header::COOKIE)
|
||||
.and_then(|v| v.to_str().ok())
|
||||
.unwrap_or("");
|
||||
|
||||
let expected = token_hash(&state.token);
|
||||
let mut authed_user = None;
|
||||
for c in cookies.split(';') {
|
||||
let c = c.trim();
|
||||
if let Some(val) = c.strip_prefix(&format!("{}=", SESSION_COOKIE)) {
|
||||
if val == expected {
|
||||
authed_user = Some("Master Token".to_string());
|
||||
break;
|
||||
} else if let Some(oidc) = &state.oidc {
|
||||
if let Some(user) = verify_sso_cookie(&oidc.session_secret, val) {
|
||||
authed_user = Some(user);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(user) = authed_user {
|
||||
req.extensions_mut().insert(super::AuthUserInfo(user));
|
||||
next.run(req).await
|
||||
} else {
|
||||
let uri = req.uri().to_string();
|
||||
if uri.starts_with("/api/") {
|
||||
(StatusCode::UNAUTHORIZED, "Unauthorized").into_response()
|
||||
} else {
|
||||
let redirect_url = format!("/login?next={}", urlencoding::encode(&uri));
|
||||
Redirect::to(&redirect_url).into_response()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type HmacSha256 = Hmac<sha2::Sha256>;
|
||||
|
||||
pub fn generate_sso_cookie(secret: &[u8], user_id: &str) -> String {
|
||||
let mut mac = HmacSha256::new_from_slice(secret).unwrap();
|
||||
mac.update(user_id.as_bytes());
|
||||
let sig = base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(mac.finalize().into_bytes());
|
||||
format!("sso:{}:{}", user_id, sig)
|
||||
}
|
||||
|
||||
pub fn verify_sso_cookie(secret: &[u8], cookie_val: &str) -> Option<String> {
|
||||
let parts: Vec<&str> = cookie_val.split(':').collect();
|
||||
if parts.len() != 3 || parts[0] != "sso" {
|
||||
return None;
|
||||
}
|
||||
let user_id = parts[1];
|
||||
let sig = parts[2];
|
||||
|
||||
let mut mac = HmacSha256::new_from_slice(secret).unwrap();
|
||||
mac.update(user_id.as_bytes());
|
||||
|
||||
let expected_sig = base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(mac.finalize().into_bytes());
|
||||
if sig == expected_sig {
|
||||
Some(user_id.to_string())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct LoginQuery {
|
||||
pub next: Option<String>,
|
||||
}
|
||||
|
||||
/// GET /login — show login form.
|
||||
pub async fn login_page(
|
||||
State(state): State<WebState>,
|
||||
axum::extract::Query(query): axum::extract::Query<LoginQuery>,
|
||||
) -> impl IntoResponse {
|
||||
let token_enabled = !state.token.is_empty();
|
||||
let oidc_enabled = state.oidc.is_some();
|
||||
|
||||
if !token_enabled && !oidc_enabled {
|
||||
return Redirect::to("/").into_response();
|
||||
}
|
||||
|
||||
let next_val = query.next.unwrap_or_else(|| "/".to_string());
|
||||
let next_encoded = urlencoding::encode(&next_val);
|
||||
|
||||
let oidc_html = if oidc_enabled {
|
||||
format!(
|
||||
r#"<div class="divider"><span>OR</span></div>
|
||||
<a href="/auth/login?next={}" class="btn-oidc">Log in with Authentik (SSO)</a>"#,
|
||||
next_encoded
|
||||
)
|
||||
} else {
|
||||
"".to_string()
|
||||
};
|
||||
|
||||
let next_input = format!(r#"<input type="hidden" name="next" value="{}">"#, esc(&next_val));
|
||||
|
||||
let html = LOGIN_HTML
|
||||
.replace("<!-- OIDC_PLACEHOLDER -->", &oidc_html)
|
||||
.replace("<!-- NEXT_INPUT_PLACEHOLDER -->", &next_input);
|
||||
Html(html).into_response()
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct LoginForm {
|
||||
password: String,
|
||||
next: Option<String>,
|
||||
}
|
||||
|
||||
/// POST /login — validate password, set session cookie.
|
||||
pub async fn login_submit(
|
||||
State(state): State<WebState>,
|
||||
Form(form): Form<LoginForm>,
|
||||
) -> impl IntoResponse {
|
||||
if state.token.is_empty() {
|
||||
return Redirect::to("/").into_response();
|
||||
}
|
||||
|
||||
if form.password == *state.token {
|
||||
let hash = token_hash(&state.token);
|
||||
let cookie = format!(
|
||||
"{}={}; HttpOnly; SameSite=Strict; Path=/; Max-Age=604800",
|
||||
SESSION_COOKIE, hash
|
||||
);
|
||||
let redirect_to = form.next.as_deref().unwrap_or("/");
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert(header::SET_COOKIE, cookie.parse().unwrap());
|
||||
headers.insert(header::LOCATION, redirect_to.parse().unwrap());
|
||||
(StatusCode::FOUND, headers, Body::empty()).into_response()
|
||||
} else {
|
||||
Html(LOGIN_ERROR_HTML).into_response()
|
||||
}
|
||||
}
|
||||
|
||||
/// GET /logout — clear session cookie and redirect to login.
|
||||
pub async fn logout() -> impl IntoResponse {
|
||||
let cookie = format!(
|
||||
"{}=; HttpOnly; SameSite=Strict; Path=/; Expires=Thu, 01 Jan 1970 00:00:00 GMT",
|
||||
SESSION_COOKIE
|
||||
);
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert(header::SET_COOKIE, cookie.parse().unwrap());
|
||||
headers.insert(header::LOCATION, "/login".parse().unwrap());
|
||||
(StatusCode::FOUND, headers, Body::empty()).into_response()
|
||||
}
|
||||
|
||||
pub async fn oidc_init(
|
||||
issuer: String,
|
||||
client_id: String,
|
||||
client_secret: String,
|
||||
redirect: String,
|
||||
session_secret_override: Option<String>,
|
||||
) -> anyhow::Result<OidcState> {
|
||||
let provider_metadata = CoreProviderMetadata::discover_async(
|
||||
IssuerUrl::new(issuer)?,
|
||||
async_http_client,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let client = CoreClient::from_provider_metadata(
|
||||
provider_metadata,
|
||||
ClientId::new(client_id),
|
||||
Some(ClientSecret::new(client_secret)),
|
||||
)
|
||||
.set_auth_type(openidconnect::AuthType::RequestBody)
|
||||
.set_redirect_uri(RedirectUrl::new(redirect)?);
|
||||
|
||||
let session_secret = if let Some(s) = session_secret_override {
|
||||
let mut b = s.into_bytes();
|
||||
b.resize(32, 0); // Ensure at least 32 bytes for HMAC-SHA256
|
||||
b
|
||||
} else {
|
||||
let mut b = vec![0u8; 32];
|
||||
rand::thread_rng().fill_bytes(&mut b);
|
||||
b
|
||||
};
|
||||
|
||||
Ok(OidcState {
|
||||
client,
|
||||
session_secret,
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn oidc_login(
|
||||
State(state): State<WebState>,
|
||||
axum::extract::Query(query): axum::extract::Query<LoginQuery>,
|
||||
req: Request,
|
||||
) -> impl IntoResponse {
|
||||
let oidc = match &state.oidc {
|
||||
Some(o) => o,
|
||||
None => return Redirect::to("/login").into_response(),
|
||||
};
|
||||
|
||||
let (pkce_challenge, pkce_verifier) = PkceCodeChallenge::new_random_sha256();
|
||||
|
||||
let (auth_url, csrf_token, nonce) = oidc
|
||||
.client
|
||||
.authorize_url(
|
||||
AuthenticationFlow::<CoreResponseType>::AuthorizationCode,
|
||||
CsrfToken::new_random,
|
||||
Nonce::new_random,
|
||||
)
|
||||
.add_scope(Scope::new("openid".to_string()))
|
||||
.add_scope(Scope::new("profile".to_string()))
|
||||
.set_pkce_challenge(pkce_challenge)
|
||||
.url();
|
||||
|
||||
let next_url = query.next.unwrap_or_else(|| "/".to_string());
|
||||
let cookie_val = format!("{}:{}:{}:{}", csrf_token.secret(), nonce.secret(), pkce_verifier.secret(), urlencoding::encode(&next_url));
|
||||
|
||||
// Determine if we are running behind an HTTPS proxy
|
||||
let is_https = req.headers().get("x-forwarded-proto")
|
||||
.and_then(|v| v.to_str().ok())
|
||||
.map(|s| s == "https")
|
||||
.unwrap_or(false);
|
||||
|
||||
// If HTTPS, use SameSite=None + Secure to fully support cross-domain POST redirects.
|
||||
// Otherwise fallback to Lax for local testing.
|
||||
let cookie_attrs = if is_https {
|
||||
"SameSite=None; Secure"
|
||||
} else {
|
||||
"SameSite=Lax"
|
||||
};
|
||||
|
||||
let cookie = format!("furumi_oidc_state={}; HttpOnly; {}; Path=/; Max-Age=3600", cookie_val, cookie_attrs);
|
||||
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert(header::SET_COOKIE, cookie.parse().unwrap());
|
||||
headers.insert(header::LOCATION, auth_url.as_str().parse().unwrap());
|
||||
headers.insert(header::CACHE_CONTROL, "no-store, no-cache, must-revalidate".parse().unwrap());
|
||||
|
||||
(StatusCode::FOUND, headers, Body::empty()).into_response()
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct AuthCallbackQuery {
|
||||
code: String,
|
||||
state: String,
|
||||
}
|
||||
|
||||
pub async fn oidc_callback(
|
||||
State(state): State<WebState>,
|
||||
axum::extract::Query(query): axum::extract::Query<AuthCallbackQuery>,
|
||||
req: Request,
|
||||
) -> impl IntoResponse {
|
||||
let oidc = match &state.oidc {
|
||||
Some(o) => o,
|
||||
None => return Redirect::to("/login").into_response(),
|
||||
};
|
||||
|
||||
let cookies = req
|
||||
.headers()
|
||||
.get(header::COOKIE)
|
||||
.and_then(|v| v.to_str().ok())
|
||||
.unwrap_or("");
|
||||
|
||||
let mut matching_val = None;
|
||||
for c in cookies.split(';') {
|
||||
let c = c.trim();
|
||||
if let Some(val) = c.strip_prefix("furumi_oidc_state=") {
|
||||
let parts: Vec<&str> = val.split(':').collect();
|
||||
if parts.len() >= 3 && parts[0] == query.state {
|
||||
matching_val = Some(val.to_string());
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let cookie_val = match matching_val {
|
||||
Some(c) => c,
|
||||
None => {
|
||||
tracing::warn!("OIDC callback failed: Invalid state or missing valid cookie. Received cookies: {}", cookies);
|
||||
return (StatusCode::BAD_REQUEST, "Invalid state").into_response();
|
||||
}
|
||||
};
|
||||
|
||||
let parts: Vec<&str> = cookie_val.split(':').collect();
|
||||
let nonce = Nonce::new(parts[1].to_string());
|
||||
let pkce_verifier = PkceCodeVerifier::new(parts[2].to_string());
|
||||
|
||||
let token_response = oidc
|
||||
.client
|
||||
.exchange_code(AuthorizationCode::new(query.code))
|
||||
.set_pkce_verifier(pkce_verifier)
|
||||
.request_async(async_http_client)
|
||||
.await;
|
||||
|
||||
let token_response = match token_response {
|
||||
Ok(tr) => tr,
|
||||
Err(e) => {
|
||||
tracing::error!("OIDC exchange code error: {:?}", e);
|
||||
if let openidconnect::RequestTokenError::ServerResponse(err) = &e {
|
||||
tracing::error!("OIDC Server returned error: {:?}", err);
|
||||
}
|
||||
return (StatusCode::INTERNAL_SERVER_ERROR, format!("OIDC error: {}", e)).into_response();
|
||||
}
|
||||
};
|
||||
|
||||
let id_token = match token_response.id_token() {
|
||||
Some(t) => t,
|
||||
None => return (StatusCode::INTERNAL_SERVER_ERROR, "No ID token").into_response(),
|
||||
};
|
||||
|
||||
let claims = match id_token.claims(&oidc.client.id_token_verifier(), &nonce) {
|
||||
Ok(c) => c,
|
||||
Err(e) => return (StatusCode::UNAUTHORIZED, format!("Invalid ID token: {}", e)).into_response(),
|
||||
};
|
||||
|
||||
let user_id = claims
|
||||
.preferred_username()
|
||||
.map(|u| u.to_string())
|
||||
.or_else(|| claims.email().map(|e| e.to_string()))
|
||||
.unwrap_or_else(|| claims.subject().to_string());
|
||||
|
||||
let session_val = generate_sso_cookie(&oidc.session_secret, &user_id);
|
||||
|
||||
let parts: Vec<&str> = cookie_val.split(':').collect();
|
||||
let redirect_to = parts.get(3)
|
||||
.and_then(|&s| urlencoding::decode(s).ok())
|
||||
.map(|v| v.into_owned())
|
||||
.unwrap_or_else(|| "/".to_string());
|
||||
let redirect_to = if redirect_to.is_empty() { "/".to_string() } else { redirect_to };
|
||||
|
||||
let is_https = req.headers().get("x-forwarded-proto")
|
||||
.and_then(|v| v.to_str().ok())
|
||||
.map(|s| s == "https")
|
||||
.unwrap_or(false);
|
||||
|
||||
let session_attrs = if is_https {
|
||||
"SameSite=Strict; Secure"
|
||||
} else {
|
||||
"SameSite=Strict"
|
||||
};
|
||||
|
||||
let session_cookie = format!("{}={}; HttpOnly; {}; Path=/; Max-Age=604800", SESSION_COOKIE, session_val, session_attrs);
|
||||
let clear_state_cookie = "furumi_oidc_state=; HttpOnly; Path=/; Expires=Thu, 01 Jan 1970 00:00:00 GMT";
|
||||
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert(header::SET_COOKIE, session_cookie.parse().unwrap());
|
||||
headers.append(header::SET_COOKIE, clear_state_cookie.parse().unwrap());
|
||||
headers.insert(header::LOCATION, redirect_to.parse().unwrap());
|
||||
|
||||
(StatusCode::FOUND, headers, Body::empty()).into_response()
|
||||
}
|
||||
|
||||
const LOGIN_HTML: &str = r#"<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>Furumi Player — Login</title>
|
||||
<style>
|
||||
* { box-sizing: border-box; margin: 0; padding: 0; }
|
||||
body {
|
||||
min-height: 100vh;
|
||||
display: flex; align-items: center; justify-content: center;
|
||||
background: #0d0f14;
|
||||
font-family: 'Inter', system-ui, sans-serif;
|
||||
color: #e2e8f0;
|
||||
}
|
||||
.card {
|
||||
background: #161b27;
|
||||
border: 1px solid #2a3347;
|
||||
border-radius: 16px;
|
||||
padding: 2.5rem 3rem;
|
||||
width: 360px;
|
||||
box-shadow: 0 20px 60px rgba(0,0,0,0.5);
|
||||
}
|
||||
.logo { font-size: 1.8rem; font-weight: 700; color: #7c6af7; margin-bottom: 0.25rem; }
|
||||
.subtitle { font-size: 0.85rem; color: #64748b; margin-bottom: 2rem; }
|
||||
label { display: block; font-size: 0.8rem; color: #94a3b8; margin-bottom: 0.4rem; }
|
||||
input[type=password] {
|
||||
width: 100%; padding: 0.6rem 0.8rem;
|
||||
background: #0d0f14; border: 1px solid #2a3347; border-radius: 8px;
|
||||
color: #e2e8f0; font-size: 0.95rem; outline: none;
|
||||
transition: border-color 0.2s;
|
||||
}
|
||||
input[type=password]:focus { border-color: #7c6af7; }
|
||||
button {
|
||||
margin-top: 1.2rem; width: 100%; padding: 0.65rem;
|
||||
background: #7c6af7; border: none; border-radius: 8px;
|
||||
color: #fff; font-size: 0.95rem; font-weight: 600; cursor: pointer;
|
||||
transition: background 0.2s;
|
||||
}
|
||||
button:hover { background: #6b58e8; }
|
||||
.btn-oidc {
|
||||
display: block; width: 100%; padding: 0.65rem; text-align: center;
|
||||
background: #2a3347; border: 1px solid #3d4a66; border-radius: 8px;
|
||||
color: #e2e8f0; font-size: 0.95rem; font-weight: 600; text-decoration: none;
|
||||
transition: background 0.2s;
|
||||
}
|
||||
.btn-oidc:hover { background: #3d4a66; }
|
||||
.divider {
|
||||
display: flex; align-items: center; text-align: center; margin: 1.5rem 0;
|
||||
color: #64748b; font-size: 0.75rem;
|
||||
}
|
||||
.divider::before, .divider::after {
|
||||
content: ''; flex: 1; border-bottom: 1px solid #2a3347;
|
||||
}
|
||||
.divider span { padding: 0 10px; }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div class="card">
|
||||
<div class="logo">🎵 Furumi</div>
|
||||
<div class="subtitle">Enter access token to continue</div>
|
||||
<form method="POST" action="/login">
|
||||
<!-- NEXT_INPUT_PLACEHOLDER -->
|
||||
<label for="password">Access Token</label>
|
||||
<input type="password" id="password" name="password" autofocus autocomplete="current-password">
|
||||
<button type="submit">Sign In</button>
|
||||
</form>
|
||||
<!-- OIDC_PLACEHOLDER -->
|
||||
</div>
|
||||
</body>
|
||||
</html>"#;
|
||||
|
||||
const LOGIN_ERROR_HTML: &str = r#"<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<title>Furumi Player — Login</title>
|
||||
<style>
|
||||
* { box-sizing: border-box; margin: 0; padding: 0; }
|
||||
body {
|
||||
min-height: 100vh;
|
||||
display: flex; align-items: center; justify-content: center;
|
||||
background: #0d0f14;
|
||||
font-family: 'Inter', system-ui, sans-serif;
|
||||
color: #e2e8f0;
|
||||
}
|
||||
.card {
|
||||
background: #161b27;
|
||||
border: 1px solid #2a3347;
|
||||
border-radius: 16px;
|
||||
padding: 2.5rem 3rem;
|
||||
width: 360px;
|
||||
box-shadow: 0 20px 60px rgba(0,0,0,0.5);
|
||||
}
|
||||
.logo { font-size: 1.8rem; font-weight: 700; color: #7c6af7; margin-bottom: 0.25rem; }
|
||||
.subtitle { font-size: 0.85rem; color: #64748b; margin-bottom: 2rem; }
|
||||
.error { color: #f87171; font-size: 0.85rem; margin-bottom: 1rem; }
|
||||
label { display: block; font-size: 0.8rem; color: #94a3b8; margin-bottom: 0.4rem; }
|
||||
input[type=password] {
|
||||
width: 100%; padding: 0.6rem 0.8rem;
|
||||
background: #0d0f14; border: 1px solid #f87171; border-radius: 8px;
|
||||
color: #e2e8f0; font-size: 0.95rem; outline: none;
|
||||
}
|
||||
button {
|
||||
margin-top: 1.2rem; width: 100%; padding: 0.65rem;
|
||||
background: #7c6af7; border: none; border-radius: 8px;
|
||||
color: #fff; font-size: 0.95rem; font-weight: 600; cursor: pointer;
|
||||
}
|
||||
button:hover { background: #6b58e8; }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div class="card">
|
||||
<div class="logo">🎵 Furumi</div>
|
||||
<div class="subtitle">Enter access token to continue</div>
|
||||
<p class="error">❌ Invalid token. Please try again.</p>
|
||||
<form method="POST" action="/login">
|
||||
<!-- NEXT_INPUT_PLACEHOLDER -->
|
||||
<label for="password">Access Token</label>
|
||||
<input type="password" id="password" name="password" autofocus>
|
||||
<button type="submit">Sign In</button>
|
||||
</form>
|
||||
</div>
|
||||
</body>
|
||||
</html>"#;
|
||||
132
furumi-server/src/web/browse.rs
Normal file
132
furumi-server/src/web/browse.rs
Normal file
@@ -0,0 +1,132 @@
|
||||
use axum::{
|
||||
extract::{Query, State},
|
||||
http::StatusCode,
|
||||
response::{IntoResponse, Json},
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::path::PathBuf;
|
||||
|
||||
use crate::security::sanitize_path;
|
||||
use super::WebState;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct BrowseQuery {
|
||||
#[serde(default)]
|
||||
pub path: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct BrowseResponse {
|
||||
pub path: String,
|
||||
pub entries: Vec<Entry>,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct Entry {
|
||||
pub name: String,
|
||||
#[serde(rename = "type")]
|
||||
pub kind: EntryKind,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub size: Option<u64>,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum EntryKind {
|
||||
File,
|
||||
Dir,
|
||||
}
|
||||
|
||||
pub async fn handler(
|
||||
State(state): State<WebState>,
|
||||
Query(query): Query<BrowseQuery>,
|
||||
) -> impl IntoResponse {
|
||||
let safe = match sanitize_path(&query.path) {
|
||||
Ok(p) => p,
|
||||
Err(_) => {
|
||||
return (StatusCode::BAD_REQUEST, Json(serde_json::json!({"error": "invalid path"}))).into_response();
|
||||
}
|
||||
};
|
||||
|
||||
let dir_path: PathBuf = state.root.join(&safe);
|
||||
|
||||
let read_dir = match tokio::fs::read_dir(&dir_path).await {
|
||||
Ok(rd) => rd,
|
||||
Err(e) => {
|
||||
let status = if e.kind() == std::io::ErrorKind::NotFound {
|
||||
StatusCode::NOT_FOUND
|
||||
} else {
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
};
|
||||
return (status, Json(serde_json::json!({"error": e.to_string()}))).into_response();
|
||||
}
|
||||
};
|
||||
|
||||
let mut entries: Vec<Entry> = Vec::new();
|
||||
let mut rd = read_dir;
|
||||
|
||||
loop {
|
||||
match rd.next_entry().await {
|
||||
Ok(Some(entry)) => {
|
||||
let name = entry.file_name().to_string_lossy().into_owned();
|
||||
// Skip hidden files
|
||||
if name.starts_with('.') {
|
||||
continue;
|
||||
}
|
||||
let meta = match entry.metadata().await {
|
||||
Ok(m) => m,
|
||||
Err(_) => continue,
|
||||
};
|
||||
if meta.is_dir() {
|
||||
entries.push(Entry { name, kind: EntryKind::Dir, size: None });
|
||||
} else if meta.is_file() {
|
||||
// Only expose audio files
|
||||
if is_audio_file(&name) {
|
||||
entries.push(Entry {
|
||||
name,
|
||||
kind: EntryKind::File,
|
||||
size: Some(meta.len()),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(None) => break,
|
||||
Err(e) => {
|
||||
return (
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
Json(serde_json::json!({"error": e.to_string()})),
|
||||
)
|
||||
.into_response();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Sort: dirs first, then files; alphabetically within each group
|
||||
entries.sort_by(|a, b| {
|
||||
let a_dir = matches!(a.kind, EntryKind::Dir);
|
||||
let b_dir = matches!(b.kind, EntryKind::Dir);
|
||||
b_dir.cmp(&a_dir).then(a.name.to_lowercase().cmp(&b.name.to_lowercase()))
|
||||
});
|
||||
|
||||
let response = BrowseResponse {
|
||||
path: safe,
|
||||
entries,
|
||||
};
|
||||
|
||||
(StatusCode::OK, Json(response)).into_response()
|
||||
}
|
||||
|
||||
/// Whitelist of audio extensions served via the web player.
|
||||
pub fn is_audio_file(name: &str) -> bool {
|
||||
let ext = name.rsplit('.').next().unwrap_or("").to_lowercase();
|
||||
matches!(
|
||||
ext.as_str(),
|
||||
"mp3" | "flac" | "ogg" | "opus" | "aac" | "m4a" | "wav" | "ape" | "wv" | "wma" | "tta" | "aiff" | "aif"
|
||||
)
|
||||
}
|
||||
|
||||
/// Returns true if the format needs transcoding (not natively supported by browsers).
|
||||
pub fn needs_transcode(name: &str) -> bool {
|
||||
let ext = name.rsplit('.').next().unwrap_or("").to_lowercase();
|
||||
matches!(ext.as_str(), "ape" | "wv" | "wma" | "tta" | "aiff" | "aif")
|
||||
}
|
||||
204
furumi-server/src/web/meta.rs
Normal file
204
furumi-server/src/web/meta.rs
Normal file
@@ -0,0 +1,204 @@
|
||||
use axum::{
|
||||
extract::{Path, State},
|
||||
http::StatusCode,
|
||||
response::{IntoResponse, Json},
|
||||
};
|
||||
use base64::{Engine, engine::general_purpose::STANDARD as BASE64};
|
||||
use serde::Serialize;
|
||||
use symphonia::core::{
|
||||
codecs::CODEC_TYPE_NULL,
|
||||
formats::FormatOptions,
|
||||
io::MediaSourceStream,
|
||||
meta::{MetadataOptions, StandardTagKey},
|
||||
probe::Hint,
|
||||
};
|
||||
|
||||
use crate::security::sanitize_path;
|
||||
use super::WebState;
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct MetaResponse {
|
||||
pub title: Option<String>,
|
||||
pub artist: Option<String>,
|
||||
pub album: Option<String>,
|
||||
pub track: Option<u32>,
|
||||
pub year: Option<u32>,
|
||||
pub duration_secs: Option<f64>,
|
||||
pub cover_base64: Option<String>, // "data:image/jpeg;base64,..."
|
||||
}
|
||||
|
||||
pub async fn handler(
|
||||
State(state): State<WebState>,
|
||||
Path(path): Path<String>,
|
||||
) -> impl IntoResponse {
|
||||
let safe = match sanitize_path(&path) {
|
||||
Ok(p) => p,
|
||||
Err(_) => return (StatusCode::BAD_REQUEST, Json(serde_json::json!({"error": "invalid path"}))).into_response(),
|
||||
};
|
||||
|
||||
let file_path = state.root.join(&safe);
|
||||
let filename = file_path
|
||||
.file_name()
|
||||
.and_then(|n| n.to_str())
|
||||
.unwrap_or("")
|
||||
.to_owned();
|
||||
|
||||
let meta = tokio::task::spawn_blocking(move || read_meta(file_path, &filename)).await;
|
||||
|
||||
match meta {
|
||||
Ok(Ok(m)) => (StatusCode::OK, Json(m)).into_response(),
|
||||
Ok(Err(e)) => (StatusCode::INTERNAL_SERVER_ERROR, Json(serde_json::json!({"error": e.to_string()}))).into_response(),
|
||||
Err(e) => (StatusCode::INTERNAL_SERVER_ERROR, Json(serde_json::json!({"error": e.to_string()}))).into_response(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Probe `file_path` with symphonia and assemble a `MetaResponse`.
///
/// Tag precedence: container-level metadata (e.g. ID3 read during probing)
/// is applied first; format-embedded metadata is consulted only when the
/// container yielded no title. Falls back to the filename (sans extension)
/// for the title. Performs blocking I/O — callers run it via
/// `spawn_blocking`.
fn read_meta(file_path: std::path::PathBuf, filename: &str) -> anyhow::Result<MetaResponse> {
    let file = std::fs::File::open(&file_path)?;
    let mss = MediaSourceStream::new(Box::new(file), Default::default());

    // Give the probe a format hint from the file extension, when present.
    let mut hint = Hint::new();
    if let Some(ext) = file_path.extension().and_then(|e| e.to_str()) {
        hint.with_extension(ext);
    }

    // Gapless handling is irrelevant for metadata-only reads.
    let mut probed = symphonia::default::get_probe().format(
        &hint,
        mss,
        &FormatOptions { enable_gapless: false, ..Default::default() },
        &MetadataOptions::default(),
    )?;

    // Extract tags from container-level metadata
    let mut title: Option<String> = None;
    let mut artist: Option<String> = None;
    let mut album: Option<String> = None;
    let mut track: Option<u32> = None;
    let mut year: Option<u32> = None;
    // (raw image bytes, MIME type) of the embedded cover art, if any.
    let mut cover_data: Option<(Vec<u8>, String)> = None;

    // Check metadata side-data (e.g., ID3 tags probed before format)
    if let Some(rev) = probed.metadata.get().as_ref().and_then(|m| m.current()) {
        extract_tags(rev.tags(), rev.visuals(), &mut title, &mut artist, &mut album, &mut track, &mut year, &mut cover_data);
    }

    // Also check format-embedded metadata.
    // NOTE(review): the fallback is gated on `title` alone — if the
    // container gave a title but no artist/album, format-level tags for
    // those fields are never read. Confirm this is intended.
    if let Some(rev) = probed.format.metadata().current() {
        if title.is_none() {
            extract_tags(rev.tags(), rev.visuals(), &mut title, &mut artist, &mut album, &mut track, &mut year, &mut cover_data);
        }
    }

    // If no title from tags, use filename without extension
    if title.is_none() {
        title = Some(
            std::path::Path::new(filename)
                .file_stem()
                .and_then(|s| s.to_str())
                .unwrap_or(filename)
                .to_owned(),
        );
    }

    // Estimate duration from track time_base + n_frames of the first
    // non-null track (None when either field is missing).
    let duration_secs = probed
        .format
        .tracks()
        .iter()
        .find(|t| t.codec_params.codec != CODEC_TYPE_NULL)
        .and_then(|t| {
            let n_frames = t.codec_params.n_frames?;
            let tb = t.codec_params.time_base?;
            Some(n_frames as f64 * tb.numer as f64 / tb.denom as f64)
        });

    // Embed cover art as a data: URL so the client needs no extra request.
    let cover_base64 = cover_data.map(|(data, mime)| {
        format!("data:{};base64,{}", mime, BASE64.encode(&data))
    });

    Ok(MetaResponse {
        title,
        artist,
        album,
        track,
        year,
        duration_secs,
        cover_base64,
    })
}
|
||||
|
||||
fn extract_tags(
|
||||
tags: &[symphonia::core::meta::Tag],
|
||||
visuals: &[symphonia::core::meta::Visual],
|
||||
title: &mut Option<String>,
|
||||
artist: &mut Option<String>,
|
||||
album: &mut Option<String>,
|
||||
track: &mut Option<u32>,
|
||||
year: &mut Option<u32>,
|
||||
cover: &mut Option<(Vec<u8>, String)>,
|
||||
) {
|
||||
for tag in tags {
|
||||
let value = fix_encoding(tag.value.to_string());
|
||||
if let Some(key) = tag.std_key {
|
||||
match key {
|
||||
StandardTagKey::TrackTitle => {
|
||||
*title = Some(value);
|
||||
}
|
||||
StandardTagKey::Artist | StandardTagKey::Performer => {
|
||||
if artist.is_none() {
|
||||
*artist = Some(value);
|
||||
}
|
||||
}
|
||||
StandardTagKey::Album => {
|
||||
*album = Some(value);
|
||||
}
|
||||
StandardTagKey::TrackNumber => {
|
||||
if track.is_none() {
|
||||
*track = value.parse().ok();
|
||||
}
|
||||
}
|
||||
StandardTagKey::Date | StandardTagKey::OriginalDate => {
|
||||
if year.is_none() {
|
||||
// Parse first 4 characters as year
|
||||
*year = value[..4.min(value.len())].parse().ok();
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if cover.is_none() {
|
||||
if let Some(visual) = visuals.first() {
|
||||
let mime = visual.media_type.clone();
|
||||
*cover = Some((visual.data.to_vec(), mime));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Heuristic to fix mojibake (CP1251 bytes interpreted as Latin-1/Windows-1252)
|
||||
fn fix_encoding(s: String) -> String {
|
||||
// If it's already a valid UTF-8 string that doesn't look like mojibake, return it.
|
||||
// Mojibake looks like characters from Latin-1 Supplement (0xC0-0xFF)
|
||||
// where they should be Cyrillic.
|
||||
|
||||
let bytes: Vec<u8> = s.chars().map(|c| c as u32).filter(|&c| c <= 255).map(|c| c as u8).collect();
|
||||
|
||||
// If the length is different, it means there were characters > 255, so it's not simple Latin-1 mojibake.
|
||||
if bytes.len() != s.chars().count() {
|
||||
return s;
|
||||
}
|
||||
|
||||
// Check if it's likely CP1251. Russian characters in CP1251 are 0xC0-0xFF.
|
||||
// In Latin-1 these are characters like À-ÿ.
|
||||
let has_mojibake = bytes.iter().any(|&b| b >= 0xC0);
|
||||
if !has_mojibake {
|
||||
return s;
|
||||
}
|
||||
|
||||
let (decoded, _, errors) = encoding_rs::WINDOWS_1251.decode(&bytes);
|
||||
if errors {
|
||||
return s;
|
||||
}
|
||||
|
||||
decoded.into_owned()
|
||||
}
|
||||
66
furumi-server/src/web/mod.rs
Normal file
66
furumi-server/src/web/mod.rs
Normal file
@@ -0,0 +1,66 @@
|
||||
pub mod auth;
|
||||
pub mod browse;
|
||||
pub mod meta;
|
||||
pub mod stream;
|
||||
pub mod transcoder;
|
||||
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use axum::{
|
||||
Router,
|
||||
middleware,
|
||||
routing::get,
|
||||
};
|
||||
|
||||
/// Shared state passed to all web handlers.
#[derive(Clone)]
pub struct WebState {
    // Music library root; request paths are resolved underneath it.
    pub root: Arc<PathBuf>,
    // Shared auth token — presumably checked by auth::require_auth;
    // confirm against web::auth.
    pub token: Arc<String>,
    // Present only when OIDC login is configured.
    pub oidc: Option<Arc<OidcState>>,
}
|
||||
|
||||
/// OIDC configuration shared by the /auth/* handlers.
pub struct OidcState {
    // Configured OpenID Connect client (issuer, client id/secret, redirect).
    pub client: openidconnect::core::CoreClient,
    // Key material for session cookies — presumably an HMAC key used by
    // web::auth; confirm there.
    pub session_secret: Vec<u8>,
}
|
||||
|
||||
/// Build the axum Router for the web player.
|
||||
pub fn build_router(root: PathBuf, token: String, oidc: Option<Arc<OidcState>>) -> Router {
|
||||
let state = WebState {
|
||||
root: Arc::new(root),
|
||||
token: Arc::new(token),
|
||||
oidc,
|
||||
};
|
||||
|
||||
let api = Router::new()
|
||||
.route("/browse", get(browse::handler))
|
||||
.route("/stream/*path", get(stream::handler))
|
||||
.route("/meta/*path", get(meta::handler));
|
||||
|
||||
let authed_routes = Router::new()
|
||||
.route("/", get(player_html))
|
||||
.nest("/api", api)
|
||||
.route_layer(middleware::from_fn_with_state(state.clone(), auth::require_auth));
|
||||
|
||||
Router::new()
|
||||
.route("/login", get(auth::login_page).post(auth::login_submit))
|
||||
.route("/logout", get(auth::logout))
|
||||
.route("/auth/login", get(auth::oidc_login))
|
||||
.route("/auth/callback", get(auth::oidc_callback))
|
||||
.merge(authed_routes)
|
||||
.with_state(state)
|
||||
}
|
||||
|
||||
// Display name of the authenticated user, read from request extensions by
// player_html — presumably inserted there by auth::require_auth; confirm
// against web::auth.
#[derive(Clone)]
pub struct AuthUserInfo(pub String);
|
||||
|
||||
async fn player_html(
|
||||
axum::extract::Extension(user_info): axum::extract::Extension<AuthUserInfo>,
|
||||
) -> axum::response::Html<String> {
|
||||
let html = include_str!("player.html")
|
||||
.replace("<!-- USERNAME_PLACEHOLDER -->", &user_info.0)
|
||||
.replace("<!-- VERSION_PLACEHOLDER -->", option_env!("FURUMI_VERSION").unwrap_or(env!("CARGO_PKG_VERSION")));
|
||||
axum::response::Html(html)
|
||||
}
|
||||
1101
furumi-server/src/web/player.html
Normal file
1101
furumi-server/src/web/player.html
Normal file
File diff suppressed because it is too large
Load Diff
171
furumi-server/src/web/stream.rs
Normal file
171
furumi-server/src/web/stream.rs
Normal file
@@ -0,0 +1,171 @@
|
||||
use axum::{
|
||||
body::Body,
|
||||
extract::{Path, Query, State},
|
||||
http::{HeaderMap, HeaderValue, StatusCode, header},
|
||||
response::{IntoResponse, Response},
|
||||
};
|
||||
use serde::Deserialize;
|
||||
use tokio::io::{AsyncReadExt, AsyncSeekExt};
|
||||
|
||||
use crate::security::sanitize_path;
|
||||
use super::{
|
||||
WebState,
|
||||
browse::{is_audio_file, needs_transcode},
|
||||
};
|
||||
|
||||
/// Query parameters accepted by the stream handler.
#[derive(Deserialize)]
pub struct StreamQuery {
    // "1" forces transcoding even for natively streamable formats.
    #[serde(default)]
    pub transcode: Option<String>,
}
|
||||
|
||||
pub async fn handler(
|
||||
State(state): State<WebState>,
|
||||
Path(path): Path<String>,
|
||||
Query(query): Query<StreamQuery>,
|
||||
headers: HeaderMap,
|
||||
) -> impl IntoResponse {
|
||||
let safe = match sanitize_path(&path) {
|
||||
Ok(p) => p,
|
||||
Err(_) => return bad_request("invalid path"),
|
||||
};
|
||||
|
||||
let file_path = state.root.join(&safe);
|
||||
|
||||
let filename = file_path
|
||||
.file_name()
|
||||
.and_then(|n| n.to_str())
|
||||
.unwrap_or("")
|
||||
.to_owned();
|
||||
|
||||
if !is_audio_file(&filename) {
|
||||
return (StatusCode::FORBIDDEN, "not an audio file").into_response();
|
||||
}
|
||||
|
||||
let force_transcode = query.transcode.as_deref() == Some("1");
|
||||
|
||||
if force_transcode || needs_transcode(&filename) {
|
||||
return stream_transcoded(file_path).await;
|
||||
}
|
||||
|
||||
stream_native(file_path, &filename, &headers).await
|
||||
}
|
||||
|
||||
/// Stream a file as-is with Range support.
|
||||
async fn stream_native(file_path: std::path::PathBuf, filename: &str, req_headers: &HeaderMap) -> Response {
|
||||
let mut file = match tokio::fs::File::open(&file_path).await {
|
||||
Ok(f) => f,
|
||||
Err(e) => {
|
||||
let status = if e.kind() == std::io::ErrorKind::NotFound {
|
||||
StatusCode::NOT_FOUND
|
||||
} else {
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
};
|
||||
return (status, e.to_string()).into_response();
|
||||
}
|
||||
};
|
||||
|
||||
let file_size = match file.metadata().await {
|
||||
Ok(m) => m.len(),
|
||||
Err(e) => return (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response(),
|
||||
};
|
||||
|
||||
let content_type = guess_content_type(filename);
|
||||
|
||||
// Parse Range header
|
||||
let range_header = req_headers
|
||||
.get(header::RANGE)
|
||||
.and_then(|v| v.to_str().ok())
|
||||
.and_then(parse_range);
|
||||
|
||||
if let Some((start, end)) = range_header {
|
||||
let end = end.unwrap_or(file_size - 1).min(file_size - 1);
|
||||
if start > end || start >= file_size {
|
||||
return (StatusCode::RANGE_NOT_SATISFIABLE, "invalid range").into_response();
|
||||
}
|
||||
|
||||
let length = end - start + 1;
|
||||
|
||||
if let Err(e) = file.seek(std::io::SeekFrom::Start(start)).await {
|
||||
return (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response();
|
||||
}
|
||||
|
||||
let limited = file.take(length);
|
||||
let stream = tokio_util::io::ReaderStream::new(limited);
|
||||
let body = Body::from_stream(stream);
|
||||
|
||||
let mut resp_headers = HeaderMap::new();
|
||||
resp_headers.insert(header::CONTENT_TYPE, content_type.parse().unwrap());
|
||||
resp_headers.insert(header::ACCEPT_RANGES, HeaderValue::from_static("bytes"));
|
||||
resp_headers.insert(header::CONTENT_LENGTH, length.to_string().parse().unwrap());
|
||||
resp_headers.insert(
|
||||
header::CONTENT_RANGE,
|
||||
format!("bytes {}-{}/{}", start, end, file_size).parse().unwrap(),
|
||||
);
|
||||
(StatusCode::PARTIAL_CONTENT, resp_headers, body).into_response()
|
||||
} else {
|
||||
// Full file
|
||||
let stream = tokio_util::io::ReaderStream::new(file);
|
||||
let body = Body::from_stream(stream);
|
||||
|
||||
let mut resp_headers = HeaderMap::new();
|
||||
resp_headers.insert(header::CONTENT_TYPE, content_type.parse().unwrap());
|
||||
resp_headers.insert(header::ACCEPT_RANGES, HeaderValue::from_static("bytes"));
|
||||
resp_headers.insert(header::CONTENT_LENGTH, file_size.to_string().parse().unwrap());
|
||||
(StatusCode::OK, resp_headers, body).into_response()
|
||||
}
|
||||
}
|
||||
|
||||
/// Stream a transcoded (Ogg/Opus) version of the file.
|
||||
async fn stream_transcoded(file_path: std::path::PathBuf) -> Response {
|
||||
let ogg_data = match tokio::task::spawn_blocking(move || {
|
||||
super::transcoder::transcode_to_ogg_opus(file_path)
|
||||
})
|
||||
.await
|
||||
{
|
||||
Ok(Ok(data)) => data,
|
||||
Ok(Err(e)) => {
|
||||
return (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response();
|
||||
}
|
||||
Err(e) => {
|
||||
return (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response();
|
||||
}
|
||||
};
|
||||
|
||||
let len = ogg_data.len();
|
||||
let mut resp_headers = HeaderMap::new();
|
||||
resp_headers.insert(header::CONTENT_TYPE, "audio/ogg".parse().unwrap());
|
||||
resp_headers.insert(header::CONTENT_LENGTH, len.to_string().parse().unwrap());
|
||||
resp_headers.insert(header::ACCEPT_RANGES, HeaderValue::from_static("none"));
|
||||
|
||||
(StatusCode::OK, resp_headers, Body::from(ogg_data)).into_response()
|
||||
}
|
||||
|
||||
/// Parse a `Range: bytes=<start>-[<end>]` header value.
///
/// Returns `(start, Some(end))`, or `(start, None)` for an open-ended or
/// unparsable end. Suffix ranges (`bytes=-N`) and malformed starts yield
/// `None`.
fn parse_range(s: &str) -> Option<(u64, Option<u64>)> {
    let spec = s.strip_prefix("bytes=")?;

    // Split on the first '-'; a missing dash means an open-ended range.
    let (first, rest) = match spec.split_once('-') {
        Some((a, b)) => (a, Some(b)),
        None => (spec, None),
    };

    let start: u64 = first.parse().ok()?;
    let end = match rest {
        Some("") | None => None,
        Some(e) => e.parse().ok(),
    };
    Some((start, end))
}
|
||||
|
||||
/// Map a filename extension (case-insensitive) to an audio MIME type.
/// Unknown or missing extensions fall back to application/octet-stream.
fn guess_content_type(filename: &str) -> &'static str {
    // Everything after the last '.' — or the whole name when there is none.
    match filename
        .rsplit('.')
        .next()
        .unwrap_or("")
        .to_lowercase()
        .as_str()
    {
        "mp3" => "audio/mpeg",
        "flac" => "audio/flac",
        "ogg" => "audio/ogg",
        "opus" => "audio/ogg; codecs=opus",
        "aac" => "audio/aac",
        "m4a" => "audio/mp4",
        "wav" => "audio/wav",
        _ => "application/octet-stream",
    }
}
|
||||
|
||||
/// Shorthand for a 400 response with a plain-text body.
fn bad_request(msg: &'static str) -> Response {
    (StatusCode::BAD_REQUEST, msg).into_response()
}
|
||||
244
furumi-server/src/web/transcoder.rs
Normal file
244
furumi-server/src/web/transcoder.rs
Normal file
@@ -0,0 +1,244 @@
|
||||
//! Symphonia-based audio transcoder: decodes any format → encodes to Ogg/Opus stream.
|
||||
//!
|
||||
//! The heavy decode/encode work runs in a `spawn_blocking` thread.
|
||||
//! PCM samples are sent over a channel to the async stream handler.
|
||||
|
||||
use std::path::PathBuf;
|
||||
use std::io::Cursor;
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use symphonia::core::{
|
||||
audio::{AudioBufferRef, Signal},
|
||||
codecs::{DecoderOptions, CODEC_TYPE_NULL},
|
||||
errors::Error as SymphoniaError,
|
||||
formats::FormatOptions,
|
||||
io::MediaSourceStream,
|
||||
meta::MetadataOptions,
|
||||
probe::Hint,
|
||||
};
|
||||
use ogg::writing::PacketWriter;
|
||||
use opus::{Application, Channels, Encoder};
|
||||
|
||||
/// Transcode an audio file at `path` into an Ogg/Opus byte stream.
/// Returns `Vec<u8>` with the full Ogg/Opus file (suitable for streaming/download).
///
/// This is intentionally synchronous (for use inside `spawn_blocking`).
///
/// NOTE(review): when the source rate is not an Opus rate, the encoder is
/// configured for 48 kHz but the decoded samples are NOT resampled — a
/// 44.1 kHz source will play back pitch-/speed-shifted. Confirm whether a
/// resampling stage is planned.
/// NOTE(review): the EndStream packet is only written from the
/// partial-frame branch; if the track length is an exact multiple of the
/// frame size, the Ogg stream is never explicitly terminated.
pub fn transcode_to_ogg_opus(path: PathBuf) -> Result<Vec<u8>> {
    // ---- Open and probe the source ----
    let file = std::fs::File::open(&path)?;
    let mss = MediaSourceStream::new(Box::new(file), Default::default());

    // Hint the probe with the file extension when available.
    let mut hint = Hint::new();
    if let Some(ext) = path.extension().and_then(|e| e.to_str()) {
        hint.with_extension(ext);
    }

    let probed = symphonia::default::get_probe()
        .format(&hint, mss, &FormatOptions::default(), &MetadataOptions::default())
        .map_err(|e| anyhow!("probe failed: {e}"))?;

    let mut format = probed.format;

    // Find the default audio track
    let track = format
        .tracks()
        .iter()
        .find(|t| t.codec_params.codec != CODEC_TYPE_NULL)
        .ok_or_else(|| anyhow!("no audio track found"))?
        .clone();

    let track_id = track.id;
    let codec_params = &track.codec_params;

    // Fall back to CD-style defaults when the container omits these.
    let sample_rate = codec_params.sample_rate.unwrap_or(44100);
    let n_channels = codec_params.channels.map(|c| c.count()).unwrap_or(2);

    // Opus only supports 1 or 2 channels; downmix to stereo if needed
    let opus_channels = if n_channels == 1 { Channels::Mono } else { Channels::Stereo };
    let opus_ch_count = if n_channels == 1 { 1usize } else { 2 };

    // Opus encoder (target 48 kHz, we'll resample if needed)
    // Opus natively works at 48000 Hz; symphonia will decode at source rate.
    // For simplicity, we encode at the source sample rate - most clients handle this.
    // NOTE(review): no resampling actually happens below — see the function
    // doc note about non-Opus sample rates.
    let opus_sample_rate = if [8000u32, 12000, 16000, 24000, 48000].contains(&sample_rate) {
        sample_rate
    } else {
        // Opus spec: use closest supported rate; 48000 is safest
        48000
    };

    let mut encoder = Encoder::new(opus_sample_rate, opus_channels, Application::Audio)
        .map_err(|e| anyhow!("opus encoder init: {e}"))?;

    // Typical Opus frame = 20ms
    let frame_size = (opus_sample_rate as usize * 20) / 1000; // samples per channel per frame

    let mut decoder = symphonia::default::get_codecs()
        .make(codec_params, &DecoderOptions::default())
        .map_err(|e| anyhow!("decoder init: {e}"))?;

    // ---- Ogg output buffer ----
    let mut ogg_buf: Vec<u8> = Vec::with_capacity(4 * 1024 * 1024);
    {
        // Scope the PacketWriter so its borrow of ogg_buf ends before return.
        let cursor = Cursor::new(&mut ogg_buf);
        let mut pkt_writer = PacketWriter::new(cursor);

        // Write Opus header packet (stream serial = 1)
        let serial: u32 = 1;
        // NOTE(review): pre_skip is 0 here; RFC 7845 recommends encoding the
        // encoder lookahead — players may emit a few ms of priming samples.
        let opus_head = build_opus_head(opus_ch_count as u8, opus_sample_rate, 0);
        pkt_writer.write_packet(opus_head, serial, ogg::writing::PacketWriteEndInfo::EndPage, 0)?;

        // Write Opus tags packet (empty)
        let opus_tags = build_opus_tags();
        pkt_writer.write_packet(opus_tags, serial, ogg::writing::PacketWriteEndInfo::EndPage, 0)?;

        // Interleaved PCM awaiting encoding, and the running Ogg granule
        // position (samples per channel emitted so far).
        let mut sample_buf: Vec<f32> = Vec::new();
        let mut granule_pos: u64 = 0;

        loop {
            let packet = match format.next_packet() {
                Ok(p) => p,
                // Normal end of stream.
                Err(SymphoniaError::IoError(e)) if e.kind() == std::io::ErrorKind::UnexpectedEof => break,
                Err(SymphoniaError::ResetRequired) => {
                    decoder.reset();
                    continue;
                }
                Err(e) => return Err(anyhow!("format error: {e}")),
            };

            // Skip packets belonging to other (e.g. video/subtitle) tracks.
            if packet.track_id() != track_id {
                continue;
            }

            match decoder.decode(&packet) {
                Ok(decoded) => {
                    collect_samples(&decoded, opus_ch_count, &mut sample_buf);
                }
                // Tolerate isolated corrupt packets.
                Err(SymphoniaError::DecodeError(_)) => continue,
                Err(e) => return Err(anyhow!("decode error: {e}")),
            }

            // Encode complete frames from sample_buf
            while sample_buf.len() >= frame_size * opus_ch_count {
                let frame: Vec<f32> = sample_buf.drain(..frame_size * opus_ch_count).collect();
                // 4000 bytes is a generous upper bound for one Opus frame.
                let mut out = vec![0u8; 4000];
                let encoded_len = encoder
                    .encode_float(&frame, &mut out)
                    .map_err(|e| anyhow!("opus encode: {e}"))?;
                out.truncate(encoded_len);

                granule_pos += frame_size as u64;
                pkt_writer.write_packet(
                    out,
                    serial,
                    ogg::writing::PacketWriteEndInfo::NormalPacket,
                    granule_pos,
                )?;
            }
        }

        // Encode remaining samples (partial frame — pad with silence)
        if !sample_buf.is_empty() {
            let needed = frame_size * opus_ch_count;
            sample_buf.resize(needed, 0.0);
            let mut out = vec![0u8; 4000];
            let encoded_len = encoder
                .encode_float(&sample_buf, &mut out)
                .map_err(|e| anyhow!("opus encode final: {e}"))?;
            out.truncate(encoded_len);
            granule_pos += frame_size as u64;
            pkt_writer.write_packet(
                out,
                serial,
                ogg::writing::PacketWriteEndInfo::EndStream,
                granule_pos,
            )?;
        }
    }

    Ok(ogg_buf)
}
|
||||
|
||||
/// Collect PCM samples from a symphonia AudioBufferRef into a flat f32 vec.
|
||||
/// Downmixes to `target_channels` (1 or 2) if source has more channels.
|
||||
fn collect_samples(decoded: &AudioBufferRef<'_>, target_channels: usize, out: &mut Vec<f32>) {
|
||||
match decoded {
|
||||
AudioBufferRef::F32(buf) => {
|
||||
interleave_channels(buf.chan(0), if buf.spec().channels.count() > 1 { Some(buf.chan(1)) } else { None }, target_channels, out);
|
||||
}
|
||||
AudioBufferRef::S16(buf) => {
|
||||
let ch0: Vec<f32> = buf.chan(0).iter().map(|&s| s as f32 / 32768.0).collect();
|
||||
let ch1 = if buf.spec().channels.count() > 1 {
|
||||
Some(buf.chan(1).iter().map(|&s| s as f32 / 32768.0).collect::<Vec<_>>())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
interleave_channels(&ch0, ch1.as_deref(), target_channels, out);
|
||||
}
|
||||
AudioBufferRef::S32(buf) => {
|
||||
let ch0: Vec<f32> = buf.chan(0).iter().map(|&s| s as f32 / 2147483648.0).collect();
|
||||
let ch1 = if buf.spec().channels.count() > 1 {
|
||||
Some(buf.chan(1).iter().map(|&s| s as f32 / 2147483648.0).collect::<Vec<_>>())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
interleave_channels(&ch0, ch1.as_deref(), target_channels, out);
|
||||
}
|
||||
AudioBufferRef::U8(buf) => {
|
||||
let ch0: Vec<f32> = buf.chan(0).iter().map(|&s| (s as f32 - 128.0) / 128.0).collect();
|
||||
let ch1 = if buf.spec().channels.count() > 1 {
|
||||
Some(buf.chan(1).iter().map(|&s| (s as f32 - 128.0) / 128.0).collect::<Vec<_>>())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
interleave_channels(&ch0, ch1.as_deref(), target_channels, out);
|
||||
}
|
||||
_ => {
|
||||
// For other formats, try to get samples via S16 conversion
|
||||
// (symphonia may provide other types; we skip unsupported ones)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Interleave up to two planar channels into `out`.
///
/// `target_channels == 1`: averages L/R into mono (or copies a mono
/// source). Otherwise writes stereo interleaved L,R,L,R…, duplicating the
/// single channel when no second channel is supplied. Panics if `ch1` is
/// shorter than `ch0` in the stereo path.
fn interleave_channels(ch0: &[f32], ch1: Option<&[f32]>, target_channels: usize, out: &mut Vec<f32>) {
    if target_channels == 1 {
        match ch1 {
            // Mix down to mono by averaging the pair.
            Some(right) => out.extend(ch0.iter().zip(right).map(|(l, r)| (l + r) * 0.5)),
            None => out.extend_from_slice(ch0),
        }
        return;
    }

    // Stereo interleaved; a mono source feeds both channels.
    let right = ch1.unwrap_or(ch0);
    for idx in 0..ch0.len() {
        out.push(ch0[idx]);
        out.push(right[idx]);
    }
}
|
||||
|
||||
/// Build an OpusHead identification packet (RFC 7845 §5.1).
///
/// Layout: magic "OpusHead", version 1, channel count, pre-skip (LE u16),
/// input sample rate (LE u32), output gain 0 (LE u16), channel mapping
/// family 0 (mono/stereo, no mapping table) — 19 bytes total.
fn build_opus_head(channels: u8, sample_rate: u32, pre_skip: u16) -> Vec<u8> {
    let mut head = b"OpusHead".to_vec();
    head.push(1); // version
    head.push(channels);
    head.extend(pre_skip.to_le_bytes());
    head.extend(sample_rate.to_le_bytes());
    head.extend(0u16.to_le_bytes()); // output gain: 0 dB
    head.push(0); // channel mapping family 0
    head
}
|
||||
|
||||
/// Build an OpusTags comment packet (RFC 7845 §5.2) containing only a
/// vendor string and zero user comments.
fn build_opus_tags() -> Vec<u8> {
    const VENDOR: &[u8] = b"furumi-server";
    let mut tags = b"OpusTags".to_vec();
    tags.extend((VENDOR.len() as u32).to_le_bytes());
    tags.extend_from_slice(VENDOR);
    tags.extend(0u32.to_le_bytes()); // user comment list length = 0
    tags
}
|
||||
27
furumi-web-player/Cargo.toml
Normal file
27
furumi-web-player/Cargo.toml
Normal file
@@ -0,0 +1,27 @@
|
||||
[package]
|
||||
name = "furumi-web-player"
|
||||
version = "0.3.4"
|
||||
edition = "2024"
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1.0"
|
||||
axum = { version = "0.7", features = ["tokio", "macros"] }
|
||||
clap = { version = "4.5", features = ["derive", "env"] }
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
serde_json = "1"
|
||||
sqlx = { version = "0.8", features = ["runtime-tokio-rustls", "postgres", "chrono", "uuid", "migrate"] }
|
||||
tokio = { version = "1.50", features = ["full"] }
|
||||
tower = { version = "0.4", features = ["util"] }
|
||||
tracing = "0.1"
|
||||
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
|
||||
mime_guess = "2.0"
|
||||
symphonia = { version = "0.5", default-features = false, features = ["mp3", "aac", "flac", "vorbis", "wav", "alac", "adpcm", "pcm", "mpa", "isomp4", "ogg", "aiff", "mkv"] }
|
||||
tokio-util = { version = "0.7", features = ["io"] }
|
||||
openidconnect = "3.4"
|
||||
reqwest = { version = "0.12", default-features = false, features = ["rustls-tls"] }
|
||||
sha2 = "0.10"
|
||||
hmac = "0.12"
|
||||
base64 = "0.22"
|
||||
rand = "0.8"
|
||||
urlencoding = "2.1.3"
|
||||
rustls = { version = "0.23", features = ["ring"] }
|
||||
234
furumi-web-player/src/db.rs
Normal file
234
furumi-web-player/src/db.rs
Normal file
@@ -0,0 +1,234 @@
|
||||
use serde::Serialize;
|
||||
use sqlx::PgPool;
|
||||
use sqlx::postgres::PgPoolOptions;
|
||||
|
||||
/// Open a PostgreSQL connection pool (capped at 10 connections) for the
/// web player's read-only queries.
pub async fn connect(database_url: &str) -> Result<PgPool, sqlx::Error> {
    PgPoolOptions::new()
        .max_connections(10)
        .connect(database_url)
        .await
}
|
||||
|
||||
// --- Models ---
|
||||
|
||||
/// One row of the artist index, with aggregate counts (see list_artists).
#[derive(Debug, Serialize, sqlx::FromRow)]
pub struct ArtistListItem {
    pub slug: String,
    pub name: String,
    pub album_count: i64,
    pub track_count: i64,
}
|
||||
|
||||
/// Artist header data fetched by slug (see get_artist).
#[derive(Debug, Serialize, sqlx::FromRow)]
pub struct ArtistDetail {
    pub slug: String,
    pub name: String,
}
|
||||
|
||||
/// One album row in an artist's album list (see list_albums_by_artist).
#[derive(Debug, Serialize, sqlx::FromRow)]
pub struct AlbumListItem {
    pub slug: String,
    pub name: String,
    pub year: Option<i32>,
    // Count of visible (non-hidden) tracks on the album.
    pub track_count: i64,
    // True when an album_images row of type 'cover' exists.
    pub has_cover: bool,
}
|
||||
|
||||
/// One track row in album/artist track listings.
#[derive(Debug, Serialize, sqlx::FromRow)]
pub struct TrackListItem {
    pub slug: String,
    pub title: String,
    pub track_number: Option<i32>,
    pub duration_secs: Option<f64>,
    pub artist_name: String,
    // Album fields are None for tracks without an album (LEFT JOIN).
    pub album_name: Option<String>,
    pub album_slug: Option<String>,
    pub genre: Option<String>,
}
|
||||
|
||||
/// Full track details for playback, joined with artist and optional album
/// (see get_track).
#[derive(Debug, Serialize, sqlx::FromRow)]
pub struct TrackDetail {
    pub slug: String,
    pub title: String,
    pub track_number: Option<i32>,
    pub duration_secs: Option<f64>,
    pub genre: Option<String>,
    // Path of the audio file relative to the configured storage directory.
    pub storage_path: String,
    pub artist_name: String,
    pub artist_slug: String,
    // None for album-less tracks (LEFT JOIN on albums).
    pub album_name: Option<String>,
    pub album_slug: Option<String>,
    pub album_year: Option<i32>,
}
|
||||
|
||||
|
||||
/// Location and MIME type of a stored album cover image.
#[derive(Debug, sqlx::FromRow)]
pub struct CoverInfo {
    pub file_path: String,
    pub mime_type: String,
}
|
||||
|
||||
/// Minimal track row used to resolve a track's cover via its album.
#[derive(Debug, sqlx::FromRow)]
pub struct TrackCoverLookup {
    pub storage_path: String,
    // None for tracks that belong to no album.
    pub album_id: Option<i64>,
}
|
||||
|
||||
/// One row of the unified search results (see search()).
#[derive(Debug, Serialize, sqlx::FromRow)]
pub struct SearchResult {
    pub result_type: String, // "artist", "album", "track"
    pub slug: String,
    pub name: String,
    pub detail: Option<String>, // artist name for albums/tracks; NULL for artists
}
|
||||
|
||||
// --- Queries ---
|
||||
|
||||
/// List all visible artists that have at least one track, with album and
/// track counts, ordered by name.
///
/// NOTE(review): the joins carry no `hidden` filter on albums/tracks, so
/// the counts include hidden rows and an artist with only hidden tracks
/// still passes the HAVING clause — inconsistent with
/// list_albums_by_artist; confirm intended.
pub async fn list_artists(pool: &PgPool) -> Result<Vec<ArtistListItem>, sqlx::Error> {
    sqlx::query_as::<_, ArtistListItem>(
        r#"SELECT ar.slug, ar.name,
               COUNT(DISTINCT al.id) AS album_count,
               COUNT(DISTINCT t.id) AS track_count
           FROM artists ar
           LEFT JOIN albums al ON al.artist_id = ar.id
           LEFT JOIN tracks t ON t.artist_id = ar.id
           WHERE NOT ar.hidden
           GROUP BY ar.id, ar.slug, ar.name
           HAVING COUNT(DISTINCT t.id) > 0
           ORDER BY ar.name"#
    )
    .fetch_all(pool)
    .await
}
|
||||
|
||||
/// Fetch an artist by slug.
///
/// NOTE(review): unlike list_artists, there is no `hidden` filter here,
/// so hidden artists still resolve via direct link — confirm intended.
pub async fn get_artist(pool: &PgPool, slug: &str) -> Result<Option<ArtistDetail>, sqlx::Error> {
    sqlx::query_as::<_, ArtistDetail>(
        "SELECT slug, name FROM artists WHERE slug = $1"
    )
    .bind(slug)
    .fetch_optional(pool)
    .await
}
|
||||
|
||||
/// List visible albums for an artist slug, with per-album visible-track
/// counts and a cover-availability flag, ordered by year (NULLs last)
/// then name. Albums whose tracks are all hidden are excluded.
pub async fn list_albums_by_artist(pool: &PgPool, artist_slug: &str) -> Result<Vec<AlbumListItem>, sqlx::Error> {
    sqlx::query_as::<_, AlbumListItem>(
        r#"SELECT al.slug, al.name, al.year,
               COUNT(t.id) AS track_count,
               EXISTS(SELECT 1 FROM album_images ai WHERE ai.album_id = al.id AND ai.image_type = 'cover') AS has_cover
           FROM albums al
           JOIN artists ar ON al.artist_id = ar.id
           LEFT JOIN tracks t ON t.album_id = al.id AND NOT t.hidden
           WHERE ar.slug = $1
             AND NOT al.hidden
             AND EXISTS (SELECT 1 FROM tracks t2 WHERE t2.album_id = al.id AND NOT t2.hidden)
           GROUP BY al.id, al.slug, al.name, al.year
           ORDER BY al.year NULLS LAST, al.name"#
    )
    .bind(artist_slug)
    .fetch_all(pool)
    .await
}
|
||||
|
||||
/// List visible tracks on an album slug, ordered by track number (NULLs
/// last) then title.
pub async fn list_tracks_by_album(pool: &PgPool, album_slug: &str) -> Result<Vec<TrackListItem>, sqlx::Error> {
    sqlx::query_as::<_, TrackListItem>(
        r#"SELECT t.slug, t.title, t.track_number, t.duration_secs,
               ar.name AS artist_name,
               al.name AS album_name, al.slug AS album_slug, t.genre
           FROM tracks t
           JOIN artists ar ON t.artist_id = ar.id
           LEFT JOIN albums al ON t.album_id = al.id
           WHERE al.slug = $1
             AND NOT t.hidden
           ORDER BY t.track_number NULLS LAST, t.title"#
    )
    .bind(album_slug)
    .fetch_all(pool)
    .await
}
|
||||
|
||||
/// Fetch full playback details for a track by slug, joined with its artist
/// and (optional) album.
///
/// NOTE(review): no `hidden` filter — direct links to hidden tracks still
/// resolve; confirm intended.
pub async fn get_track(pool: &PgPool, slug: &str) -> Result<Option<TrackDetail>, sqlx::Error> {
    sqlx::query_as::<_, TrackDetail>(
        r#"SELECT t.slug, t.title, t.track_number, t.duration_secs, t.genre,
               t.storage_path, ar.name AS artist_name, ar.slug AS artist_slug,
               al.name AS album_name, al.slug AS album_slug, al.year AS album_year
           FROM tracks t
           JOIN artists ar ON t.artist_id = ar.id
           LEFT JOIN albums al ON t.album_id = al.id
           WHERE t.slug = $1"#
    )
    .bind(slug)
    .fetch_optional(pool)
    .await
}
|
||||
|
||||
|
||||
/// Fetch the storage path and album id needed to resolve a track's cover
/// (the album id is then passed to get_album_cover_by_id).
pub async fn get_track_cover_lookup(pool: &PgPool, track_slug: &str) -> Result<Option<TrackCoverLookup>, sqlx::Error> {
    sqlx::query_as::<_, TrackCoverLookup>(
        "SELECT storage_path, album_id FROM tracks WHERE slug = $1"
    )
    .bind(track_slug)
    .fetch_optional(pool)
    .await
}
|
||||
|
||||
/// Fetch the cover image record for an album by its numeric id.
pub async fn get_album_cover_by_id(pool: &PgPool, album_id: i64) -> Result<Option<CoverInfo>, sqlx::Error> {
    sqlx::query_as::<_, CoverInfo>(
        r#"SELECT file_path, mime_type FROM album_images
           WHERE album_id = $1 AND image_type = 'cover' LIMIT 1"#
    )
    .bind(album_id)
    .fetch_optional(pool)
    .await
}
|
||||
|
||||
/// Fetch the cover image record for an album by slug.
pub async fn get_album_cover(pool: &PgPool, album_slug: &str) -> Result<Option<CoverInfo>, sqlx::Error> {
    sqlx::query_as::<_, CoverInfo>(
        r#"SELECT ai.file_path, ai.mime_type
           FROM album_images ai
           JOIN albums al ON ai.album_id = al.id
           WHERE al.slug = $1 AND ai.image_type = 'cover'
           LIMIT 1"#
    )
    .bind(album_slug)
    .fetch_optional(pool)
    .await
}
|
||||
|
||||
/// Case-insensitive substring search over artists, albums, and tracks.
///
/// Hidden rows are now excluded, matching the list_* queries (which all
/// filter `NOT hidden`) — previously hidden content leaked into search
/// results.
///
/// NOTE(review): `%` and `_` in the user query act as ILIKE wildcards;
/// escape them if literal matching is required.
pub async fn search(pool: &PgPool, query: &str, limit: i32) -> Result<Vec<SearchResult>, sqlx::Error> {
    let pattern = format!("%{}%", query);
    sqlx::query_as::<_, SearchResult>(
        r#"SELECT * FROM (
               SELECT 'artist' AS result_type, slug, name, NULL AS detail
               FROM artists WHERE name ILIKE $1 AND NOT hidden
               UNION ALL
               SELECT 'album' AS result_type, al.slug, al.name, ar.name AS detail
               FROM albums al JOIN artists ar ON al.artist_id = ar.id
               WHERE al.name ILIKE $1 AND NOT al.hidden
               UNION ALL
               SELECT 'track' AS result_type, t.slug, t.title AS name, ar.name AS detail
               FROM tracks t JOIN artists ar ON t.artist_id = ar.id
               WHERE t.title ILIKE $1 AND NOT t.hidden
           ) sub ORDER BY result_type, name LIMIT $2"#
    )
    .bind(&pattern)
    .bind(limit)
    .fetch_all(pool)
    .await
}
|
||||
|
||||
/// List every visible track by an artist across all albums, ordered by
/// album year/name (NULLs last) then track number/title.
pub async fn list_all_tracks_by_artist(pool: &PgPool, artist_slug: &str) -> Result<Vec<TrackListItem>, sqlx::Error> {
    sqlx::query_as::<_, TrackListItem>(
        r#"SELECT t.slug, t.title, t.track_number, t.duration_secs,
               ar.name AS artist_name,
               al.name AS album_name, al.slug AS album_slug, t.genre
           FROM tracks t
           JOIN artists ar ON t.artist_id = ar.id
           LEFT JOIN albums al ON t.album_id = al.id
           WHERE ar.slug = $1
             AND NOT t.hidden
           ORDER BY al.year NULLS LAST, al.name, t.track_number NULLS LAST, t.title"#
    )
    .bind(artist_slug)
    .fetch_all(pool)
    .await
}
|
||||
106
furumi-web-player/src/main.rs
Normal file
106
furumi-web-player/src/main.rs
Normal file
@@ -0,0 +1,106 @@
|
||||
mod db;
|
||||
mod web;
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use clap::Parser;
|
||||
|
||||
// Command-line / environment configuration for the web player binary.
// All options can also be set via FURUMI_PLAYER_* environment variables
// (clap `env` attribute). The OIDC options are individually optional;
// presumably they must all be set together for OIDC login to activate —
// confirm against main(). (Field `///` docs are clap help text and are
// left untouched.)
#[derive(Parser, Debug)]
#[command(version, about = "Furumi Web Player: database-backed music player")]
struct Args {
    /// IP address and port for the web player
    #[arg(long, env = "FURUMI_PLAYER_BIND", default_value = "0.0.0.0:8080")]
    bind: String,

    /// PostgreSQL connection URL
    #[arg(long, env = "FURUMI_PLAYER_DATABASE_URL")]
    database_url: String,

    /// Root directory where music files are stored (agent's storage_dir)
    #[arg(long, env = "FURUMI_PLAYER_STORAGE_DIR")]
    storage_dir: std::path::PathBuf,

    /// OIDC Issuer URL (e.g. https://auth.example.com/application/o/furumi/)
    #[arg(long, env = "FURUMI_PLAYER_OIDC_ISSUER_URL")]
    oidc_issuer_url: Option<String>,

    /// OIDC Client ID
    #[arg(long, env = "FURUMI_PLAYER_OIDC_CLIENT_ID")]
    oidc_client_id: Option<String>,

    /// OIDC Client Secret
    #[arg(long, env = "FURUMI_PLAYER_OIDC_CLIENT_SECRET")]
    oidc_client_secret: Option<String>,

    /// OIDC Redirect URL (e.g. https://music.example.com/auth/callback)
    #[arg(long, env = "FURUMI_PLAYER_OIDC_REDIRECT_URL")]
    oidc_redirect_url: Option<String>,

    /// OIDC Session Secret (32+ chars, for HMAC). Random if not provided.
    #[arg(long, env = "FURUMI_PLAYER_OIDC_SESSION_SECRET")]
    oidc_session_secret: Option<String>,
}
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Install ring as the default crypto provider for rustls
|
||||
rustls::crypto::ring::default_provider()
|
||||
.install_default()
|
||||
.expect("Failed to install rustls crypto provider");
|
||||
|
||||
tracing_subscriber::fmt::init();
|
||||
|
||||
let args = Args::parse();
|
||||
|
||||
let version = option_env!("FURUMI_VERSION").unwrap_or(env!("CARGO_PKG_VERSION"));
|
||||
tracing::info!("Furumi Web Player v{} starting", version);
|
||||
tracing::info!("Storage directory: {:?}", args.storage_dir);
|
||||
|
||||
if !args.storage_dir.exists() || !args.storage_dir.is_dir() {
|
||||
eprintln!("Error: Storage directory {:?} does not exist or is not a directory", args.storage_dir);
|
||||
std::process::exit(1);
|
||||
}
|
||||
|
||||
tracing::info!("Connecting to database...");
|
||||
let pool = db::connect(&args.database_url).await?;
|
||||
tracing::info!("Database connected");
|
||||
|
||||
// Initialize OIDC if configured
|
||||
let oidc_state = if let (Some(issuer), Some(client_id), Some(secret), Some(redirect)) = (
|
||||
args.oidc_issuer_url,
|
||||
args.oidc_client_id,
|
||||
args.oidc_client_secret,
|
||||
args.oidc_redirect_url,
|
||||
) {
|
||||
tracing::info!("OIDC (SSO): enabled (issuer: {})", issuer);
|
||||
match web::auth::oidc_init(issuer, client_id, secret, redirect, args.oidc_session_secret).await {
|
||||
Ok(state) => Some(Arc::new(state)),
|
||||
Err(e) => {
|
||||
eprintln!("Error initializing OIDC: {}", e);
|
||||
std::process::exit(1);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
tracing::info!("OIDC (SSO): disabled (no OIDC configuration provided)");
|
||||
None
|
||||
};
|
||||
|
||||
let bind_addr: std::net::SocketAddr = args.bind.parse().unwrap_or_else(|e| {
|
||||
eprintln!("Error: Invalid bind address '{}': {}", args.bind, e);
|
||||
std::process::exit(1);
|
||||
});
|
||||
|
||||
let state = Arc::new(web::AppState {
|
||||
pool,
|
||||
storage_dir: Arc::new(args.storage_dir),
|
||||
oidc: oidc_state,
|
||||
});
|
||||
|
||||
tracing::info!("Web player: http://{}", bind_addr);
|
||||
|
||||
let app = web::build_router(state);
|
||||
let listener = tokio::net::TcpListener::bind(bind_addr).await?;
|
||||
axum::serve(listener, app).await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
298
furumi-web-player/src/web/api.rs
Normal file
298
furumi-web-player/src/web/api.rs
Normal file
@@ -0,0 +1,298 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use axum::{
|
||||
body::Body,
|
||||
extract::{Path, Query, State},
|
||||
http::{HeaderMap, StatusCode, header},
|
||||
response::{IntoResponse, Json, Response},
|
||||
};
|
||||
use serde::Deserialize;
|
||||
use tokio::io::{AsyncReadExt, AsyncSeekExt};
|
||||
|
||||
use crate::db;
|
||||
use super::AppState;
|
||||
|
||||
type S = Arc<AppState>;
|
||||
|
||||
// --- Library browsing ---
|
||||
|
||||
pub async fn list_artists(State(state): State<S>) -> impl IntoResponse {
|
||||
match db::list_artists(&state.pool).await {
|
||||
Ok(artists) => (StatusCode::OK, Json(serde_json::to_value(artists).unwrap())).into_response(),
|
||||
Err(e) => error_json(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get_artist(State(state): State<S>, Path(slug): Path<String>) -> impl IntoResponse {
|
||||
match db::get_artist(&state.pool, &slug).await {
|
||||
Ok(Some(artist)) => (StatusCode::OK, Json(serde_json::to_value(artist).unwrap())).into_response(),
|
||||
Ok(None) => error_json(StatusCode::NOT_FOUND, "artist not found"),
|
||||
Err(e) => error_json(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn list_artist_albums(State(state): State<S>, Path(slug): Path<String>) -> impl IntoResponse {
|
||||
match db::list_albums_by_artist(&state.pool, &slug).await {
|
||||
Ok(albums) => (StatusCode::OK, Json(serde_json::to_value(albums).unwrap())).into_response(),
|
||||
Err(e) => error_json(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn list_artist_all_tracks(State(state): State<S>, Path(slug): Path<String>) -> impl IntoResponse {
|
||||
match db::list_all_tracks_by_artist(&state.pool, &slug).await {
|
||||
Ok(tracks) => (StatusCode::OK, Json(serde_json::to_value(tracks).unwrap())).into_response(),
|
||||
Err(e) => error_json(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get_track_detail(State(state): State<S>, Path(slug): Path<String>) -> impl IntoResponse {
|
||||
match db::get_track(&state.pool, &slug).await {
|
||||
Ok(Some(track)) => (StatusCode::OK, Json(serde_json::to_value(track).unwrap())).into_response(),
|
||||
Ok(None) => error_json(StatusCode::NOT_FOUND, "track not found"),
|
||||
Err(e) => error_json(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get_album_tracks(State(state): State<S>, Path(slug): Path<String>) -> impl IntoResponse {
|
||||
match db::list_tracks_by_album(&state.pool, &slug).await {
|
||||
Ok(tracks) => (StatusCode::OK, Json(serde_json::to_value(tracks).unwrap())).into_response(),
|
||||
Err(e) => error_json(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
// --- Stream ---
|
||||
|
||||
pub async fn stream_track(
|
||||
State(state): State<S>,
|
||||
Path(slug): Path<String>,
|
||||
headers: HeaderMap,
|
||||
) -> impl IntoResponse {
|
||||
let track = match db::get_track(&state.pool, &slug).await {
|
||||
Ok(Some(t)) => t,
|
||||
Ok(None) => return error_json(StatusCode::NOT_FOUND, "track not found"),
|
||||
Err(e) => return error_json(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
|
||||
};
|
||||
|
||||
let file_path = std::path::Path::new(&track.storage_path);
|
||||
if !file_path.exists() {
|
||||
return error_json(StatusCode::NOT_FOUND, "file not found on disk");
|
||||
}
|
||||
|
||||
let file_size = match tokio::fs::metadata(file_path).await {
|
||||
Ok(m) => m.len(),
|
||||
Err(e) => return error_json(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
|
||||
};
|
||||
|
||||
let content_type = mime_guess::from_path(file_path)
|
||||
.first_or_octet_stream()
|
||||
.to_string();
|
||||
|
||||
// Parse Range header
|
||||
let range = headers.get(header::RANGE).and_then(|v| v.to_str().ok());
|
||||
|
||||
if let Some(range_str) = range {
|
||||
stream_range(file_path, file_size, &content_type, range_str).await
|
||||
} else {
|
||||
stream_full(file_path, file_size, &content_type).await
|
||||
}
|
||||
}
|
||||
|
||||
async fn stream_full(path: &std::path::Path, size: u64, content_type: &str) -> Response {
|
||||
let file = match tokio::fs::File::open(path).await {
|
||||
Ok(f) => f,
|
||||
Err(e) => return error_json(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
|
||||
};
|
||||
|
||||
let stream = tokio_util::io::ReaderStream::new(file);
|
||||
let body = Body::from_stream(stream);
|
||||
|
||||
Response::builder()
|
||||
.status(StatusCode::OK)
|
||||
.header(header::CONTENT_TYPE, content_type)
|
||||
.header(header::CONTENT_LENGTH, size)
|
||||
.header(header::ACCEPT_RANGES, "bytes")
|
||||
.body(body)
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
async fn stream_range(path: &std::path::Path, size: u64, content_type: &str, range_str: &str) -> Response {
|
||||
// Parse "bytes=START-END"
|
||||
let range = range_str.strip_prefix("bytes=").unwrap_or("");
|
||||
let parts: Vec<&str> = range.split('-').collect();
|
||||
|
||||
let start: u64 = parts.first().and_then(|s| s.parse().ok()).unwrap_or(0);
|
||||
let end: u64 = parts.get(1).and_then(|s| if s.is_empty() { None } else { s.parse().ok() }).unwrap_or(size - 1);
|
||||
|
||||
if start >= size || end >= size || start > end {
|
||||
return Response::builder()
|
||||
.status(StatusCode::RANGE_NOT_SATISFIABLE)
|
||||
.header(header::CONTENT_RANGE, format!("bytes */{}", size))
|
||||
.body(Body::empty())
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
let length = end - start + 1;
|
||||
|
||||
let mut file = match tokio::fs::File::open(path).await {
|
||||
Ok(f) => f,
|
||||
Err(e) => return error_json(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
|
||||
};
|
||||
|
||||
if start > 0 {
|
||||
if let Err(e) = file.seek(std::io::SeekFrom::Start(start)).await {
|
||||
return error_json(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string());
|
||||
}
|
||||
}
|
||||
|
||||
let limited = file.take(length);
|
||||
let stream = tokio_util::io::ReaderStream::new(limited);
|
||||
let body = Body::from_stream(stream);
|
||||
|
||||
Response::builder()
|
||||
.status(StatusCode::PARTIAL_CONTENT)
|
||||
.header(header::CONTENT_TYPE, content_type)
|
||||
.header(header::CONTENT_LENGTH, length)
|
||||
.header(header::CONTENT_RANGE, format!("bytes {}-{}/{}", start, end, size))
|
||||
.header(header::ACCEPT_RANGES, "bytes")
|
||||
.body(body)
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
// --- Cover art ---
|
||||
|
||||
pub async fn album_cover(State(state): State<S>, Path(slug): Path<String>) -> impl IntoResponse {
|
||||
serve_album_cover_by_slug(&state, &slug).await
|
||||
}
|
||||
|
||||
/// Cover for a specific track: album_images → embedded in file → 404
|
||||
pub async fn track_cover(State(state): State<S>, Path(slug): Path<String>) -> impl IntoResponse {
|
||||
let lookup = match db::get_track_cover_lookup(&state.pool, &slug).await {
|
||||
Ok(Some(l)) => l,
|
||||
Ok(None) => return error_json(StatusCode::NOT_FOUND, "track not found"),
|
||||
Err(e) => return error_json(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
|
||||
};
|
||||
|
||||
// 1) Try album cover from DB
|
||||
if let Some(album_id) = lookup.album_id {
|
||||
if let Ok(Some(cover)) = db::get_album_cover_by_id(&state.pool, album_id).await {
|
||||
let path = std::path::Path::new(&cover.file_path);
|
||||
if path.exists() {
|
||||
if let Ok(data) = tokio::fs::read(path).await {
|
||||
return Response::builder()
|
||||
.status(StatusCode::OK)
|
||||
.header(header::CONTENT_TYPE, &cover.mime_type)
|
||||
.header(header::CACHE_CONTROL, "public, max-age=86400")
|
||||
.body(Body::from(data))
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 2) Try extracting embedded cover from the audio file
|
||||
let file_path = std::path::PathBuf::from(&lookup.storage_path);
|
||||
if file_path.exists() {
|
||||
let result = tokio::task::spawn_blocking(move || extract_embedded_cover(&file_path)).await;
|
||||
if let Ok(Some((data, mime))) = result {
|
||||
return Response::builder()
|
||||
.status(StatusCode::OK)
|
||||
.header(header::CONTENT_TYPE, mime)
|
||||
.header(header::CACHE_CONTROL, "public, max-age=86400")
|
||||
.body(Body::from(data))
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
error_json(StatusCode::NOT_FOUND, "no cover art available")
|
||||
}
|
||||
|
||||
/// Extract embedded cover art from an audio file using Symphonia.
|
||||
fn extract_embedded_cover(path: &std::path::Path) -> Option<(Vec<u8>, String)> {
|
||||
use symphonia::core::{
|
||||
formats::FormatOptions,
|
||||
io::MediaSourceStream,
|
||||
meta::MetadataOptions,
|
||||
probe::Hint,
|
||||
};
|
||||
|
||||
let file = std::fs::File::open(path).ok()?;
|
||||
let mss = MediaSourceStream::new(Box::new(file), Default::default());
|
||||
|
||||
let mut hint = Hint::new();
|
||||
if let Some(ext) = path.extension().and_then(|e| e.to_str()) {
|
||||
hint.with_extension(ext);
|
||||
}
|
||||
|
||||
let mut probed = symphonia::default::get_probe().format(
|
||||
&hint,
|
||||
mss,
|
||||
&FormatOptions { enable_gapless: false, ..Default::default() },
|
||||
&MetadataOptions::default(),
|
||||
).ok()?;
|
||||
|
||||
// Check metadata side-data
|
||||
if let Some(rev) = probed.metadata.get().as_ref().and_then(|m| m.current()) {
|
||||
if let Some(visual) = rev.visuals().first() {
|
||||
return Some((visual.data.to_vec(), visual.media_type.clone()));
|
||||
}
|
||||
}
|
||||
|
||||
// Check format-embedded metadata
|
||||
if let Some(rev) = probed.format.metadata().current() {
|
||||
if let Some(visual) = rev.visuals().first() {
|
||||
return Some((visual.data.to_vec(), visual.media_type.clone()));
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
async fn serve_album_cover_by_slug(state: &AppState, slug: &str) -> Response {
|
||||
let cover = match db::get_album_cover(&state.pool, slug).await {
|
||||
Ok(Some(c)) => c,
|
||||
Ok(None) => return error_json(StatusCode::NOT_FOUND, "no cover"),
|
||||
Err(e) => return error_json(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
|
||||
};
|
||||
|
||||
let path = std::path::Path::new(&cover.file_path);
|
||||
if !path.exists() {
|
||||
return error_json(StatusCode::NOT_FOUND, "cover file missing");
|
||||
}
|
||||
|
||||
match tokio::fs::read(path).await {
|
||||
Ok(data) => Response::builder()
|
||||
.status(StatusCode::OK)
|
||||
.header(header::CONTENT_TYPE, &cover.mime_type)
|
||||
.header(header::CACHE_CONTROL, "public, max-age=86400")
|
||||
.body(Body::from(data))
|
||||
.unwrap(),
|
||||
Err(e) => error_json(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
// --- Search ---
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct SearchQuery {
|
||||
pub q: String,
|
||||
#[serde(default = "default_limit")]
|
||||
pub limit: i32,
|
||||
}
|
||||
|
||||
fn default_limit() -> i32 { 20 }
|
||||
|
||||
pub async fn search(State(state): State<S>, Query(q): Query<SearchQuery>) -> impl IntoResponse {
|
||||
if q.q.is_empty() {
|
||||
return (StatusCode::OK, Json(serde_json::json!([]))).into_response();
|
||||
}
|
||||
match db::search(&state.pool, &q.q, q.limit).await {
|
||||
Ok(results) => (StatusCode::OK, Json(serde_json::to_value(results).unwrap())).into_response(),
|
||||
Err(e) => error_json(StatusCode::INTERNAL_SERVER_ERROR, &e.to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
// --- Helpers ---
|
||||
|
||||
fn error_json(status: StatusCode, message: &str) -> Response {
|
||||
(status, Json(serde_json::json!({"error": message}))).into_response()
|
||||
}
|
||||
384
furumi-web-player/src/web/auth.rs
Normal file
384
furumi-web-player/src/web/auth.rs
Normal file
@@ -0,0 +1,384 @@
|
||||
use axum::{
|
||||
body::Body,
|
||||
extract::{Request, State},
|
||||
http::{header, HeaderMap, StatusCode},
|
||||
middleware::Next,
|
||||
response::{Html, IntoResponse, Redirect, Response},
|
||||
};
|
||||
use openidconnect::{
|
||||
core::{CoreClient, CoreProviderMetadata, CoreResponseType},
|
||||
reqwest::async_http_client,
|
||||
AuthenticationFlow, AuthorizationCode, ClientId, ClientSecret, CsrfToken, IssuerUrl, Nonce,
|
||||
PkceCodeChallenge, PkceCodeVerifier, RedirectUrl, Scope, TokenResponse,
|
||||
};
|
||||
use rand::RngCore;
|
||||
use serde::Deserialize;
|
||||
|
||||
use base64::Engine;
|
||||
use hmac::{Hmac, Mac};
|
||||
|
||||
use super::AppState;
|
||||
use std::sync::Arc;
|
||||
|
||||
const SESSION_COOKIE: &str = "furumi_session";
|
||||
|
||||
type HmacSha256 = Hmac<sha2::Sha256>;
|
||||
|
||||
pub struct OidcState {
|
||||
pub client: CoreClient,
|
||||
pub session_secret: Vec<u8>,
|
||||
}
|
||||
|
||||
pub async fn oidc_init(
|
||||
issuer: String,
|
||||
client_id: String,
|
||||
client_secret: String,
|
||||
redirect: String,
|
||||
session_secret_override: Option<String>,
|
||||
) -> anyhow::Result<OidcState> {
|
||||
let provider_metadata = CoreProviderMetadata::discover_async(
|
||||
IssuerUrl::new(issuer)?,
|
||||
async_http_client,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let client = CoreClient::from_provider_metadata(
|
||||
provider_metadata,
|
||||
ClientId::new(client_id),
|
||||
Some(ClientSecret::new(client_secret)),
|
||||
)
|
||||
.set_auth_type(openidconnect::AuthType::RequestBody)
|
||||
.set_redirect_uri(RedirectUrl::new(redirect)?);
|
||||
|
||||
let session_secret = if let Some(s) = session_secret_override {
|
||||
let mut b = s.into_bytes();
|
||||
b.resize(32, 0);
|
||||
b
|
||||
} else {
|
||||
let mut b = vec![0u8; 32];
|
||||
rand::thread_rng().fill_bytes(&mut b);
|
||||
b
|
||||
};
|
||||
|
||||
Ok(OidcState {
|
||||
client,
|
||||
session_secret,
|
||||
})
|
||||
}
|
||||
|
||||
fn generate_sso_cookie(secret: &[u8], user_id: &str) -> String {
|
||||
let mut mac = HmacSha256::new_from_slice(secret).unwrap();
|
||||
mac.update(user_id.as_bytes());
|
||||
let sig = base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(mac.finalize().into_bytes());
|
||||
format!("sso:{}:{}", user_id, sig)
|
||||
}
|
||||
|
||||
fn verify_sso_cookie(secret: &[u8], cookie_val: &str) -> Option<String> {
|
||||
let parts: Vec<&str> = cookie_val.split(':').collect();
|
||||
if parts.len() != 3 || parts[0] != "sso" {
|
||||
return None;
|
||||
}
|
||||
let user_id = parts[1];
|
||||
let sig = parts[2];
|
||||
|
||||
let mut mac = HmacSha256::new_from_slice(secret).unwrap();
|
||||
mac.update(user_id.as_bytes());
|
||||
|
||||
let expected_sig = base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(mac.finalize().into_bytes());
|
||||
if sig == expected_sig {
|
||||
Some(user_id.to_string())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Auth middleware: requires valid SSO session cookie.
|
||||
pub async fn require_auth(
|
||||
State(state): State<Arc<AppState>>,
|
||||
req: Request,
|
||||
next: Next,
|
||||
) -> Response {
|
||||
let oidc = match &state.oidc {
|
||||
Some(o) => o,
|
||||
None => return next.run(req).await, // No OIDC configured = no auth
|
||||
};
|
||||
|
||||
let cookies = req
|
||||
.headers()
|
||||
.get(header::COOKIE)
|
||||
.and_then(|v| v.to_str().ok())
|
||||
.unwrap_or("");
|
||||
|
||||
for c in cookies.split(';') {
|
||||
let c = c.trim();
|
||||
if let Some(val) = c.strip_prefix(&format!("{}=", SESSION_COOKIE)) {
|
||||
if verify_sso_cookie(&oidc.session_secret, val).is_some() {
|
||||
return next.run(req).await;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let uri = req.uri().to_string();
|
||||
if uri.starts_with("/api/") {
|
||||
(StatusCode::UNAUTHORIZED, "Unauthorized").into_response()
|
||||
} else {
|
||||
Redirect::to("/login").into_response()
|
||||
}
|
||||
}
|
||||
|
||||
/// GET /login — show SSO login page.
|
||||
pub async fn login_page(State(state): State<Arc<AppState>>) -> impl IntoResponse {
|
||||
if state.oidc.is_none() {
|
||||
return Redirect::to("/").into_response();
|
||||
}
|
||||
|
||||
Html(LOGIN_HTML).into_response()
|
||||
}
|
||||
|
||||
/// GET /logout — clear session cookie.
|
||||
pub async fn logout() -> impl IntoResponse {
|
||||
let cookie = format!(
|
||||
"{}=; HttpOnly; SameSite=Strict; Path=/; Expires=Thu, 01 Jan 1970 00:00:00 GMT",
|
||||
SESSION_COOKIE
|
||||
);
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert(header::SET_COOKIE, cookie.parse().unwrap());
|
||||
headers.insert(header::LOCATION, "/login".parse().unwrap());
|
||||
(StatusCode::FOUND, headers, Body::empty()).into_response()
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct LoginQuery {
|
||||
pub next: Option<String>,
|
||||
}
|
||||
|
||||
/// GET /auth/login — initiate OIDC flow.
|
||||
pub async fn oidc_login(
|
||||
State(state): State<Arc<AppState>>,
|
||||
axum::extract::Query(query): axum::extract::Query<LoginQuery>,
|
||||
req: Request,
|
||||
) -> impl IntoResponse {
|
||||
let oidc = match &state.oidc {
|
||||
Some(o) => o,
|
||||
None => return Redirect::to("/").into_response(),
|
||||
};
|
||||
|
||||
let (pkce_challenge, pkce_verifier) = PkceCodeChallenge::new_random_sha256();
|
||||
|
||||
let (auth_url, csrf_token, nonce) = oidc
|
||||
.client
|
||||
.authorize_url(
|
||||
AuthenticationFlow::<CoreResponseType>::AuthorizationCode,
|
||||
CsrfToken::new_random,
|
||||
Nonce::new_random,
|
||||
)
|
||||
.add_scope(Scope::new("openid".to_string()))
|
||||
.add_scope(Scope::new("profile".to_string()))
|
||||
.set_pkce_challenge(pkce_challenge)
|
||||
.url();
|
||||
|
||||
let next_url = query.next.unwrap_or_else(|| "/".to_string());
|
||||
let cookie_val = format!(
|
||||
"{}:{}:{}:{}",
|
||||
csrf_token.secret(),
|
||||
nonce.secret(),
|
||||
pkce_verifier.secret(),
|
||||
urlencoding::encode(&next_url)
|
||||
);
|
||||
|
||||
let is_https = req
|
||||
.headers()
|
||||
.get("x-forwarded-proto")
|
||||
.and_then(|v| v.to_str().ok())
|
||||
.map(|s| s == "https")
|
||||
.unwrap_or(false);
|
||||
|
||||
let cookie_attrs = if is_https {
|
||||
"SameSite=None; Secure"
|
||||
} else {
|
||||
"SameSite=Lax"
|
||||
};
|
||||
|
||||
let cookie = format!(
|
||||
"furumi_oidc_state={}; HttpOnly; {}; Path=/; Max-Age=3600",
|
||||
cookie_val, cookie_attrs
|
||||
);
|
||||
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert(header::SET_COOKIE, cookie.parse().unwrap());
|
||||
headers.insert(header::LOCATION, auth_url.as_str().parse().unwrap());
|
||||
headers.insert(
|
||||
header::CACHE_CONTROL,
|
||||
"no-store, no-cache, must-revalidate".parse().unwrap(),
|
||||
);
|
||||
|
||||
(StatusCode::FOUND, headers, Body::empty()).into_response()
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct AuthCallbackQuery {
|
||||
code: String,
|
||||
state: String,
|
||||
}
|
||||
|
||||
/// GET /auth/callback — handle OIDC callback.
|
||||
pub async fn oidc_callback(
|
||||
State(state): State<Arc<AppState>>,
|
||||
axum::extract::Query(query): axum::extract::Query<AuthCallbackQuery>,
|
||||
req: Request,
|
||||
) -> impl IntoResponse {
|
||||
let oidc = match &state.oidc {
|
||||
Some(o) => o,
|
||||
None => return Redirect::to("/").into_response(),
|
||||
};
|
||||
|
||||
let cookies = req
|
||||
.headers()
|
||||
.get(header::COOKIE)
|
||||
.and_then(|v| v.to_str().ok())
|
||||
.unwrap_or("");
|
||||
|
||||
let mut matching_val = None;
|
||||
for c in cookies.split(';') {
|
||||
let c = c.trim();
|
||||
if let Some(val) = c.strip_prefix("furumi_oidc_state=") {
|
||||
let parts: Vec<&str> = val.split(':').collect();
|
||||
if parts.len() >= 3 && parts[0] == query.state {
|
||||
matching_val = Some(val.to_string());
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let cookie_val = match matching_val {
|
||||
Some(c) => c,
|
||||
None => {
|
||||
tracing::warn!("OIDC callback: invalid state or missing cookie");
|
||||
return (StatusCode::BAD_REQUEST, "Invalid state").into_response();
|
||||
}
|
||||
};
|
||||
|
||||
let parts: Vec<&str> = cookie_val.split(':').collect();
|
||||
let nonce = Nonce::new(parts[1].to_string());
|
||||
let pkce_verifier = PkceCodeVerifier::new(parts[2].to_string());
|
||||
|
||||
let token_response = oidc
|
||||
.client
|
||||
.exchange_code(AuthorizationCode::new(query.code))
|
||||
.set_pkce_verifier(pkce_verifier)
|
||||
.request_async(async_http_client)
|
||||
.await;
|
||||
|
||||
let token_response = match token_response {
|
||||
Ok(tr) => tr,
|
||||
Err(e) => {
|
||||
tracing::error!("OIDC token exchange error: {:?}", e);
|
||||
return (StatusCode::INTERNAL_SERVER_ERROR, format!("OIDC error: {}", e))
|
||||
.into_response();
|
||||
}
|
||||
};
|
||||
|
||||
let id_token = match token_response.id_token() {
|
||||
Some(t) => t,
|
||||
None => {
|
||||
return (StatusCode::INTERNAL_SERVER_ERROR, "No ID token").into_response();
|
||||
}
|
||||
};
|
||||
|
||||
let claims = match id_token.claims(&oidc.client.id_token_verifier(), &nonce) {
|
||||
Ok(c) => c,
|
||||
Err(e) => {
|
||||
return (StatusCode::UNAUTHORIZED, format!("Invalid ID token: {}", e)).into_response();
|
||||
}
|
||||
};
|
||||
|
||||
let user_id = claims
|
||||
.preferred_username()
|
||||
.map(|u| u.to_string())
|
||||
.or_else(|| claims.email().map(|e| e.to_string()))
|
||||
.unwrap_or_else(|| claims.subject().to_string());
|
||||
|
||||
let session_val = generate_sso_cookie(&oidc.session_secret, &user_id);
|
||||
|
||||
let redirect_to = parts
|
||||
.get(3)
|
||||
.and_then(|&s| urlencoding::decode(s).ok())
|
||||
.map(|v| v.into_owned())
|
||||
.unwrap_or_else(|| "/".to_string());
|
||||
let redirect_to = if redirect_to.is_empty() {
|
||||
"/".to_string()
|
||||
} else {
|
||||
redirect_to
|
||||
};
|
||||
|
||||
let is_https = req
|
||||
.headers()
|
||||
.get("x-forwarded-proto")
|
||||
.and_then(|v| v.to_str().ok())
|
||||
.map(|s| s == "https")
|
||||
.unwrap_or(false);
|
||||
|
||||
let session_attrs = if is_https {
|
||||
"SameSite=Strict; Secure"
|
||||
} else {
|
||||
"SameSite=Strict"
|
||||
};
|
||||
|
||||
let session_cookie = format!(
|
||||
"{}={}; HttpOnly; {}; Path=/; Max-Age=604800",
|
||||
SESSION_COOKIE, session_val, session_attrs
|
||||
);
|
||||
let clear_state =
|
||||
"furumi_oidc_state=; HttpOnly; Path=/; Expires=Thu, 01 Jan 1970 00:00:00 GMT";
|
||||
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert(header::SET_COOKIE, session_cookie.parse().unwrap());
|
||||
headers.append(header::SET_COOKIE, clear_state.parse().unwrap());
|
||||
headers.insert(header::LOCATION, redirect_to.parse().unwrap());
|
||||
|
||||
(StatusCode::FOUND, headers, Body::empty()).into_response()
|
||||
}
|
||||
|
||||
const LOGIN_HTML: &str = r#"<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>Furumi Player — Login</title>
|
||||
<style>
|
||||
* { box-sizing: border-box; margin: 0; padding: 0; }
|
||||
body {
|
||||
min-height: 100vh;
|
||||
display: flex; align-items: center; justify-content: center;
|
||||
background: #0d0f14;
|
||||
font-family: 'Inter', system-ui, sans-serif;
|
||||
color: #e2e8f0;
|
||||
}
|
||||
.card {
|
||||
background: #161b27;
|
||||
border: 1px solid #2a3347;
|
||||
border-radius: 16px;
|
||||
padding: 2.5rem 3rem;
|
||||
width: 360px;
|
||||
box-shadow: 0 20px 60px rgba(0,0,0,0.5);
|
||||
text-align: center;
|
||||
}
|
||||
.logo { font-size: 1.8rem; font-weight: 700; color: #7c6af7; margin-bottom: 0.25rem; }
|
||||
.subtitle { font-size: 0.85rem; color: #64748b; margin-bottom: 2rem; }
|
||||
.btn-sso {
|
||||
display: block; width: 100%; padding: 0.75rem; text-align: center;
|
||||
background: #7c6af7; border: none; border-radius: 8px;
|
||||
color: #fff; font-size: 0.95rem; font-weight: 600; text-decoration: none;
|
||||
cursor: pointer; transition: background 0.2s;
|
||||
}
|
||||
.btn-sso:hover { background: #6b58e8; }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div class="card">
|
||||
<div class="logo">Furumi</div>
|
||||
<div class="subtitle">Sign in to continue</div>
|
||||
<a href="/auth/login" class="btn-sso">SSO Login</a>
|
||||
</div>
|
||||
</body>
|
||||
</html>"#;
|
||||
57
furumi-web-player/src/web/mod.rs
Normal file
57
furumi-web-player/src/web/mod.rs
Normal file
@@ -0,0 +1,57 @@
|
||||
pub mod api;
|
||||
pub mod auth;
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use axum::{Router, routing::get, middleware};
|
||||
use sqlx::PgPool;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct AppState {
|
||||
pub pool: PgPool,
|
||||
#[allow(dead_code)]
|
||||
pub storage_dir: Arc<PathBuf>,
|
||||
pub oidc: Option<Arc<auth::OidcState>>,
|
||||
}
|
||||
|
||||
pub fn build_router(state: Arc<AppState>) -> Router {
|
||||
let library = Router::new()
|
||||
.route("/artists", get(api::list_artists))
|
||||
.route("/artists/:slug", get(api::get_artist))
|
||||
.route("/artists/:slug/albums", get(api::list_artist_albums))
|
||||
.route("/artists/:slug/tracks", get(api::list_artist_all_tracks))
|
||||
.route("/albums/:slug", get(api::get_album_tracks))
|
||||
.route("/albums/:slug/cover", get(api::album_cover))
|
||||
.route("/tracks/:slug", get(api::get_track_detail))
|
||||
.route("/tracks/:slug/cover", get(api::track_cover))
|
||||
.route("/stream/:slug", get(api::stream_track))
|
||||
.route("/search", get(api::search));
|
||||
|
||||
let authed = Router::new()
|
||||
.route("/", get(player_html))
|
||||
.nest("/api", library);
|
||||
|
||||
let has_oidc = state.oidc.is_some();
|
||||
|
||||
let app = if has_oidc {
|
||||
authed
|
||||
.route_layer(middleware::from_fn_with_state(state.clone(), auth::require_auth))
|
||||
} else {
|
||||
authed
|
||||
};
|
||||
|
||||
Router::new()
|
||||
.route("/login", get(auth::login_page))
|
||||
.route("/logout", get(auth::logout))
|
||||
.route("/auth/login", get(auth::oidc_login))
|
||||
.route("/auth/callback", get(auth::oidc_callback))
|
||||
.merge(app)
|
||||
.with_state(state)
|
||||
}
|
||||
|
||||
async fn player_html() -> axum::response::Html<String> {
|
||||
let html = include_str!("player.html")
|
||||
.replace("<!-- VERSION_PLACEHOLDER -->", option_env!("FURUMI_VERSION").unwrap_or(env!("CARGO_PKG_VERSION")));
|
||||
axum::response::Html(html)
|
||||
}
|
||||
591
furumi-web-player/src/web/player.html
Normal file
591
furumi-web-player/src/web/player.html
Normal file
@@ -0,0 +1,591 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>Furumi Player</title>
|
||||
<style>
|
||||
@import url('https://fonts.googleapis.com/css2?family=Inter:wght@300;400;500;600;700&display=swap');
|
||||
|
||||
*, *::before, *::after { box-sizing: border-box; margin: 0; padding: 0; }
|
||||
|
||||
:root {
|
||||
--bg-base: #0a0c12;
|
||||
--bg-panel: #111520;
|
||||
--bg-card: #161d2e;
|
||||
--bg-hover: #1e2740;
|
||||
--bg-active: #252f4a;
|
||||
--border: #1f2c45;
|
||||
--accent: #7c6af7;
|
||||
--accent-dim: #5a4fcf;
|
||||
--accent-glow:rgba(124,106,247,0.3);
|
||||
--text: #e2e8f0;
|
||||
--text-muted: #64748b;
|
||||
--text-dim: #94a3b8;
|
||||
--success: #34d399;
|
||||
--danger: #f87171;
|
||||
}
|
||||
|
||||
html, body { height: 100%; overflow: hidden; }
|
||||
body { font-family: 'Inter', system-ui, sans-serif; background: var(--bg-base); color: var(--text); display: flex; flex-direction: column; }
|
||||
|
||||
.header { display: flex; align-items: center; justify-content: space-between; padding: 0.75rem 1.5rem; background: var(--bg-panel); border-bottom: 1px solid var(--border); flex-shrink: 0; z-index: 10; }
|
||||
.header-logo { display: flex; align-items: center; gap: 0.75rem; font-weight: 700; font-size: 1.1rem; }
|
||||
.header-logo svg { width: 22px; height: 22px; }
|
||||
.header-version { font-size: 0.7rem; color: var(--text-muted); background: rgba(255,255,255,0.05); padding: 0.1rem 0.4rem; border-radius: 4px; margin-left: 0.25rem; font-weight: 500; text-decoration: none; }
|
||||
.btn-menu { display: none; background: none; border: none; color: var(--text); font-size: 1.2rem; cursor: pointer; padding: 0.1rem 0.5rem; margin-right: 0.2rem; border-radius: 4px; }
|
||||
|
||||
/* Search bar */
|
||||
.search-wrap { position: relative; }
|
||||
.search-wrap input { background: var(--bg-card); border: 1px solid var(--border); border-radius: 6px; padding: 6px 12px 6px 30px; color: var(--text); font-size: 13px; width: 220px; font-family: inherit; }
|
||||
.search-wrap::before { content: '🔍'; position: absolute; left: 8px; top: 50%; transform: translateY(-50%); font-size: 12px; }
|
||||
.search-dropdown { position: absolute; top: 100%; left: 0; right: 0; background: var(--bg-card); border: 1px solid var(--border); border-radius: 0 0 6px 6px; max-height: 300px; overflow-y: auto; z-index: 50; display: none; }
|
||||
.search-dropdown.open { display: block; }
|
||||
.search-result { padding: 8px 12px; cursor: pointer; font-size: 13px; border-bottom: 1px solid var(--border); }
|
||||
.search-result:hover { background: var(--bg-hover); }
|
||||
.search-result .sr-type { font-size: 10px; color: var(--text-muted); text-transform: uppercase; margin-right: 6px; }
|
||||
.search-result .sr-detail { font-size: 11px; color: var(--text-muted); margin-left: 4px; }
|
||||
|
||||
.main { display: flex; flex: 1; overflow: hidden; position: relative; }
|
||||
.sidebar-overlay { display: none; position: absolute; top: 0; left: 0; right: 0; bottom: 0; background: rgba(0,0,0,0.6); z-index: 20; }
|
||||
.sidebar-overlay.show { display: block; }
|
||||
|
||||
.sidebar { width: 280px; min-width: 200px; max-width: 400px; flex-shrink: 0; display: flex; flex-direction: column; background: var(--bg-panel); border-right: 1px solid var(--border); overflow: hidden; resize: horizontal; }
|
||||
.sidebar-header { padding: 0.85rem 1rem 0.6rem; font-size: 0.7rem; font-weight: 600; letter-spacing: 0.08em; text-transform: uppercase; color: var(--text-muted); border-bottom: 1px solid var(--border); flex-shrink: 0; display: flex; align-items: center; gap: 0.5rem; }
|
||||
.breadcrumb { padding: 0.5rem 1rem; font-size: 0.78rem; color: var(--text-muted); white-space: nowrap; overflow: hidden; text-overflow: ellipsis; border-bottom: 1px solid var(--border); flex-shrink: 0; }
|
||||
.breadcrumb span { color: var(--accent); cursor: pointer; }
|
||||
.breadcrumb span:hover { text-decoration: underline; }
|
||||
|
||||
.file-list { flex: 1; overflow-y: auto; padding: 0.3rem 0; }
|
||||
.file-list::-webkit-scrollbar { width: 4px; }
|
||||
.file-list::-webkit-scrollbar-thumb { background: var(--border); border-radius: 4px; }
|
||||
|
||||
.file-item { display: flex; align-items: center; gap: 0.6rem; padding: 0.45rem 1rem; cursor: pointer; font-size: 0.875rem; color: var(--text-dim); user-select: none; transition: background 0.12s; }
|
||||
.file-item:hover { background: var(--bg-hover); color: var(--text); }
|
||||
.file-item.dir { color: var(--accent); }
|
||||
.file-item .icon { font-size: 0.95rem; flex-shrink: 0; opacity: 0.8; }
|
||||
.file-item .name { flex: 1; overflow: hidden; text-overflow: ellipsis; white-space: nowrap; }
|
||||
.file-item .detail { font-size: 0.7rem; color: var(--text-muted); flex-shrink: 0; }
|
||||
.file-item .add-btn { opacity: 0; font-size: 0.75rem; background: var(--bg-hover); color: var(--text); border: 1px solid var(--border); border-radius: 4px; padding: 0.2rem 0.4rem; cursor: pointer; flex-shrink: 0; }
|
||||
.file-item:hover .add-btn { opacity: 1; }
|
||||
.file-item .add-btn:hover { background: var(--accent); color: #fff; border-color: var(--accent); }
|
||||
|
||||
.queue-panel { flex: 1; display: flex; flex-direction: column; overflow: hidden; background: var(--bg-base); }
|
||||
.queue-header { padding: 0.85rem 1.25rem 0.6rem; font-size: 0.7rem; font-weight: 600; letter-spacing: 0.08em; text-transform: uppercase; color: var(--text-muted); border-bottom: 1px solid var(--border); flex-shrink: 0; display: flex; align-items: center; justify-content: space-between; }
|
||||
.queue-actions { display: flex; gap: 0.5rem; }
|
||||
.queue-btn { font-size: 0.7rem; padding: 0.2rem 0.55rem; background: none; border: 1px solid var(--border); border-radius: 5px; color: var(--text-muted); cursor: pointer; }
|
||||
.queue-btn:hover { border-color: var(--accent); color: var(--accent); }
|
||||
.queue-btn.active { background: var(--accent); border-color: var(--accent); color: #fff; }
|
||||
|
||||
.queue-list { flex: 1; overflow-y: auto; padding: 0.3rem 0; }
|
||||
.queue-list::-webkit-scrollbar { width: 4px; }
|
||||
.queue-list::-webkit-scrollbar-thumb { background: var(--border); border-radius: 4px; }
|
||||
|
||||
.queue-item { display: flex; align-items: center; gap: 0.75rem; padding: 0.55rem 1.25rem; cursor: pointer; border-left: 2px solid transparent; transition: background 0.12s; }
|
||||
.queue-item:hover { background: var(--bg-hover); }
|
||||
.queue-item.playing { background: var(--bg-active); border-left-color: var(--accent); }
|
||||
.queue-item.playing .qi-title { color: var(--accent); }
|
||||
.queue-item .qi-index { font-size: 0.75rem; color: var(--text-muted); width: 1.5rem; text-align: right; flex-shrink: 0; }
|
||||
.queue-item.playing .qi-index::before { content: '▶'; font-size: 0.6rem; color: var(--accent); }
|
||||
.queue-item .qi-cover { width: 36px; height: 36px; border-radius: 5px; background: var(--bg-card); flex-shrink: 0; overflow: hidden; display: flex; align-items: center; justify-content: center; font-size: 1.1rem; }
|
||||
.queue-item .qi-cover img { width: 100%; height: 100%; object-fit: cover; }
|
||||
.queue-item .qi-info { flex: 1; overflow: hidden; }
|
||||
.queue-item .qi-title { font-size: 0.875rem; overflow: hidden; text-overflow: ellipsis; white-space: nowrap; }
|
||||
.queue-item .qi-artist { font-size: 0.75rem; color: var(--text-muted); overflow: hidden; text-overflow: ellipsis; white-space: nowrap; }
|
||||
.queue-item .qi-dur { font-size: 0.75rem; color: var(--text-muted); margin-left: auto; margin-right: 0.5rem; }
|
||||
.qi-remove { background: none; border: none; font-size: 0.9rem; color: var(--text-muted); cursor: pointer; padding: 0.3rem; border-radius: 4px; opacity: 0; }
|
||||
.queue-item:hover .qi-remove { opacity: 1; }
|
||||
.qi-remove:hover { background: rgba(248,113,113,0.15); color: var(--danger); }
|
||||
.queue-item.dragging { opacity: 0.5; }
|
||||
.queue-item.drag-over { border-top: 2px solid var(--accent); margin-top: -2px; }
|
||||
.queue-empty { flex: 1; display: flex; flex-direction: column; align-items: center; justify-content: center; color: var(--text-muted); font-size: 0.875rem; gap: 0.5rem; padding: 2rem; }
|
||||
.queue-empty .empty-icon { font-size: 2.5rem; opacity: 0.3; }
|
||||
|
||||
.player-bar { background: var(--bg-panel); border-top: 1px solid var(--border); padding: 0.9rem 1.5rem; flex-shrink: 0; display: grid; grid-template-columns: 1fr 2fr 1fr; align-items: center; gap: 1rem; }
|
||||
.np-info { display: flex; align-items: center; gap: 0.75rem; min-width: 0; }
|
||||
.np-cover { width: 44px; height: 44px; border-radius: 6px; background: var(--bg-card); flex-shrink: 0; overflow: hidden; display: flex; align-items: center; justify-content: center; font-size: 1.3rem; }
|
||||
.np-cover img { width: 100%; height: 100%; object-fit: cover; }
|
||||
.np-text { min-width: 0; }
|
||||
.np-title { font-size: 0.875rem; font-weight: 500; overflow: hidden; text-overflow: ellipsis; white-space: nowrap; }
|
||||
.np-artist { font-size: 0.75rem; color: var(--text-muted); overflow: hidden; text-overflow: ellipsis; white-space: nowrap; }
|
||||
|
||||
.controls { display: flex; flex-direction: column; align-items: center; gap: 0.5rem; }
|
||||
.ctrl-btns { display: flex; align-items: center; gap: 0.5rem; }
|
||||
.ctrl-btn { background: none; border: none; color: var(--text-dim); cursor: pointer; padding: 0.35rem; border-radius: 50%; display: flex; align-items: center; justify-content: center; font-size: 1rem; }
|
||||
.ctrl-btn:hover { color: var(--text); background: var(--bg-hover); }
|
||||
.ctrl-btn.active { color: var(--accent); }
|
||||
.ctrl-btn-main { width: 38px; height: 38px; background: var(--accent); color: #fff !important; font-size: 1.1rem; box-shadow: 0 0 14px var(--accent-glow); }
|
||||
.ctrl-btn-main:hover { background: var(--accent-dim) !important; }
|
||||
|
||||
.progress-row { display: flex; align-items: center; gap: 0.6rem; width: 100%; }
|
||||
.time { font-size: 0.7rem; color: var(--text-muted); flex-shrink: 0; font-variant-numeric: tabular-nums; min-width: 2.5rem; text-align: center; }
|
||||
.progress-bar { flex: 1; height: 4px; background: var(--bg-hover); border-radius: 2px; cursor: pointer; position: relative; }
|
||||
.progress-fill { height: 100%; background: var(--accent); border-radius: 2px; pointer-events: none; }
|
||||
.progress-fill::after { content: ''; position: absolute; right: -5px; top: 50%; transform: translateY(-50%); width: 10px; height: 10px; border-radius: 50%; background: var(--accent); box-shadow: 0 0 6px var(--accent-glow); opacity: 0; transition: opacity 0.15s; }
|
||||
.progress-bar:hover .progress-fill::after { opacity: 1; }
|
||||
|
||||
.volume-row { display: flex; align-items: center; gap: 0.5rem; justify-content: flex-end; }
|
||||
.vol-icon { font-size: 0.9rem; color: var(--text-muted); cursor: pointer; }
|
||||
.volume-slider { -webkit-appearance: none; appearance: none; width: 80px; height: 4px; border-radius: 2px; background: var(--bg-hover); cursor: pointer; outline: none; }
|
||||
.volume-slider::-webkit-slider-thumb { -webkit-appearance: none; width: 12px; height: 12px; border-radius: 50%; background: var(--accent); cursor: pointer; }
|
||||
|
||||
* { scrollbar-width: thin; scrollbar-color: var(--border) transparent; }
|
||||
@keyframes spin { to { transform: rotate(360deg); } }
|
||||
.spinner { display: inline-block; width: 14px; height: 14px; border: 2px solid var(--border); border-top-color: var(--accent); border-radius: 50%; animation: spin 0.7s linear infinite; }
|
||||
.toast { position: fixed; bottom: 90px; right: 1.5rem; background: var(--bg-card); border: 1px solid var(--border); border-radius: 8px; padding: 0.6rem 1rem; font-size: 0.8rem; color: var(--text-dim); box-shadow: 0 8px 24px rgba(0,0,0,0.4); opacity: 0; transform: translateY(8px); transition: all 0.25s; pointer-events: none; z-index: 100; }
|
||||
.toast.show { opacity: 1; transform: translateY(0); }
|
||||
|
||||
@media (max-width: 768px) {
|
||||
.btn-menu { display: inline-block; }
|
||||
.header { padding: 0.75rem 1rem; }
|
||||
.sidebar { position: absolute; top: 0; bottom: 0; left: -100%; width: 85%; max-width: 320px; z-index: 30; transition: left 0.3s; box-shadow: 4px 0 20px rgba(0,0,0,0.6); }
|
||||
.sidebar.open { left: 0; }
|
||||
.player-bar { grid-template-columns: 1fr; gap: 0.75rem; padding: 0.75rem 1rem; }
|
||||
.volume-row { display: none; }
|
||||
.search-wrap input { width: 140px; }
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
|
||||
<header class="header">
|
||||
<div class="header-logo">
|
||||
<button class="btn-menu" onclick="toggleSidebar()">☰</button>
|
||||
<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2"><circle cx="9" cy="18" r="3"/><circle cx="18" cy="15" r="3"/><path d="M12 18V6l9-3v3"/></svg>
|
||||
Furumi
|
||||
<span class="header-version">v<!-- VERSION_PLACEHOLDER --></span>
|
||||
</div>
|
||||
<div style="display:flex;align-items:center;gap:1rem">
|
||||
<div class="search-wrap">
|
||||
<input id="searchInput" placeholder="Search..." oninput="onSearch(this.value)" onkeydown="if(event.key==='Escape'){closeSearch();}">
|
||||
<div class="search-dropdown" id="searchDropdown"></div>
|
||||
</div>
|
||||
</div>
|
||||
</header>
|
||||
|
||||
<div class="main">
|
||||
<div class="sidebar-overlay" id="sidebarOverlay" onclick="toggleSidebar()"></div>
|
||||
<aside class="sidebar" id="sidebar">
|
||||
<div class="sidebar-header">Library</div>
|
||||
<div class="breadcrumb" id="breadcrumb"><span onclick="showArtists()">Artists</span></div>
|
||||
<div class="file-list" id="fileList"></div>
|
||||
</aside>
|
||||
|
||||
<section class="queue-panel">
|
||||
<div class="queue-header">
|
||||
<span>Queue</span>
|
||||
<div class="queue-actions">
|
||||
<button class="queue-btn active" id="btnShuffle" onclick="toggleShuffle()">Shuffle</button>
|
||||
<button class="queue-btn active" id="btnRepeat" onclick="toggleRepeat()">Repeat</button>
|
||||
<button class="queue-btn" onclick="clearQueue()">Clear</button>
|
||||
</div>
|
||||
</div>
|
||||
<div class="queue-list" id="queueList">
|
||||
<div class="queue-empty"><div class="empty-icon">🎵</div><div>Select an album to start</div></div>
|
||||
</div>
|
||||
</section>
|
||||
</div>
|
||||
|
||||
<div class="player-bar">
|
||||
<div class="np-info">
|
||||
<div class="np-cover" id="npCover">🎵</div>
|
||||
<div class="np-text">
|
||||
<div class="np-title" id="npTitle">Nothing playing</div>
|
||||
<div class="np-artist" id="npArtist">—</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="controls">
|
||||
<div class="ctrl-btns">
|
||||
<button class="ctrl-btn" onclick="prevTrack()">⏮</button>
|
||||
<button class="ctrl-btn ctrl-btn-main" id="btnPlayPause" onclick="togglePlay()">▶</button>
|
||||
<button class="ctrl-btn" onclick="nextTrack()">⏭</button>
|
||||
</div>
|
||||
<div class="progress-row">
|
||||
<span class="time" id="timeElapsed">0:00</span>
|
||||
<div class="progress-bar" id="progressBar" onclick="seekTo(event)">
|
||||
<div class="progress-fill" id="progressFill" style="width:0%"></div>
|
||||
</div>
|
||||
<span class="time" id="timeDuration">0:00</span>
|
||||
</div>
|
||||
</div>
|
||||
<div class="volume-row">
|
||||
<span class="vol-icon" onclick="toggleMute()" id="volIcon">🔊</span>
|
||||
<input type="range" class="volume-slider" id="volSlider" min="0" max="100" value="80" oninput="setVolume(this.value)">
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="toast" id="toast"></div>
|
||||
<audio id="audioEl"></audio>
|
||||
|
||||
<script>
|
||||
// Shared playback state ---------------------------------------------------
const audio = document.getElementById('audioEl');
let queue = []; // [{slug, title, artist, album_slug, duration, cover}]
let queueIndex = -1;    // index into `queue` of the playing track; -1 = none
let shuffle = false;    // shuffle mode flag (persisted in localStorage)
let repeatAll = true;   // wrap around at the queue ends (persisted)
let shuffleOrder = [];  // permutation of queue indices used while shuffling
let searchTimer = null; // debounce handle for the search box

// Restore prefs — best-effort: localStorage access can throw (e.g. blocked
// storage in private browsing), so the whole restore is wrapped.
try {
  const v = localStorage.getItem('furumi_vol');
  if (v !== null) { audio.volume = v / 100; document.getElementById('volSlider').value = v; }
  shuffle = localStorage.getItem('furumi_shuffle') === '1';
  repeatAll = localStorage.getItem('furumi_repeat') !== '0';
  document.getElementById('btnShuffle').classList.toggle('active', shuffle);
  document.getElementById('btnRepeat').classList.toggle('active', repeatAll);
} catch(e) {}

// --- Audio events ---
// Keep the progress bar and the elapsed/total time labels in sync.
audio.addEventListener('timeupdate', () => {
  if (audio.duration) {
    document.getElementById('progressFill').style.width = (audio.currentTime / audio.duration * 100) + '%';
    document.getElementById('timeElapsed').textContent = fmt(audio.currentTime);
    document.getElementById('timeDuration').textContent = fmt(audio.duration);
  }
});
audio.addEventListener('ended', () => nextTrack());
audio.addEventListener('play', () => document.getElementById('btnPlayPause').innerHTML = '⏸');
audio.addEventListener('pause', () => document.getElementById('btnPlayPause').innerHTML = '▶');
// On playback failure: notify and skip to the next track.
audio.addEventListener('error', () => { showToast('Playback error'); nextTrack(); });

// --- API helper ---
// Base path of the app (supports being mounted under a sub-path).
const _base = location.pathname.replace(/\/+$/, '');
const API = _base + '/api';
// GET a JSON endpoint; returns the parsed body, or null on any HTTP error.
async function api(path) {
  const r = await fetch(API + path);
  if (!r.ok) return null;
  return r.json();
}
|
||||
|
||||
// --- Library navigation ---
|
||||
// Render the top-level artist list in the sidebar file panel.
async function showArtists() {
  setBreadcrumb([{label: 'Artists', action: 'showArtists()'}]);
  const el = document.getElementById('fileList');
  // Spinner while the request is in flight.
  el.innerHTML = '<div style="padding:2rem;text-align:center"><div class="spinner"></div></div>';
  const artists = await api('/artists');
  if (!artists) { el.innerHTML = '<div style="padding:1rem;color:var(--danger)">Error</div>'; return; }
  el.innerHTML = '';
  artists.forEach(a => {
    const div = document.createElement('div');
    div.className = 'file-item dir';
    div.innerHTML = `<span class="icon">👤</span><span class="name">${esc(a.name)}</span><span class="detail">${a.album_count} albums</span>`;
    div.onclick = () => showArtistAlbums(a.slug, a.name);
    el.appendChild(div);
  });
}
|
||||
|
||||
// Render one artist's album list (plus a "play all" shortcut) in the sidebar.
// NOTE(review): artistName is interpolated into an inline onclick string; a
// name containing a quote could break the handler — verify that esc()'s
// output survives the attribute/JS round-trip.
async function showArtistAlbums(artistSlug, artistName) {
  setBreadcrumb([
    {label: 'Artists', action: 'showArtists()'},
    {label: artistName, action: `showArtistAlbums('${artistSlug}','${esc(artistName)}')`}
  ]);
  const el = document.getElementById('fileList');
  el.innerHTML = '<div style="padding:2rem;text-align:center"><div class="spinner"></div></div>';
  const albums = await api('/artists/' + artistSlug + '/albums');
  if (!albums) { el.innerHTML = '<div style="padding:1rem;color:var(--danger)">Error</div>'; return; }
  el.innerHTML = '';

  // "Play all" button
  const allBtn = document.createElement('div');
  allBtn.className = 'file-item';
  allBtn.innerHTML = '<span class="icon">▶</span><span class="name" style="color:var(--accent);font-weight:500">Play all tracks</span>';
  allBtn.onclick = () => playAllArtistTracks(artistSlug);
  el.appendChild(allBtn);

  albums.forEach(a => {
    const div = document.createElement('div');
    div.className = 'file-item dir';
    const year = a.year ? `(${a.year})` : '';
    div.innerHTML = `<span class="icon">💿</span><span class="name">${esc(a.name)} ${year}</span>
      <span class="detail">${a.track_count} tracks</span>
      <button class="add-btn" title="Add album to queue">➕</button>`;
    // ➕ queues the album without navigating into it.
    div.querySelector('.add-btn').onclick = (ev) => { ev.stopPropagation(); addAlbumToQueue(a.slug); };
    div.onclick = () => showAlbumTracks(a.slug, a.name, artistSlug, artistName);
    el.appendChild(div);
  });
}
|
||||
|
||||
// Render the track list of one album in the sidebar.
// Fix: the original computed a `coverUrl` local that was never used — dead
// code removed.
async function showAlbumTracks(albumSlug, albumName, artistSlug, artistName) {
  setBreadcrumb([
    {label: 'Artists', action: 'showArtists()'},
    {label: artistName, action: `showArtistAlbums('${artistSlug}','${esc(artistName)}')`},
    {label: albumName}
  ]);
  const el = document.getElementById('fileList');
  el.innerHTML = '<div style="padding:2rem;text-align:center"><div class="spinner"></div></div>';
  const tracks = await api('/albums/' + albumSlug);
  if (!tracks) { el.innerHTML = '<div style="padding:1rem;color:var(--danger)">Error</div>'; return; }
  el.innerHTML = '';

  // "Play album" shortcut at the top of the list.
  const allBtn = document.createElement('div');
  allBtn.className = 'file-item';
  allBtn.innerHTML = '<span class="icon">▶</span><span class="name" style="color:var(--accent);font-weight:500">Play album</span>';
  allBtn.onclick = () => addAlbumToQueue(albumSlug, true);
  el.appendChild(allBtn);

  tracks.forEach(t => {
    const div = document.createElement('div');
    div.className = 'file-item';
    const num = t.track_number ? t.track_number + '. ' : '';
    const dur = t.duration_secs ? fmt(t.duration_secs) : '';
    div.innerHTML = `<span class="icon">🎵</span><span class="name">${num}${esc(t.title)}</span>
      <span class="detail">${dur}</span>`;
    // Clicking a track queues it (deduped) and starts playback immediately.
    div.onclick = () => {
      addTrackToQueue({slug: t.slug, title: t.title, artist: t.artist_name, album_slug: albumSlug, duration: t.duration_secs}, true);
    };
    el.appendChild(div);
  });
}
|
||||
|
||||
// Render the breadcrumb trail. Every part except the last (when it carries
// an `action` string) becomes a clickable span that invokes that JS snippet.
function setBreadcrumb(parts) {
  const el = document.getElementById('breadcrumb');
  el.innerHTML = parts.map((p, i) => {
    if (i < parts.length - 1 && p.action) {
      return `<span onclick="${p.action}">${esc(p.label)}</span>`;
    }
    return esc(p.label);
  }).join(' / ');
}
|
||||
|
||||
// --- Queue management ---
|
||||
// --- Queue management ---
// Append one track to the queue (deduped by slug).
// playNow: start playing it immediately (or jump to the existing copy).
function addTrackToQueue(track, playNow) {
  const existing = queue.findIndex(t => t.slug === track.slug);
  if (existing !== -1) {
    if (playNow) playIndex(existing);
    return;
  }
  queue.push(track);
  renderQueue();
  // Auto-start when this is the very first entry and nothing is playing yet.
  if (playNow || (queueIndex === -1 && queue.length === 1)) {
    playIndex(queue.length - 1);
  }
}
|
||||
|
||||
// Append every track of an album to the queue, skipping duplicates.
// playFirst: start playback at the first newly-added track.
// Fixes: when every track was already queued the original called
// playIndex(queue.length) — one past the end — and the toast reported the
// full album size rather than the number actually added.
async function addAlbumToQueue(albumSlug, playFirst) {
  const tracks = await api('/albums/' + albumSlug);
  if (!tracks || !tracks.length) return;
  const firstIdx = queue.length;
  let added = 0;
  tracks.forEach(t => {
    if (queue.find(q => q.slug === t.slug)) return; // already queued
    queue.push({slug: t.slug, title: t.title, artist: t.artist_name, album_slug: albumSlug, duration: t.duration_secs});
    added++;
  });
  renderQueue();
  // Only auto-play when something was actually appended; otherwise firstIdx
  // points past the end of the queue.
  if (added && (playFirst || queueIndex === -1)) playIndex(firstIdx);
  showToast(added ? `Added ${added} tracks` : 'Already in queue');
}
|
||||
|
||||
// Replace the entire queue with every track by this artist and start
// playback from the first one.
async function playAllArtistTracks(artistSlug) {
  const tracks = await api('/artists/' + artistSlug + '/tracks');
  if (!tracks || !tracks.length) return;
  clearQueue();
  tracks.forEach(t => {
    queue.push({slug: t.slug, title: t.title, artist: t.artist_name, album_slug: t.album_slug, duration: t.duration_secs});
  });
  renderQueue();
  playIndex(0);
  showToast(`Added ${tracks.length} tracks`);
}
|
||||
|
||||
// Start playback of queue[i]: point the <audio> element at the streaming
// endpoint, refresh the now-playing UI, and reflect the track in the URL
// (?t=<slug>) so the link can be shared / restored on reload.
function playIndex(i) {
  if (i < 0 || i >= queue.length) return;
  queueIndex = i;
  const track = queue[i];
  audio.src = API + '/stream/' + track.slug;
  audio.play().catch(() => {}); // autoplay may be blocked; ignore
  updateNowPlaying(track);
  renderQueue();
  scrollQueueToActive();
  history.replaceState(null, '', '?t=' + track.slug);
}
|
||||
|
||||
// Refresh the now-playing strip (title/artist/cover), the tab title, and the
// OS media-session metadata. Pass null to reset everything to the idle state.
function updateNowPlaying(track) {
  if (!track) { document.getElementById('npTitle').textContent = 'Nothing playing'; document.getElementById('npArtist').textContent = '\u2014'; return; }
  document.getElementById('npTitle').textContent = track.title;
  document.getElementById('npArtist').textContent = track.artist || '\u2014';
  document.title = track.title + ' \u2014 Furumi';

  const cover = document.getElementById('npCover');
  const coverUrl = API + '/tracks/' + track.slug + '/cover';
  // Fall back to a note glyph when the track has no cover art.
  cover.innerHTML = `<img src="${coverUrl}" alt="" onerror="this.parentElement.innerHTML='🎵'">`;

  // Expose metadata to lock screens / media keys where supported.
  if ('mediaSession' in navigator) {
    navigator.mediaSession.metadata = new MediaMetadata({
      title: track.title,
      artist: track.artist || '',
      album: '',
      artwork: [{src: coverUrl, sizes: '512x512'}]
    });
  }
}
|
||||
|
||||
// Rebuild the whole queue panel from `queue` in the current play order.
// Also wires per-item click-to-play, remove buttons, and HTML5 drag & drop
// reordering. Drag indices are positions in the *displayed* order (which is
// the shuffle permutation while shuffling).
function renderQueue() {
  const el = document.getElementById('queueList');
  if (!queue.length) {
    el.innerHTML = '<div class="queue-empty"><div class="empty-icon">🎵</div><div>Select an album to start</div></div>';
    return;
  }
  const order = currentOrder();
  el.innerHTML = '';
  order.forEach((origIdx, pos) => {
    const t = queue[origIdx];
    const isPlaying = origIdx === queueIndex;
    const div = document.createElement('div');
    div.className = 'queue-item' + (isPlaying ? ' playing' : '');

    // Tracks with no album association get a plain note glyph, not cover art.
    const coverSrc = t.album_slug ? `${API}/tracks/${t.slug}/cover` : '';
    const coverHtml = coverSrc
      ? `<img src="${coverSrc}" alt="" onerror="this.parentElement.innerHTML='🎵'">`
      : '🎵';
    const dur = t.duration ? fmt(t.duration) : '';

    div.innerHTML = `
      <span class="qi-index">${isPlaying ? '' : pos + 1}</span>
      <div class="qi-cover">${coverHtml}</div>
      <div class="qi-info"><div class="qi-title">${esc(t.title)}</div><div class="qi-artist">${esc(t.artist || '')}</div></div>
      <span class="qi-dur">${dur}</span>
      <button class="qi-remove" onclick="removeFromQueue(${origIdx},event)">✕</button>
    `;
    div.addEventListener('click', () => playIndex(origIdx));

    // Drag & drop
    div.draggable = true;
    div.addEventListener('dragstart', e => { e.dataTransfer.setData('text/plain', pos); div.classList.add('dragging'); });
    div.addEventListener('dragend', () => { div.classList.remove('dragging'); el.querySelectorAll('.queue-item').forEach(x => x.classList.remove('drag-over')); });
    div.addEventListener('dragover', e => { e.preventDefault(); });
    div.addEventListener('dragenter', () => div.classList.add('drag-over'));
    div.addEventListener('dragleave', () => div.classList.remove('drag-over'));
    div.addEventListener('drop', e => { e.preventDefault(); div.classList.remove('drag-over'); const from = parseInt(e.dataTransfer.getData('text/plain')); if (!isNaN(from)) moveQueueItem(from, pos); });

    el.appendChild(div);
  });
}
|
||||
|
||||
// Keep the currently-playing entry visible inside the queue panel.
function scrollQueueToActive() {
  const active = document.querySelector('.queue-item.playing');
  if (!active) return;
  active.scrollIntoView({behavior: 'smooth', block: 'nearest'});
}
|
||||
|
||||
// Remove queue[idx]. Stops playback when the playing track is removed, and
// shifts queueIndex / the shuffle permutation so they keep pointing at the
// same tracks after the splice.
function removeFromQueue(idx, ev) {
  if (ev) ev.stopPropagation();
  if (idx === queueIndex) { queueIndex = -1; audio.pause(); audio.src = ''; updateNowPlaying(null); }
  else if (queueIndex > idx) queueIndex--;
  queue.splice(idx, 1);
  // Drop idx from the shuffle order and renumber every index above it.
  if (shuffle) { const si = shuffleOrder.indexOf(idx); if (si !== -1) shuffleOrder.splice(si, 1); for (let i = 0; i < shuffleOrder.length; i++) if (shuffleOrder[i] > idx) shuffleOrder[i]--; }
  renderQueue();
}

// Reorder by displayed position (drag & drop). While shuffling only the
// permutation moves; otherwise the underlying queue moves and queueIndex is
// adjusted so the playing track stays the playing track.
function moveQueueItem(from, to) {
  if (from === to) return;
  if (shuffle) { const item = shuffleOrder.splice(from, 1)[0]; shuffleOrder.splice(to, 0, item); }
  else { const item = queue.splice(from, 1)[0]; queue.splice(to, 0, item); if (queueIndex === from) queueIndex = to; else if (from < queueIndex && to >= queueIndex) queueIndex--; else if (from > queueIndex && to <= queueIndex) queueIndex++; }
  renderQueue();
}
|
||||
|
||||
// Reset every piece of queue/playback state back to "idle".
function clearQueue() {
  shuffleOrder = [];
  queue = [];
  queueIndex = -1;
  audio.pause();
  audio.src = '';
  updateNowPlaying(null);
  document.title = 'Furumi Player';
  renderQueue();
}
|
||||
|
||||
// --- Playback controls ---
|
||||
// --- Playback controls ---
// Play/pause toggle. If nothing is loaded yet but the queue has entries,
// start playback at the current (or first) queue position instead.
function togglePlay() {
  if (!audio.src && queue.length) {
    playIndex(queueIndex >= 0 ? queueIndex : 0);
    return;
  }
  if (audio.paused) {
    audio.play();
  } else {
    audio.pause();
  }
}
|
||||
// Advance to the next track in the current order; at the end, wrap around
// (with a fresh shuffle) only when repeat-all is on.
function nextTrack() {
  if (!queue.length) return;
  const order = currentOrder();
  const pos = order.indexOf(queueIndex);
  if (pos < order.length - 1) playIndex(order[pos + 1]);
  else if (repeatAll) { if (shuffle) buildShuffleOrder(); playIndex(currentOrder()[0]); }
}
// Go to the previous track; when more than 3s into the current one, restart
// it instead (conventional player behavior).
function prevTrack() {
  if (!queue.length) return;
  if (audio.currentTime > 3) { audio.currentTime = 0; return; }
  const order = currentOrder();
  const pos = order.indexOf(queueIndex);
  if (pos > 0) playIndex(order[pos - 1]);
  else if (repeatAll) playIndex(order[order.length - 1]);
}
|
||||
// Toggle shuffle mode: rebuild the permutation, update the button state, and
// persist the preference.
function toggleShuffle() { shuffle = !shuffle; if (shuffle) buildShuffleOrder(); document.getElementById('btnShuffle').classList.toggle('active', shuffle); localStorage.setItem('furumi_shuffle', shuffle ? '1' : '0'); renderQueue(); }
// Toggle repeat-all (wrap around at the queue ends) and persist it.
function toggleRepeat() { repeatAll = !repeatAll; document.getElementById('btnRepeat').classList.toggle('active', repeatAll); localStorage.setItem('furumi_repeat', repeatAll ? '1' : '0'); }
// Fisher-Yates shuffle of queue indices; the currently-playing index is then
// moved to the front so playback continues seamlessly from it.
function buildShuffleOrder() { shuffleOrder = [...Array(queue.length).keys()]; for (let i = shuffleOrder.length - 1; i > 0; i--) { const j = Math.floor(Math.random() * (i + 1)); [shuffleOrder[i], shuffleOrder[j]] = [shuffleOrder[j], shuffleOrder[i]]; } if (queueIndex !== -1) { const ci = shuffleOrder.indexOf(queueIndex); if (ci > 0) { shuffleOrder.splice(ci, 1); shuffleOrder.unshift(queueIndex); } } }
// The effective play order: identity when not shuffling; otherwise the
// shuffle permutation (rebuilt lazily if the queue length changed under it).
function currentOrder() { if (!shuffle) return [...Array(queue.length).keys()]; if (shuffleOrder.length !== queue.length) buildShuffleOrder(); return shuffleOrder; }
|
||||
|
||||
// --- Seek & Volume ---
// Seek to the clicked fraction of the progress bar.
function seekTo(e) { if (!audio.duration) return; const bar = document.getElementById('progressBar'); const pct = (e.clientX - bar.getBoundingClientRect().left) / bar.offsetWidth; audio.currentTime = pct * audio.duration; }
let muted = false;
// Toggle mute and swap the speaker icon accordingly.
function toggleMute() { muted = !muted; audio.muted = muted; document.getElementById('volIcon').innerHTML = muted ? '🔇' : '🔊'; }
// Volume slider handler (0-100); persists the value across sessions.
function setVolume(v) { audio.volume = v / 100; document.getElementById('volIcon').innerHTML = v == 0 ? '🔇' : '🔊'; localStorage.setItem('furumi_vol', v); }
|
||||
|
||||
// --- Search ---
|
||||
// Debounced (250ms) live search: queries /search once the input has at
// least 2 characters and renders the results into the dropdown.
function onSearch(q) {
  clearTimeout(searchTimer);
  if (q.length < 2) { closeSearch(); return; }
  searchTimer = setTimeout(async () => {
    const results = await api('/search?q=' + encodeURIComponent(q));
    if (!results || !results.length) { closeSearch(); return; }
    const dd = document.getElementById('searchDropdown');
    dd.innerHTML = results.map(r => {
      const detail = r.detail ? `<span class="sr-detail">${esc(r.detail)}</span>` : '';
      // NOTE(review): r.result_type / r.slug are interpolated unescaped —
      // presumed server-controlled values; confirm against the API.
      return `<div class="search-result" onclick="onSearchSelect('${r.result_type}','${r.slug}')"><span class="sr-type">${r.result_type}</span>${esc(r.name)}${detail}</div>`;
    }).join('');
    dd.classList.add('open');
  }, 250);
}
// Hide the search results dropdown.
function closeSearch() { document.getElementById('searchDropdown').classList.remove('open'); }
|
||||
// Handle a click on a search result: navigate to the artist, queue the
// album, or queue-and-play the track.
// Fix: the original "fetch full info" call hit /stream/<slug>, which
// downloads the entire audio file and tries to parse it as JSON, then
// discards it. Use the /tracks/<slug> detail endpoint (same pattern as the
// deep-link init block) and queue the track with real metadata.
async function onSearchSelect(type, slug) {
  closeSearch();
  document.getElementById('searchInput').value = '';
  if (type === 'artist') showArtistAlbums(slug, '');
  else if (type === 'album') addAlbumToQueue(slug, true);
  else if (type === 'track') {
    const info = await api('/tracks/' + slug).catch(() => null);
    if (info) {
      addTrackToQueue({slug: info.slug, title: info.title, artist: info.artist_name, album_slug: info.album_slug, duration: info.duration_secs}, true);
    } else {
      // Fallback: queue by slug alone so playback still starts.
      addTrackToQueue({slug, title: '', artist: '', album_slug: null, duration: null}, true);
    }
  }
}
|
||||
|
||||
// --- Helpers ---
|
||||
// Formats a duration in seconds as "m:ss", or "h:mm:ss" once it reaches an hour.
// Falsy or NaN input renders as "0:00".
function fmt(secs) {
    if (!secs || isNaN(secs)) return '0:00';
    const totalSecs = Math.floor(secs);
    const totalMins = Math.floor(totalSecs / 60);
    const hours = Math.floor(totalMins / 60);
    const ss = pad(totalSecs % 60);
    if (hours > 0) {
        return `${hours}:${pad(totalMins % 60)}:${ss}`;
    }
    return `${totalMins}:${ss}`;
}

// Zero-pads a number to at least two digits.
function pad(n) {
    return String(n).padStart(2, '0');
}
|
||||
// Escapes a value for safe interpolation into HTML. Coerces null/undefined
// (and other falsy values) to '' and replaces the five HTML-special
// characters with their entities. Ampersand must be replaced first so the
// entities inserted by later replacements are not double-escaped.
// Fix: the replacement strings must be the entities themselves — replacing
// '&' with '&' (and '<' with '<', etc.) was a no-op that left the markup
// injectable, and the quote replacement was syntactically broken.
function esc(s) {
    return String(s || '')
        .replace(/&/g, '&amp;')
        .replace(/</g, '&lt;')
        .replace(/>/g, '&gt;')
        .replace(/"/g, '&quot;')
        .replace(/'/g, '&#39;');
}
|
||||
// Pending hide-timer for the toast, so rapid toasts restart the countdown.
let toastTimer;

// Shows a transient notification message for 2.5 seconds.
function showToast(msg) {
    const toast = document.getElementById('toast');
    toast.textContent = msg;
    toast.classList.add('show');
    clearTimeout(toastTimer);
    toastTimer = setTimeout(() => toast.classList.remove('show'), 2500);
}
|
||||
// Toggles the sidebar and its dimming overlay in lockstep.
function toggleSidebar() {
    const sidebar = document.getElementById('sidebar');
    const overlay = document.getElementById('sidebarOverlay');
    sidebar.classList.toggle('open');
    overlay.classList.toggle('show');
}
|
||||
|
||||
// --- MediaSession ---
// Wire OS/hardware media keys to the player when the Media Session API exists.
if ('mediaSession' in navigator) {
    const actionHandlers = {
        play: togglePlay,
        pause: togglePlay,
        previoustrack: prevTrack,
        nexttrack: nextTrack,
        seekto: d => { audio.currentTime = d.seekTime; },
    };
    for (const [action, handler] of Object.entries(actionHandlers)) {
        navigator.mediaSession.setActionHandler(action, handler);
    }
}
|
||||
|
||||
// --- Init ---
// Startup: honor a ?t=<slug> deep link by queueing that track (when the
// server knows it), then render the artist list.
(async () => {
    const params = new URLSearchParams(window.location.search);
    const urlSlug = params.get('t');
    if (urlSlug) {
        const info = await api('/tracks/' + urlSlug);
        if (info) {
            const track = {
                slug: info.slug,
                title: info.title,
                artist: info.artist_name,
                album_slug: info.album_slug,
                duration: info.duration_secs,
            };
            addTrackToQueue(track, true);
        }
    }
    showArtists();
})();
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
56
windows-implementation-plan.md
Normal file
56
windows-implementation-plan.md
Normal file
@@ -0,0 +1,56 @@
|
||||
# Implementation Plan for `furumi-mount-windows` Client
|
||||
|
||||
## Architectural Decision
|
||||
|
||||
- **VFS Driver:** `WinFSP` (Windows File System Proxy).
|
||||
- **Justification:** Excellent performance, perfect compatibility with the FUSE model, widely used in similar projects (e.g., rclone, sshfs-win).
|
||||
- **Installation:** A unified installer (bundle) will be created (for example, using Inno Setup or the WiX Toolset), which will:
|
||||
- Check if WinFSP is already installed.
|
||||
- Automatically install the official `winfsp.msi` silently (using `/qn` flags) if the driver is missing.
|
||||
- Install the `furumi-mount-windows.exe` client itself.
|
||||
|
||||
---
|
||||
|
||||
## Implementation Details
|
||||
|
||||
### 1. Application Scaffold
|
||||
- Create a new binary crate `furumi-mount-windows` within the workspace.
|
||||
- Add dependencies: `winfsp` (or `wfd`), `tokio`, `clap`, `tracing`, and an internal dependency on `furumi-client-core`.
|
||||
|
||||
### 2. Entry Point (CLI)
|
||||
- In `main.rs`, configure parsing for command-line arguments and environment variables (`--server`, `--token`, `--mount`), similar to `furumi-mount-macos`.
|
||||
- Initialize the gRPC connection to the server via `furumi-client-core`.
|
||||
- Configure directory mounting:
|
||||
- As a network drive (e.g., `Z:`).
|
||||
- Or as a transparent folder within an existing NTFS filesystem (depending on driver support/flags).
|
||||
|
||||
### 3. VFS Implementation
|
||||
- Create an `fs.rs` module.
|
||||
- Implement the trait or callback structure required by WinFSP (e.g., the `WinFspFileSystem` structure).
|
||||
- Action mapping:
|
||||
- `GetFileInfo` / `GetSecurityByName` → gRPC `GetAttr` call.
|
||||
- `ReadDirectory` → Streaming gRPC `ReadDir` call.
|
||||
- `ReadFile` → `ReadFile` gRPC call (with support for stream chunking).
|
||||
- **Crucial Part:** Translating Unix file attributes (from gRPC) into Windows File Attributes to ensure the system permits high-performance continuous stream reading (especially for media).
|
||||
|
||||
### 4. Installer Creation
|
||||
- Write a configuration script for a Windows installer builder (e.g., `windows/setup.iss` for Inno Setup).
|
||||
- Neatly bundle both `winfsp-x.y.z.msi` and `furumi-mount-windows.exe` together.
|
||||
- Add Custom Actions / Logic to:
|
||||
- Check the Windows Registry for an existing WinFSP installation.
|
||||
- Trigger the `winfsp.msi` installation conditionally.
|
||||
|
||||
### 5. CI/CD Integration
|
||||
- Update the GitHub Actions workflow (`docker-publish.yml` or create a dedicated release workflow).
|
||||
- Add the target toolchain: `x86_64-pc-windows-msvc`.
|
||||
- Add a step to compile: `cargo build --release --bin furumi-mount-windows`.
|
||||
- Add a step to build the installer (e.g., `iscc setup.iss` or via `cargo-wix`).
|
||||
- Output the final `setup.exe` as a GitHub Release artifact alongside other binaries.
|
||||
|
||||
### 6. Testing Strategy
|
||||
- Write unit tests in Rust covering attribute translation and path mapping (mapping slashes `/` to backslashes `\`).
|
||||
- Manual System Testing:
|
||||
- Start `furumi-server` locally.
|
||||
- Run the installer on a clean Windows machine (VM without pre-installed WinFSP).
|
||||
- Verify that the drive mounts correctly and seamlessly.
|
||||
- Launch media playback (e.g., via VLC/mpv) to ensure streaming stability over the VFS connection.
|
||||
Reference in New Issue
Block a user