Mirror of https://github.com/navidrome/navidrome.git (synced 2026-01-03 06:15:22 +00:00)

Compare commits: dd5181c644 ... ecf5837409 (34 commits)
Commits in this range:

ecf5837409, fc9817552d, 0c1b65d3e6, 47b448c64f, 834fa494e4, 5d34640065, 9ed309ac81,
8c80be56da, cde5992c46, 017676c457, 2d7b716834, c7ac0e4414, c9409d306a, ebbe62bbbd,
42c85a18e2, 7ccf44b8ed, 603cccde11, 6ed6524752, a081569ed4, e923c02c6a, 51ca2dee65,
6b961bd99d, 396eee48c6, cc3cca6077, f6ac99e081, a521c74a59, bfd219e708, eaf7795716,
96392f3af0, b7c4128b1b, 86f929499e, 5bc26de0e7, bbe8fe164d, 72969711d2
.github/workflows/pipeline.yml (vendored): 90 changes

@@ -217,7 +217,7 @@ jobs:
 CROSS_TAGLIB_VERSION=${{ env.CROSS_TAGLIB_VERSION }}

 - name: Upload Binaries
- uses: actions/upload-artifact@v5
+ uses: actions/upload-artifact@v6
 with:
 name: navidrome-${{ env.PLATFORM }}
 path: ./output

@@ -248,7 +248,7 @@ jobs:
 touch "/tmp/digests/${digest#sha256:}"

 - name: Upload digest
- uses: actions/upload-artifact@v5
+ uses: actions/upload-artifact@v6
 if: env.IS_LINUX == 'true' && env.IS_DOCKER_PUSH_CONFIGURED == 'true' && env.IS_ARMV5 == 'false'
 with:
 name: digests-${{ env.PLATFORM }}

@@ -256,8 +256,11 @@ jobs:
 if-no-files-found: error
 retention-days: 1

- push-manifest:
- name: Push Docker manifest
+ push-manifest-ghcr:
+ name: Push to GHCR
+ permissions:
+ contents: read
+ packages: write
 runs-on: ubuntu-latest
 needs: [build, check-push-enabled]
 if: needs.check-push-enabled.outputs.is_enabled == 'true'

@@ -267,7 +270,41 @@ jobs:
 - uses: actions/checkout@v6

 - name: Download digests
- uses: actions/download-artifact@v6
+ uses: actions/download-artifact@v7
+ with:
+ path: /tmp/digests
+ pattern: digests-*
+ merge-multiple: true
+
+ - name: Prepare Docker Buildx
+ uses: ./.github/actions/prepare-docker
+ id: docker
+ with:
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Create manifest list and push to ghcr.io
+ working-directory: /tmp/digests
+ run: |
+ docker buildx imagetools create $(jq -cr '.tags | map(select(startswith("ghcr.io"))) | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
+ $(printf '${{ env.REGISTRY_IMAGE }}@sha256:%s ' *)
+
+ - name: Inspect image in ghcr.io
+ run: |
+ docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:${{ steps.docker.outputs.version }}
+
+ push-manifest-dockerhub:
+ name: Push to Docker Hub
+ runs-on: ubuntu-latest
+ permissions:
+ contents: read
+ needs: [build, check-push-enabled]
+ if: needs.check-push-enabled.outputs.is_enabled == 'true' && vars.DOCKER_HUB_REPO != ''
+ continue-on-error: true
+ steps:
+ - uses: actions/checkout@v6
+
+ - name: Download digests
+ uses: actions/download-artifact@v7
 with:
 path: /tmp/digests
 pattern: digests-*

@@ -282,28 +319,27 @@ jobs:
 hub_username: ${{ secrets.DOCKER_HUB_USERNAME }}
 hub_password: ${{ secrets.DOCKER_HUB_PASSWORD }}

- - name: Create manifest list and push to ghcr.io
- working-directory: /tmp/digests
- run: |
- docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
- $(printf '${{ env.REGISTRY_IMAGE }}@sha256:%s ' *)
-
 - name: Create manifest list and push to Docker Hub
- working-directory: /tmp/digests
- if: vars.DOCKER_HUB_REPO != ''
- run: |
- docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
- $(printf '${{ vars.DOCKER_HUB_REPO }}@sha256:%s ' *)
- - name: Inspect image in ghcr.io
- run: |
- docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:${{ steps.docker.outputs.version }}
+ uses: nick-fields/retry@v3
+ with:
+ timeout_minutes: 5
+ max_attempts: 3
+ retry_wait_seconds: 30
+ command: |
+ cd /tmp/digests
+ docker buildx imagetools create $(jq -cr '.tags | map(select(startswith("ghcr.io") | not)) | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
+ $(printf 'ghcr.io/${{ github.repository }}@sha256:%s ' *)

 - name: Inspect image in Docker Hub
- if: vars.DOCKER_HUB_REPO != ''
 run: |
 docker buildx imagetools inspect ${{ vars.DOCKER_HUB_REPO }}:${{ steps.docker.outputs.version }}

+ cleanup-digests:
+ name: Cleanup digest artifacts
+ runs-on: ubuntu-latest
+ needs: [push-manifest-ghcr, push-manifest-dockerhub]
+ if: always() && needs.push-manifest-ghcr.result == 'success'
+ steps:
 - name: Delete unnecessary digest artifacts
 env:
 GH_TOKEN: ${{ github.token }}

@@ -320,7 +356,7 @@ jobs:
 steps:
 - uses: actions/checkout@v6

- - uses: actions/download-artifact@v6
+ - uses: actions/download-artifact@v7
 with:
 path: ./binaries
 pattern: navidrome-windows*

@@ -339,7 +375,7 @@ jobs:
 du -h binaries/msi/*.msi

 - name: Upload MSI files
- uses: actions/upload-artifact@v5
+ uses: actions/upload-artifact@v6
 with:
 name: navidrome-windows-installers
 path: binaries/msi/*.msi

@@ -357,7 +393,7 @@ jobs:
 fetch-depth: 0
 fetch-tags: true

- - uses: actions/download-artifact@v6
+ - uses: actions/download-artifact@v7
 with:
 path: ./binaries
 pattern: navidrome-*

@@ -383,7 +419,7 @@ jobs:
 rm ./dist/*.tar.gz ./dist/*.zip

 - name: Upload all-packages artifact
- uses: actions/upload-artifact@v5
+ uses: actions/upload-artifact@v6
 with:
 name: packages
 path: dist/navidrome_0*

@@ -406,13 +442,13 @@ jobs:
 item: ${{ fromJson(needs.release.outputs.package_list) }}
 steps:
 - name: Download all-packages artifact
- uses: actions/download-artifact@v6
+ uses: actions/download-artifact@v7
 with:
 name: packages
 path: ./dist

 - name: Upload all-packages artifact
- uses: actions/upload-artifact@v5
+ uses: actions/upload-artifact@v6
 with:
 name: navidrome_linux_${{ matrix.item }}
 path: dist/navidrome_0*_linux_${{ matrix.item }}

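The GHCR/Docker Hub split above hinges on filtering the tag list in $DOCKER_METADATA_OUTPUT_JSON by registry prefix. A minimal sketch of that jq filtering, using a made-up metadata payload (the real JSON is produced earlier in the workflow; the tag values here are examples only):

# Hypothetical metadata payload with one GHCR tag and one Docker Hub tag
DOCKER_METADATA_OUTPUT_JSON='{"tags":["ghcr.io/navidrome/navidrome:develop","deluan/navidrome:develop"]}'

# GHCR job: keep only the ghcr.io tags
jq -cr '.tags | map(select(startswith("ghcr.io"))) | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON"
# prints: -t ghcr.io/navidrome/navidrome:develop

# Docker Hub job: keep every tag that does not start with ghcr.io
jq -cr '.tags | map(select(startswith("ghcr.io") | not)) | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON"
# prints: -t deluan/navidrome:develop
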
.github/workflows/stale.yml (vendored): 2 changes

@@ -12,7 +12,7 @@ jobs:
 pull-requests: write
 runs-on: ubuntu-latest
 steps:
- - uses: dessant/lock-threads@v5
+ - uses: dessant/lock-threads@v6
 with:
 process-only: 'issues, prs'
 issue-inactive-days: 120

.github/workflows/update-translations.yml (vendored): 2 changes

@@ -24,7 +24,7 @@ jobs:
 git status --porcelain
 git diff
 - name: Create Pull Request
- uses: peter-evans/create-pull-request@v7
+ uses: peter-evans/create-pull-request@v8
 with:
 token: ${{ secrets.PAT }}
 author: "navidrome-bot <navidrome-bot@navidrome.org>"

.gitignore (vendored): 3 changes

@@ -31,4 +31,5 @@ AGENTS.md
 .github/git-commit-instructions.md
 *.exe
 *.test
 *.wasm
+ openspec/

Dockerfile: 10 changes

@@ -2,10 +2,10 @@ FROM --platform=$BUILDPLATFORM ghcr.io/crazy-max/osxcross:14.5-debian AS osxcros

 ########################################################################################################################
 ### Build xx (original image: tonistiigi/xx)
- FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/alpine:3.19 AS xx-build
+ FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/alpine:3.20 AS xx-build

- # v1.5.0
- ENV XX_VERSION=b4e4c451c778822e6742bfc9d9a91d7c7d885c8a
+ # v1.9.0
+ ENV XX_VERSION=a5592eab7a57895e8d385394ff12241bc65ecd50

 RUN apk add -U --no-cache git
 RUN git clone https://github.com/tonistiigi/xx && \

@@ -26,7 +26,7 @@ COPY --from=xx-build /out/ /usr/bin/

 ########################################################################################################################
 ### Get TagLib
- FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/alpine:3.19 AS taglib-build
+ FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/alpine:3.20 AS taglib-build
 ARG TARGETPLATFORM
 ARG CROSS_TAGLIB_VERSION=2.1.1-1
 ENV CROSS_TAGLIB_RELEASES_URL=https://github.com/navidrome/cross-taglib/releases/download/v${CROSS_TAGLIB_VERSION}/

@@ -122,7 +122,7 @@ COPY --from=build /out /

 ########################################################################################################################
 ### Build Final Image
- FROM public.ecr.aws/docker/library/alpine:3.19 AS final
+ FROM public.ecr.aws/docker/library/alpine:3.20 AS final
 LABEL maintainer="deluan@navidrome.org"
 LABEL org.opencontainers.image.source="https://github.com/navidrome/navidrome"

Makefile: 2 changes

@@ -16,7 +16,7 @@ DOCKER_TAG ?= deluan/navidrome:develop

 # Taglib version to use in cross-compilation, from https://github.com/navidrome/cross-taglib
 CROSS_TAGLIB_VERSION ?= 2.1.1-1
- GOLANGCI_LINT_VERSION ?= v2.6.2
+ GOLANGCI_LINT_VERSION ?= v2.7.2

 UI_SRC_FILES := $(shell find ui -type f -not -path "ui/build/*" -not -path "ui/node_modules/*")

cmd/pls.go: 35 changes

@@ -10,11 +10,8 @@ import (
 "strconv"

 "github.com/Masterminds/squirrel"
- "github.com/navidrome/navidrome/core/auth"
- "github.com/navidrome/navidrome/db"
 "github.com/navidrome/navidrome/log"
 "github.com/navidrome/navidrome/model"
- "github.com/navidrome/navidrome/persistence"
 "github.com/spf13/cobra"
 )

@@ -52,7 +49,7 @@ var (
 Short: "Export playlists",
 Long: "Export Navidrome playlists to M3U files",
 Run: func(cmd *cobra.Command, args []string) {
- runExporter()
+ runExporter(cmd.Context())
 },
 }

@@ -60,15 +57,13 @@ var (
 Use: "list",
 Short: "List playlists",
 Run: func(cmd *cobra.Command, args []string) {
- runList()
+ runList(cmd.Context())
 },
 }
 )

- func runExporter() {
- sqlDB := db.Db()
- ds := persistence.New(sqlDB)
- ctx := auth.WithAdminUser(context.Background(), ds)
+ func runExporter(ctx context.Context) {
+ ds, ctx := getAdminContext(ctx)
 playlist, err := ds.Playlist(ctx).GetWithTracks(playlistID, true, false)
 if err != nil && !errors.Is(err, model.ErrNotFound) {
 log.Fatal("Error retrieving playlist", "name", playlistID, err)

@@ -100,31 +95,19 @@ func runExporter() {
 }
 }

- func runList() {
+ func runList(ctx context.Context) {
 if outputFormat != "csv" && outputFormat != "json" {
 log.Fatal("Invalid output format. Must be one of csv, json", "format", outputFormat)
 }

- sqlDB := db.Db()
- ds := persistence.New(sqlDB)
- ctx := auth.WithAdminUser(context.Background(), ds)
+ ds, ctx := getAdminContext(ctx)

 options := model.QueryOptions{Sort: "owner_name"}

 if userID != "" {
- user, err := ds.User(ctx).FindByUsername(userID)
- if err != nil && !errors.Is(err, model.ErrNotFound) {
- log.Fatal("Error retrieving user by name", "name", userID, err)
+ user, err := getUser(ctx, userID, ds)
+ if err != nil {
+ log.Fatal(ctx, "Error retrieving user", "username or id", userID)
 }

- if errors.Is(err, model.ErrNotFound) {
- user, err = ds.User(ctx).Get(userID)
- if err != nil {
- log.Fatal("Error retrieving user by id", "id", userID, err)
- }
- }
-
 options.Filters = squirrel.Eq{"owner_id": user.ID}
 }

cmd/scan.go: 46 changes

@@ -1,9 +1,12 @@
 package cmd

 import (
+ "bufio"
 "context"
 "encoding/gob"
+ "fmt"
 "os"
+ "strings"

 "github.com/navidrome/navidrome/core"
 "github.com/navidrome/navidrome/db"

@@ -19,12 +22,14 @@ var (
 fullScan bool
 subprocess bool
 targets []string
+ targetFile string
 )

 func init() {
 scanCmd.Flags().BoolVarP(&fullScan, "full", "f", false, "check all subfolders, ignoring timestamps")
 scanCmd.Flags().BoolVarP(&subprocess, "subprocess", "", false, "run as subprocess (internal use)")
 scanCmd.Flags().StringArrayVarP(&targets, "target", "t", []string{}, "list of libraryID:folderPath pairs, can be repeated (e.g., \"-t 1:Music/Rock -t 1:Music/Jazz -t 2:Classical\")")
+ scanCmd.Flags().StringVar(&targetFile, "target-file", "", "path to file containing targets (one libraryID:folderPath per line)")
 rootCmd.AddCommand(scanCmd)
 }

@@ -71,10 +76,17 @@ func runScanner(ctx context.Context) {
 ds := persistence.New(sqlDB)
 pls := core.NewPlaylists(ds)

- // Parse targets if provided
+ // Parse targets from command line or file
 var scanTargets []model.ScanTarget
- if len(targets) > 0 {
- var err error
+ var err error
+ if targetFile != "" {
+ scanTargets, err = readTargetsFromFile(targetFile)
+ if err != nil {
+ log.Fatal(ctx, "Failed to read targets from file", err)
+ }
+ log.Info(ctx, "Scanning specific folders from file", "numTargets", len(scanTargets))
+ } else if len(targets) > 0 {
 scanTargets, err = model.ParseTargets(targets)
 if err != nil {
 log.Fatal(ctx, "Failed to parse targets", err)

@@ -94,3 +106,31 @@ func runScanner(ctx context.Context) {
 trackScanInteractively(ctx, progress)
 }
 }
+
+ // readTargetsFromFile reads scan targets from a file, one per line.
+ // Each line should be in the format "libraryID:folderPath".
+ // Empty lines and lines starting with # are ignored.
+ func readTargetsFromFile(filePath string) ([]model.ScanTarget, error) {
+ file, err := os.Open(filePath)
+ if err != nil {
+ return nil, fmt.Errorf("failed to open target file: %w", err)
+ }
+ defer file.Close()
+
+ var targetStrings []string
+ scanner := bufio.NewScanner(file)
+ for scanner.Scan() {
+ line := strings.TrimSpace(scanner.Text())
+ // Skip empty lines and comments
+ if line == "" {
+ continue
+ }
+ targetStrings = append(targetStrings, line)
+ }
+
+ if err := scanner.Err(); err != nil {
+ return nil, fmt.Errorf("failed to read target file: %w", err)
+ }
+
+ return model.ParseTargets(targetStrings)
+ }

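As a usage sketch of the new --target-file flag (the file name and library IDs below are made up; the one-libraryID:folderPath-per-line format comes from the flag help text and readTargetsFromFile above):

# targets.txt (hypothetical)
1:Music/Rock
1:Music/Jazz
2:Classical

navidrome scan --target-file targets.txt
# roughly equivalent to: navidrome scan -t 1:Music/Rock -t 1:Music/Jazz -t 2:Classical
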
cmd/scan_test.go (new file): 89 lines

@@ -0,0 +1,89 @@
package cmd

import (
"os"
"path/filepath"

"github.com/navidrome/navidrome/model"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)

var _ = Describe("readTargetsFromFile", func() {
var tempDir string

BeforeEach(func() {
var err error
tempDir, err = os.MkdirTemp("", "navidrome-test-")
Expect(err).ToNot(HaveOccurred())
})

AfterEach(func() {
os.RemoveAll(tempDir)
})

It("reads valid targets from file", func() {
filePath := filepath.Join(tempDir, "targets.txt")
content := "1:Music/Rock\n2:Music/Jazz\n3:Classical\n"
err := os.WriteFile(filePath, []byte(content), 0600)
Expect(err).ToNot(HaveOccurred())

targets, err := readTargetsFromFile(filePath)
Expect(err).ToNot(HaveOccurred())
Expect(targets).To(HaveLen(3))
Expect(targets[0]).To(Equal(model.ScanTarget{LibraryID: 1, FolderPath: "Music/Rock"}))
Expect(targets[1]).To(Equal(model.ScanTarget{LibraryID: 2, FolderPath: "Music/Jazz"}))
Expect(targets[2]).To(Equal(model.ScanTarget{LibraryID: 3, FolderPath: "Classical"}))
})

It("skips empty lines", func() {
filePath := filepath.Join(tempDir, "targets.txt")
content := "1:Music/Rock\n\n2:Music/Jazz\n\n"
err := os.WriteFile(filePath, []byte(content), 0600)
Expect(err).ToNot(HaveOccurred())

targets, err := readTargetsFromFile(filePath)
Expect(err).ToNot(HaveOccurred())
Expect(targets).To(HaveLen(2))
})

It("trims whitespace", func() {
filePath := filepath.Join(tempDir, "targets.txt")
content := " 1:Music/Rock \n\t2:Music/Jazz\t\n"
err := os.WriteFile(filePath, []byte(content), 0600)
Expect(err).ToNot(HaveOccurred())

targets, err := readTargetsFromFile(filePath)
Expect(err).ToNot(HaveOccurred())
Expect(targets).To(HaveLen(2))
Expect(targets[0].FolderPath).To(Equal("Music/Rock"))
Expect(targets[1].FolderPath).To(Equal("Music/Jazz"))
})

It("returns error for non-existent file", func() {
_, err := readTargetsFromFile("/nonexistent/file.txt")
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(ContainSubstring("failed to open target file"))
})

It("returns error for invalid target format", func() {
filePath := filepath.Join(tempDir, "targets.txt")
content := "invalid-format\n"
err := os.WriteFile(filePath, []byte(content), 0600)
Expect(err).ToNot(HaveOccurred())

_, err = readTargetsFromFile(filePath)
Expect(err).To(HaveOccurred())
})

It("handles mixed valid and empty lines", func() {
filePath := filepath.Join(tempDir, "targets.txt")
content := "\n1:Music/Rock\n\n\n2:Music/Jazz\n\n"
err := os.WriteFile(filePath, []byte(content), 0600)
Expect(err).ToNot(HaveOccurred())

targets, err := readTargetsFromFile(filePath)
Expect(err).ToNot(HaveOccurred())
Expect(targets).To(HaveLen(2))
})
})

cmd/user.go (new file): 477 lines

@@ -0,0 +1,477 @@
package cmd

import (
"context"
"encoding/csv"
"encoding/json"
"errors"
"fmt"
"os"
"strconv"
"strings"
"syscall"
"time"

"github.com/Masterminds/squirrel"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
"github.com/spf13/cobra"
"golang.org/x/term"
)

var (
email string
libraryIds []int
name string

removeEmail bool
removeName bool
setAdmin bool
setPassword bool
setRegularUser bool
)

func init() {
rootCmd.AddCommand(userRoot)

userCreateCommand.Flags().StringVarP(&userID, "username", "u", "", "username")

userCreateCommand.Flags().StringVarP(&email, "email", "e", "", "New user email")
userCreateCommand.Flags().IntSliceVarP(&libraryIds, "library-ids", "i", []int{}, "Comma-separated list of library IDs. Set the user's accessible libraries. If empty, the user can access all libraries. This is incompatible with admin, as admin can always access all libraries")

userCreateCommand.Flags().BoolVarP(&setAdmin, "admin", "a", false, "If set, make the user an admin. This user will have access to every library")
userCreateCommand.Flags().StringVar(&name, "name", "", "New user's name (this is separate from username used to log in)")

_ = userCreateCommand.MarkFlagRequired("username")

userRoot.AddCommand(userCreateCommand)

userDeleteCommand.Flags().StringVarP(&userID, "user", "u", "", "username or id")
_ = userDeleteCommand.MarkFlagRequired("user")
userRoot.AddCommand(userDeleteCommand)

userEditCommand.Flags().StringVarP(&userID, "user", "u", "", "username or id")

userEditCommand.Flags().BoolVar(&setAdmin, "set-admin", false, "If set, make the user an admin")
userEditCommand.Flags().BoolVar(&setRegularUser, "set-regular", false, "If set, make the user a non-admin")
userEditCommand.MarkFlagsMutuallyExclusive("set-admin", "set-regular")

userEditCommand.Flags().BoolVar(&removeEmail, "remove-email", false, "If set, clear the user's email")
userEditCommand.Flags().StringVarP(&email, "email", "e", "", "New user email")
userEditCommand.MarkFlagsMutuallyExclusive("email", "remove-email")

userEditCommand.Flags().BoolVar(&removeName, "remove-name", false, "If set, clear the user's name")
userEditCommand.Flags().StringVar(&name, "name", "", "New user name (this is separate from username used to log in)")
userEditCommand.MarkFlagsMutuallyExclusive("name", "remove-name")

userEditCommand.Flags().BoolVar(&setPassword, "set-password", false, "If set, the user's new password will be prompted on the CLI")

userEditCommand.Flags().IntSliceVarP(&libraryIds, "library-ids", "i", []int{}, "Comma-separated list of library IDs. Set the user's accessible libraries by id")

_ = userEditCommand.MarkFlagRequired("user")
userRoot.AddCommand(userEditCommand)

userListCommand.Flags().StringVarP(&outputFormat, "format", "f", "csv", "output format [supported values: csv, json]")
userRoot.AddCommand(userListCommand)
}

var (
userRoot = &cobra.Command{
Use: "user",
Short: "Administer users",
Long: "Create, delete, list, or update users",
}

userCreateCommand = &cobra.Command{
Use: "create",
Aliases: []string{"c"},
Short: "Create a new user",
Run: func(cmd *cobra.Command, args []string) {
runCreateUser(cmd.Context())
},
}

userDeleteCommand = &cobra.Command{
Use: "delete",
Aliases: []string{"d"},
Short: "Deletes an existing user",
Run: func(cmd *cobra.Command, args []string) {
runDeleteUser(cmd.Context())
},
}

userEditCommand = &cobra.Command{
Use: "edit",
Aliases: []string{"e"},
Short: "Edit a user",
Long: "Edit the password, admin status, and/or library access",
Run: func(cmd *cobra.Command, args []string) {
runUserEdit(cmd.Context())
},
}

userListCommand = &cobra.Command{
Use: "list",
Short: "List users",
Run: func(cmd *cobra.Command, args []string) {
runUserList(cmd.Context())
},
}
)

func promptPassword() string {
for {
fmt.Print("Enter new password (press enter with no password to cancel): ")
// This cast is necessary for some platforms
password, err := term.ReadPassword(int(syscall.Stdin)) //nolint:unconvert

if err != nil {
log.Fatal("Error getting password", err)
}

fmt.Print("\nConfirm new password (press enter with no password to cancel): ")
confirmation, err := term.ReadPassword(int(syscall.Stdin)) //nolint:unconvert

if err != nil {
log.Fatal("Error getting password confirmation", err)
}

// clear the line.
fmt.Println()

pass := string(password)
confirm := string(confirmation)

if pass == "" {
return ""
}

if pass == confirm {
return pass
}

fmt.Println("Password and password confirmation do not match")
}
}

func libraryError(libraries model.Libraries) error {
ids := make([]int, len(libraries))
for idx, library := range libraries {
ids[idx] = library.ID
}
return fmt.Errorf("not all available libraries found. Requested ids: %v, Found libraries: %v", libraryIds, ids)
}

func runCreateUser(ctx context.Context) {
password := promptPassword()
if password == "" {
log.Fatal("Empty password provided, user creation cancelled")
}

user := model.User{
UserName: userID,
Email: email,
Name: name,
IsAdmin: setAdmin,
NewPassword: password,
}

if user.Name == "" {
user.Name = userID
}

ds, ctx := getAdminContext(ctx)

err := ds.WithTx(func(tx model.DataStore) error {
existingUser, err := tx.User(ctx).FindByUsername(userID)
if existingUser != nil {
return fmt.Errorf("existing user '%s'", userID)
}

if err != nil && !errors.Is(err, model.ErrNotFound) {
return fmt.Errorf("failed to check existing username: %w", err)
}

if len(libraryIds) > 0 && !setAdmin {
user.Libraries, err = tx.Library(ctx).GetAll(model.QueryOptions{Filters: squirrel.Eq{"id": libraryIds}})
if err != nil {
return err
}

if len(user.Libraries) != len(libraryIds) {
return libraryError(user.Libraries)
}
} else {
user.Libraries, err = tx.Library(ctx).GetAll()
if err != nil {
return err
}
}

err = tx.User(ctx).Put(&user)
if err != nil {
return err
}

updatedIds := make([]int, len(user.Libraries))
for idx, lib := range user.Libraries {
updatedIds[idx] = lib.ID
}

err = tx.User(ctx).SetUserLibraries(user.ID, updatedIds)
return err
})

if err != nil {
log.Fatal(ctx, err)
}

log.Info(ctx, "Successfully created user", "id", user.ID, "username", user.UserName)
}

func runDeleteUser(ctx context.Context) {
ds, ctx := getAdminContext(ctx)

var err error
var user *model.User

err = ds.WithTx(func(tx model.DataStore) error {
count, err := tx.User(ctx).CountAll()
if err != nil {
return err
}

if count == 1 {
return errors.New("refusing to delete the last user")
}

user, err = getUser(ctx, userID, tx)
if err != nil {
return err
}

return tx.User(ctx).Delete(user.ID)
})

if err != nil {
log.Fatal(ctx, "Failed to delete user", err)
}

log.Info(ctx, "Deleted user", "username", user.UserName)
}

func runUserEdit(ctx context.Context) {
ds, ctx := getAdminContext(ctx)

var err error
var user *model.User
changes := []string{}

err = ds.WithTx(func(tx model.DataStore) error {
var newLibraries model.Libraries

user, err = getUser(ctx, userID, tx)
if err != nil {
return err
}

if len(libraryIds) > 0 && !setAdmin {
libraries, err := tx.Library(ctx).GetAll(model.QueryOptions{Filters: squirrel.Eq{"id": libraryIds}})

if err != nil {
return err
}

if len(libraries) != len(libraryIds) {
return libraryError(libraries)
}

newLibraries = libraries
changes = append(changes, "updated library ids")
}

if setAdmin && !user.IsAdmin {
libraries, err := tx.Library(ctx).GetAll()
if err != nil {
return err
}

user.IsAdmin = true
user.Libraries = libraries
changes = append(changes, "set admin")

newLibraries = libraries
}

if setRegularUser && user.IsAdmin {
user.IsAdmin = false
changes = append(changes, "set regular user")
}

if setPassword {
password := promptPassword()

if password != "" {
user.NewPassword = password
changes = append(changes, "updated password")
}
}

if email != "" && email != user.Email {
user.Email = email
changes = append(changes, "updated email")
} else if removeEmail && user.Email != "" {
user.Email = ""
changes = append(changes, "removed email")
}

if name != "" && name != user.Name {
user.Name = name
changes = append(changes, "updated name")
} else if removeName && user.Name != "" {
user.Name = ""
changes = append(changes, "removed name")
}

if len(changes) == 0 {
return nil
}

err := tx.User(ctx).Put(user)
if err != nil {
return err
}

if len(newLibraries) > 0 {
updatedIds := make([]int, len(newLibraries))
for idx, lib := range newLibraries {
updatedIds[idx] = lib.ID
}

err := tx.User(ctx).SetUserLibraries(user.ID, updatedIds)
if err != nil {
return err
}
}

return nil
})

if err != nil {
log.Fatal(ctx, "Failed to update user", err)
}

if len(changes) == 0 {
log.Info(ctx, "No changes for user", "user", user.UserName)
} else {
log.Info(ctx, "Updated user", "user", user.UserName, "changes", strings.Join(changes, ", "))
}
}

type displayLibrary struct {
ID int `json:"id"`
Path string `json:"path"`
}

type displayUser struct {
Id string `json:"id"`
Username string `json:"username"`
Name string `json:"name"`
Email string `json:"email"`
Admin bool `json:"admin"`
CreatedAt time.Time `json:"createdAt"`
UpdatedAt time.Time `json:"updatedAt"`
LastAccess *time.Time `json:"lastAccess"`
LastLogin *time.Time `json:"lastLogin"`
Libraries []displayLibrary `json:"libraries"`
}

func runUserList(ctx context.Context) {
if outputFormat != "csv" && outputFormat != "json" {
log.Fatal("Invalid output format. Must be one of csv, json", "format", outputFormat)
}

ds, ctx := getAdminContext(ctx)

users, err := ds.User(ctx).ReadAll()
if err != nil {
log.Fatal(ctx, "Failed to retrieve users", err)
}

userList := users.(model.Users)

if outputFormat == "csv" {
w := csv.NewWriter(os.Stdout)
_ = w.Write([]string{
"user id",
"username",
"user's name",
"user email",
"admin",
"created at",
"updated at",
"last access",
"last login",
"libraries",
})
for _, user := range userList {
paths := make([]string, len(user.Libraries))

for idx, library := range user.Libraries {
paths[idx] = fmt.Sprintf("%d:%s", library.ID, library.Path)
}

var lastAccess, lastLogin string

if user.LastAccessAt != nil {
lastAccess = user.LastAccessAt.Format(time.RFC3339Nano)
} else {
lastAccess = "never"
}

if user.LastLoginAt != nil {
lastLogin = user.LastLoginAt.Format(time.RFC3339Nano)
} else {
lastLogin = "never"
}

_ = w.Write([]string{
user.ID,
user.UserName,
user.Name,
user.Email,
strconv.FormatBool(user.IsAdmin),
user.CreatedAt.Format(time.RFC3339Nano),
user.UpdatedAt.Format(time.RFC3339Nano),
lastAccess,
lastLogin,
fmt.Sprintf("'%s'", strings.Join(paths, "|")),
})
}
w.Flush()
} else {
users := make([]displayUser, len(userList))
for idx, user := range userList {
paths := make([]displayLibrary, len(user.Libraries))

for idx, library := range user.Libraries {
paths[idx].ID = library.ID
paths[idx].Path = library.Path
}

users[idx].Id = user.ID
users[idx].Username = user.UserName
users[idx].Name = user.Name
users[idx].Email = user.Email
users[idx].Admin = user.IsAdmin
users[idx].CreatedAt = user.CreatedAt
users[idx].UpdatedAt = user.UpdatedAt
users[idx].LastAccess = user.LastAccessAt
users[idx].LastLogin = user.LastLoginAt
users[idx].Libraries = paths
}

j, _ := json.Marshal(users)
fmt.Printf("%s\n", j)
}
}

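A hedged sketch of how the new user subcommands might be invoked; the usernames, emails, and library IDs are made up, while the flag names come from the init() block above:

# Create a non-admin user limited to libraries 1 and 2 (password is prompted interactively)
navidrome user create -u alice -e alice@example.com -i 1,2

# Edit: promote to admin, or prompt for a new password
navidrome user edit -u alice --set-admin
navidrome user edit -u alice --set-password

# List users as CSV (default) or JSON
navidrome user list -f json

# Delete a user (the command refuses to delete the last remaining user)
navidrome user delete -u alice
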
cmd/utils.go (new file): 42 lines

@@ -0,0 +1,42 @@
package cmd

import (
"context"
"errors"
"fmt"

"github.com/navidrome/navidrome/core/auth"
"github.com/navidrome/navidrome/db"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/model/request"
"github.com/navidrome/navidrome/persistence"
)

func getAdminContext(ctx context.Context) (model.DataStore, context.Context) {
sqlDB := db.Db()
ds := persistence.New(sqlDB)
ctx = auth.WithAdminUser(ctx, ds)
u, _ := request.UserFrom(ctx)
if !u.IsAdmin {
log.Fatal(ctx, "There must be at least one admin user to run this command.")
}
return ds, ctx
}

func getUser(ctx context.Context, id string, ds model.DataStore) (*model.User, error) {
user, err := ds.User(ctx).FindByUsername(id)

if err != nil && !errors.Is(err, model.ErrNotFound) {
return nil, fmt.Errorf("finding user by name: %w", err)
}

if errors.Is(err, model.ErrNotFound) {
user, err = ds.User(ctx).Get(id)
if err != nil {
return nil, fmt.Errorf("finding user by id: %w", err)
}
}

return user, nil
}

@@ -90,7 +90,7 @@ type configOptions struct {
 ExtAuth extAuthOptions
 Plugins pluginsOptions
 PluginConfig map[string]map[string]string
- HTTPSecurityHeaders secureOptions `json:",omitzero"`
+ HTTPHeaders httpHeaderOptions `json:",omitzero"`
 Prometheus prometheusOptions `json:",omitzero"`
 Scanner scannerOptions `json:",omitzero"`
 Jukebox jukeboxOptions `json:",omitzero"`

@@ -102,7 +102,8 @@ type configOptions struct {
 Spotify spotifyOptions `json:",omitzero"`
 Deezer deezerOptions `json:",omitzero"`
 ListenBrainz listenBrainzOptions `json:",omitzero"`
- Tags map[string]TagConf `json:",omitempty"`
+ EnableScrobbleHistory bool
+ Tags map[string]TagConf `json:",omitempty"`
 Agents string

 // DevFlags. These are used to enable/disable debugging and incomplete features

@@ -187,8 +188,8 @@ type listenBrainzOptions struct {
 BaseURL string
 }

- type secureOptions struct {
- CustomFrameOptionsValue string
+ type httpHeaderOptions struct {
+ FrameOptions string
 }

 type prometheusOptions struct {

@@ -256,6 +257,7 @@ func Load(noConfigDump bool) {
 // Map deprecated options to their new names for backwards compatibility
 mapDeprecatedOption("ReverseProxyWhitelist", "ExtAuth.TrustedSources")
 mapDeprecatedOption("ReverseProxyUserHeader", "ExtAuth.UserHeader")
+ mapDeprecatedOption("HTTPSecurityHeaders.CustomFrameOptionsValue", "HTTPHeaders.FrameOptions")

 err := viper.Unmarshal(&Server)
 if err != nil {

@@ -343,6 +345,8 @@ func Load(noConfigDump bool) {
 // Log configuration source
 if Server.ConfigFile != "" {
 log.Info("Loaded configuration", "file", Server.ConfigFile)
+ } else if hasNDEnvVars() {
+ log.Info("No configuration file found. Loaded configuration only from environment variables")
 } else {
 log.Warn("No configuration file found. Using default values. To specify a config file, use the --configfile flag or set the ND_CONFIGFILE environment variable.")
 }

@@ -364,10 +368,12 @@ func Load(noConfigDump bool) {
 log.Warn(fmt.Sprintf("Extractor '%s' is not implemented, using 'taglib'", Server.Scanner.Extractor))
 Server.Scanner.Extractor = consts.DefaultScannerExtractor
 }
- logDeprecatedOptions("Scanner.GenreSeparators")
- logDeprecatedOptions("Scanner.GroupAlbumReleases")
- logDeprecatedOptions("DevEnableBufferedScrobble") // Deprecated: Buffered scrobbling is now always enabled and this option is ignored
- logDeprecatedOptions("ReverseProxyWhitelist", "ReverseProxyUserHeader")
+ logDeprecatedOptions("Scanner.GenreSeparators", "")
+ logDeprecatedOptions("Scanner.GroupAlbumReleases", "")
+ logDeprecatedOptions("DevEnableBufferedScrobble", "") // Deprecated: Buffered scrobbling is now always enabled and this option is ignored
+ logDeprecatedOptions("ReverseProxyWhitelist", "ExtAuth.TrustedSources")
+ logDeprecatedOptions("ReverseProxyUserHeader", "ExtAuth.UserHeader")
+ logDeprecatedOptions("HTTPSecurityHeaders.CustomFrameOptionsValue", "HTTPHeaders.FrameOptions")

 // Call init hooks
 for _, hook := range hooks {

@@ -375,16 +381,22 @@ func Load(noConfigDump bool) {
 }
 }

- func logDeprecatedOptions(options ...string) {
- for _, option := range options {
- envVar := "ND_" + strings.ToUpper(strings.ReplaceAll(option, ".", "_"))
- if os.Getenv(envVar) != "" {
- log.Warn(fmt.Sprintf("Option '%s' is deprecated and will be ignored in a future release", envVar))
- }
- if viper.InConfig(option) {
- log.Warn(fmt.Sprintf("Option '%s' is deprecated and will be ignored in a future release", option))
+ func logDeprecatedOptions(oldName, newName string) {
+ envVar := "ND_" + strings.ToUpper(strings.ReplaceAll(oldName, ".", "_"))
+ newEnvVar := "ND_" + strings.ToUpper(strings.ReplaceAll(newName, ".", "_"))
+ logWarning := func(oldName, newName string) {
+ if newName != "" {
+ log.Warn(fmt.Sprintf("Option '%s' is deprecated and will be ignored in a future release. Please use the new '%s'", oldName, newName))
+ } else {
+ log.Warn(fmt.Sprintf("Option '%s' is deprecated and will be ignored in a future release", oldName))
 }
 }
+ if os.Getenv(envVar) != "" {
+ logWarning(envVar, newEnvVar)
+ }
+ if viper.InConfig(oldName) {
+ logWarning(oldName, newName)
+ }
 }

 // mapDeprecatedOption is used to provide backwards compatibility for deprecated options. It should be called after

@@ -501,6 +513,16 @@ func AddHook(hook func()) {
 hooks = append(hooks, hook)
 }

+ // hasNDEnvVars checks if any ND_ prefixed environment variables are set (excluding ND_CONFIGFILE)
+ func hasNDEnvVars() bool {
+ for _, env := range os.Environ() {
+ if strings.HasPrefix(env, "ND_") && !strings.HasPrefix(env, "ND_CONFIGFILE=") {
+ return true
+ }
+ }
+ return false
+ }
+
 func setViperDefaults() {
 viper.SetDefault("musicfolder", filepath.Join(".", "music"))
 viper.SetDefault("cachefolder", "")

@@ -585,7 +607,7 @@ func setViperDefaults() {
 viper.SetDefault("subsonic.appendsubtitle", true)
 viper.SetDefault("subsonic.artistparticipations", false)
 viper.SetDefault("subsonic.defaultreportrealpath", false)
- viper.SetDefault("subsonic.legacyclients", "DSub")
+ viper.SetDefault("subsonic.legacyclients", "DSub,SubMusic")
 viper.SetDefault("agents", "lastfm,spotify,deezer")
 viper.SetDefault("lastfm.enabled", true)
 viper.SetDefault("lastfm.language", "en")

@@ -598,7 +620,8 @@ func setViperDefaults() {
 viper.SetDefault("deezer.language", "en")
 viper.SetDefault("listenbrainz.enabled", true)
 viper.SetDefault("listenbrainz.baseurl", "https://api.listenbrainz.org/1/")
- viper.SetDefault("httpsecurityheaders.customframeoptionsvalue", "DENY")
+ viper.SetDefault("enablescrobblehistory", true)
+ viper.SetDefault("httpheaders.frameoptions", "DENY")
 viper.SetDefault("backup.path", "")
 viper.SetDefault("backup.schedule", "")
 viper.SetDefault("backup.count", 0)

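To illustrate the renamed security-header option, a sketch of the migration implied by mapDeprecatedOption and the ND_ env-var naming in logDeprecatedOptions (the SAMEORIGIN value is only an example; the default shown above is DENY):

# Deprecated spelling, still mapped for backwards compatibility but logged as deprecated
ND_HTTPSECURITYHEADERS_CUSTOMFRAMEOPTIONSVALUE=SAMEORIGIN

# New spelling
ND_HTTPHEADERS_FRAMEOPTIONS=SAMEORIGIN
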
@@ -43,6 +43,7 @@ func newClient(hc httpDoer, language string) *client {
 func (c *client) searchArtists(ctx context.Context, name string, limit int) ([]Artist, error) {
 params := url.Values{}
 params.Add("q", name)
+ params.Add("order", "RANKING")
 params.Add("limit", strconv.Itoa(limit))
 req, err := http.NewRequestWithContext(ctx, "GET", apiBaseURL+"/search/artist", nil)
 if err != nil {

@@ -3,6 +3,7 @@ package deezer
 import (
 "context"
 "errors"
+ "fmt"
 "net/http"
 "strings"

@@ -82,10 +83,20 @@ func (s *deezerAgent) searchArtist(ctx context.Context, name string) (*Artist, e
 return nil, err
 }

+ log.Trace(ctx, "Artists found", "count", len(artists), "searched_name", name)
+ for i := range artists {
+ log.Trace(ctx, fmt.Sprintf("Artists found #%d", i), "name", artists[i].Name, "id", artists[i].ID, "link", artists[i].Link)
+ if i > 2 {
+ break
+ }
+ }
+
 // If the first one has the same name, that's the one
 if !strings.EqualFold(artists[0].Name, name) {
+ log.Trace(ctx, "Top artist do not match", "searched_name", name, "found_name", artists[0].Name)
 return nil, agents.ErrNotFound
 }
+ log.Trace(ctx, "Found artist", "name", artists[0].Name, "id", artists[0].ID, "link", artists[0].Link)
 return &artists[0], err
 }

@@ -290,11 +290,11 @@ func (l *lastfmAgent) callArtistGetTopTracks(ctx context.Context, artistName str
 return t.Track, nil
 }

- func (l *lastfmAgent) getArtistForScrobble(track *model.MediaFile) string {
- if conf.Server.LastFM.ScrobbleFirstArtistOnly && len(track.Participants[model.RoleArtist]) > 0 {
- return track.Participants[model.RoleArtist][0].Name
+ func (l *lastfmAgent) getArtistForScrobble(track *model.MediaFile, role model.Role, displayName string) string {
+ if conf.Server.LastFM.ScrobbleFirstArtistOnly && len(track.Participants[role]) > 0 {
+ return track.Participants[role][0].Name
 }
- return track.Artist
+ return displayName
 }

 func (l *lastfmAgent) NowPlaying(ctx context.Context, userId string, track *model.MediaFile, position int) error {

@@ -304,13 +304,13 @@ func (l *lastfmAgent) NowPlaying(ctx context.Context, userId string, track *mode
 }

 err = l.client.updateNowPlaying(ctx, sk, ScrobbleInfo{
- artist: l.getArtistForScrobble(track),
+ artist: l.getArtistForScrobble(track, model.RoleArtist, track.Artist),
 track: track.Title,
 album: track.Album,
 trackNumber: track.TrackNumber,
 mbid: track.MbzRecordingID,
 duration: int(track.Duration),
- albumArtist: track.AlbumArtist,
+ albumArtist: l.getArtistForScrobble(track, model.RoleAlbumArtist, track.AlbumArtist),
 })
 if err != nil {
 log.Warn(ctx, "Last.fm client.updateNowPlaying returned error", "track", track.Title, err)

@@ -330,13 +330,13 @@ func (l *lastfmAgent) Scrobble(ctx context.Context, userId string, s scrobbler.S
 return nil
 }
 err = l.client.scrobble(ctx, sk, ScrobbleInfo{
- artist: l.getArtistForScrobble(&s.MediaFile),
+ artist: l.getArtistForScrobble(&s.MediaFile, model.RoleArtist, s.Artist),
 track: s.Title,
 album: s.Album,
 trackNumber: s.TrackNumber,
 mbid: s.MbzRecordingID,
 duration: int(s.Duration),
- albumArtist: s.AlbumArtist,
+ albumArtist: l.getArtistForScrobble(&s.MediaFile, model.RoleAlbumArtist, s.AlbumArtist),
 timestamp: s.TimeStamp,
 })
 if err == nil {

@ -201,6 +201,10 @@ var _ = Describe("lastfmAgent", func() {
|
|||||||
{Artist: model.Artist{ID: "ar-1", Name: "First Artist"}},
|
{Artist: model.Artist{ID: "ar-1", Name: "First Artist"}},
|
||||||
{Artist: model.Artist{ID: "ar-2", Name: "Second Artist"}},
|
{Artist: model.Artist{ID: "ar-2", Name: "Second Artist"}},
|
||||||
},
|
},
|
||||||
|
model.RoleAlbumArtist: []model.Participant{
|
||||||
|
{Artist: model.Artist{ID: "ar-1", Name: "First Album Artist"}},
|
||||||
|
{Artist: model.Artist{ID: "ar-2", Name: "Second Album Artist"}},
|
||||||
|
},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
@ -229,6 +233,23 @@ var _ = Describe("lastfmAgent", func() {
|
|||||||
err := agent.NowPlaying(ctx, "user-2", track, 0)
|
err := agent.NowPlaying(ctx, "user-2", track, 0)
|
||||||
Expect(err).To(MatchError(scrobbler.ErrNotAuthorized))
|
Expect(err).To(MatchError(scrobbler.ErrNotAuthorized))
|
||||||
})
|
})
|
||||||
|
|
||||||
|
When("ScrobbleFirstArtistOnly is true", func() {
|
||||||
|
BeforeEach(func() {
|
||||||
|
conf.Server.LastFM.ScrobbleFirstArtistOnly = true
|
||||||
|
})
|
||||||
|
|
||||||
|
It("uses only the first artist", func() {
|
||||||
|
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString("{}")), StatusCode: 200}
|
||||||
|
|
||||||
|
err := agent.NowPlaying(ctx, "user-1", track, 0)
|
||||||
|
|
||||||
|
Expect(err).ToNot(HaveOccurred())
|
||||||
|
sentParams := httpClient.SavedRequest.URL.Query()
|
||||||
|
Expect(sentParams.Get("artist")).To(Equal("First Artist"))
|
||||||
|
Expect(sentParams.Get("albumArtist")).To(Equal("First Album Artist"))
|
||||||
|
})
|
||||||
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
Describe("scrobble", func() {
|
Describe("scrobble", func() {
|
||||||
@ -267,6 +288,7 @@ var _ = Describe("lastfmAgent", func() {
|
|||||||
Expect(err).ToNot(HaveOccurred())
|
Expect(err).ToNot(HaveOccurred())
|
||||||
sentParams := httpClient.SavedRequest.URL.Query()
|
sentParams := httpClient.SavedRequest.URL.Query()
|
||||||
Expect(sentParams.Get("artist")).To(Equal("First Artist"))
|
Expect(sentParams.Get("artist")).To(Equal("First Artist"))
|
||||||
|
Expect(sentParams.Get("albumArtist")).To(Equal("First Album Artist"))
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
|
|||||||
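The hunks above change scrobbling to select the artist per role. A minimal sketch of that selection logic, using simplified stand-in types (Participant, Role, MediaFile here are illustrative, not navidrome's real model package; the real code reads conf.Server.LastFM.ScrobbleFirstArtistOnly instead of taking a flag):

package main

import "fmt"

// Simplified stand-ins for the real model types, for illustration only.
type Participant struct{ Name string }
type Role string

const (
	RoleArtist      Role = "artist"
	RoleAlbumArtist Role = "albumartist"
)

type MediaFile struct {
	Participants map[Role][]Participant
}

// artistForScrobble mirrors the idea of the patched getArtistForScrobble:
// when firstArtistOnly is enabled and the requested role has participants,
// scrobble only the first participant's name; otherwise fall back to the
// role's display name (e.g. track.Artist or track.AlbumArtist).
func artistForScrobble(track MediaFile, role Role, displayName string, firstArtistOnly bool) string {
	if firstArtistOnly && len(track.Participants[role]) > 0 {
		return track.Participants[role][0].Name
	}
	return displayName
}

func main() {
	track := MediaFile{Participants: map[Role][]Participant{
		RoleArtist:      {{Name: "First Artist"}, {Name: "Second Artist"}},
		RoleAlbumArtist: {{Name: "First Album Artist"}},
	}}
	fmt.Println(artistForScrobble(track, RoleArtist, "First Artist & Second Artist", true))  // "First Artist"
	fmt.Println(artistForScrobble(track, RoleArtist, "First Artist & Second Artist", false)) // display name
}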
@@ -130,58 +130,99 @@ func (s *Stream) EstimatedContentLength() int {
 	return int(s.mf.Duration * float32(s.bitRate) / 8 * 1024)
 }

-// TODO This function deserves some love (refactoring)
 func selectTranscodingOptions(ctx context.Context, ds model.DataStore, mf *model.MediaFile, reqFormat string, reqBitRate int) (format string, bitRate int) {
+	// Default case
 	format = "raw"
+	bitRate = 0
+
+	// If the client explicitly requests "raw"
+	// then always serve the original
 	if reqFormat == "raw" {
+		return format, bitRate
+	}
+
+	// If requested format matches the file’s suffix and
+	// no bitrate reduction is requested then
+	// stream the file without transcoding
+	if reqFormat == mf.Suffix && reqBitRate == 0 {
+		return format, mf.BitRate
+	}
+
+	targetFormat, targetBitRate := findTargetTranscodingOptions(ctx, mf, reqFormat, reqBitRate)
+
+	// If nothing was found then stream raw
+	if targetFormat == "" && targetBitRate == 0 {
 		return format, 0
 	}
-	if reqFormat == mf.Suffix && reqBitRate == 0 {
-		bitRate = mf.BitRate
+	t, err := ds.Transcoding(ctx).FindByFormat(targetFormat)
-		return format, bitRate
+	if err != nil {
+		// TODO: log error?
+		return format, 0
 	}
-	trc, hasDefault := request.TranscodingFrom(ctx)
-	var cFormat string
+	format = t.TargetFormat
-	var cBitRate int
-	if reqFormat != "" {
+	// If no target bitrate was specified
-		cFormat = reqFormat
+	// fall back to the transcoding’s configuration
+	// default bitrate
+	if targetBitRate == 0 {
+		bitRate = t.DefaultBitRate
 	} else {
-		if hasDefault {
+		bitRate = targetBitRate
-			cFormat = trc.TargetFormat
-			cBitRate = trc.DefaultBitRate
-			if p, ok := request.PlayerFrom(ctx); ok {
-				cBitRate = p.MaxBitRate
-			}
-		} else if reqBitRate > 0 && reqBitRate < mf.BitRate && conf.Server.DefaultDownsamplingFormat != "" {
-			// If no format is specified and no transcoding associated to the player, but a bitrate is specified,
-			// and there is no transcoding set for the player, we use the default downsampling format.
-			// But only if the requested bitRate is lower than the original bitRate.
-			log.Debug("Default Downsampling", "Using default downsampling format", conf.Server.DefaultDownsamplingFormat)
-			cFormat = conf.Server.DefaultDownsamplingFormat
-		}
-	}
-	if reqBitRate > 0 {
-		cBitRate = reqBitRate
-	}
-	if cBitRate == 0 && cFormat == "" {
-		return format, bitRate
-	}
-	t, err := ds.Transcoding(ctx).FindByFormat(cFormat)
-	if err == nil {
-		format = t.TargetFormat
-		if cBitRate != 0 {
-			bitRate = cBitRate
-		} else {
-			bitRate = t.DefaultBitRate
-		}
 	}

+	// If the final format is the same as the original
+	// and does not reduce bitrate
+	// there’s no reason to transcode
 	if format == mf.Suffix && bitRate >= mf.BitRate {
-		format = "raw"
+		return "raw", 0
-		bitRate = 0
 	}

 	return format, bitRate
 }

+func findTargetTranscodingOptions(ctx context.Context, mf *model.MediaFile, reqFormat string, reqBitRate int) (string, int) {
+	// If a format is requested use that
+	if reqFormat != "" {
+		return reqFormat, reqBitRate
+	}
+
+	// If a default transcoding configuration exists for this context
+	if trc, ok := request.TranscodingFrom(ctx); ok {
+		targetFormat := trc.TargetFormat
+		targetBitRate := trc.DefaultBitRate
+
+		// If a player is configured adjust bitrate based on
+		// user request or player limits
+		if p, hasPlayer := request.PlayerFrom(ctx); hasPlayer {
+			if reqBitRate > 0 {
+				targetBitRate = reqBitRate
+			} else if p.MaxBitRate > 0 {
+				targetBitRate = p.MaxBitRate
+			}
+		} else if reqBitRate > 0 {
+			targetBitRate = reqBitRate
+		}
+
+		return targetFormat, targetBitRate
+	}
+
+	// Use the default downsampling format the server is configured to but
+	// only if the requested bitrate is reduced
+	isBitrateReduced := reqBitRate > 0 && reqBitRate < mf.BitRate
+	hasDefaultDownsamplingFormat := conf.Server.DefaultDownsamplingFormat != ""
+
+	if isBitrateReduced && hasDefaultDownsamplingFormat {
+		log.Debug("Default Downsampling",
+			"Using default downsampling format",
+			conf.Server.DefaultDownsamplingFormat)
+		return conf.Server.DefaultDownsamplingFormat, reqBitRate
+	}
+
+	return "", 0
+}

 var (
 	onceTranscodingCache     sync.Once
 	instanceTranscodingCache TranscodingCache
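The refactored selection above boils down to a fixed precedence for picking the transcoding target. A condensed sketch of that precedence, with simplified stand-in types (not the real model/request packages):

package main

import "fmt"

// transcoding is an illustrative stand-in for the player's configured transcoding.
type transcoding struct {
	TargetFormat   string
	DefaultBitRate int
}

// targetOptions mirrors the precedence of findTargetTranscodingOptions:
// explicit format request > player transcoding (request bitrate, then player
// max, then transcoding default) > default downsampling format when a lower
// bitrate was asked for > raw.
func targetOptions(reqFormat string, reqBitRate int, trc *transcoding, playerMaxBitRate int,
	fileBitRate int, defaultDownsamplingFormat string) (string, int) {
	if reqFormat != "" {
		return reqFormat, reqBitRate
	}
	if trc != nil {
		bitRate := trc.DefaultBitRate
		if reqBitRate > 0 {
			bitRate = reqBitRate
		} else if playerMaxBitRate > 0 {
			bitRate = playerMaxBitRate
		}
		return trc.TargetFormat, bitRate
	}
	if reqBitRate > 0 && reqBitRate < fileBitRate && defaultDownsamplingFormat != "" {
		return defaultDownsamplingFormat, reqBitRate
	}
	return "", 0 // nothing matched: stream raw
}

func main() {
	fmt.Println(targetOptions("", 128, nil, 0, 320, "mp3")) // mp3 128
	fmt.Println(targetOptions("", 0, nil, 0, 320, "mp3"))   // "" 0, i.e. raw
}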
@@ -1,6 +1,7 @@
 package core

 import (
+	"cmp"
 	"context"
 	"encoding/json"
 	"errors"
@@ -9,7 +10,7 @@ import (
 	"net/url"
 	"os"
 	"path/filepath"
-	"regexp"
+	"slices"
 	"strings"
 	"time"

@@ -194,22 +195,35 @@ func (s *playlists) parseM3U(ctx context.Context, pls *model.Playlist, folder *m
 			}
 			filteredLines = append(filteredLines, line)
 		}
-		paths, err := s.normalizePaths(ctx, pls, folder, filteredLines)
+		resolvedPaths, err := s.resolvePaths(ctx, folder, filteredLines)
 		if err != nil {
-			log.Warn(ctx, "Error normalizing paths in playlist", "playlist", pls.Name, err)
+			log.Warn(ctx, "Error resolving paths in playlist", "playlist", pls.Name, err)
 			continue
 		}
-		found, err := mediaFileRepository.FindByPaths(paths)
+		// Normalize to NFD for filesystem compatibility (macOS). Database stores paths in NFD.
+		// See https://github.com/navidrome/navidrome/issues/4663
+		resolvedPaths = slice.Map(resolvedPaths, func(path string) string {
+			return strings.ToLower(norm.NFD.String(path))
+		})
+
+		found, err := mediaFileRepository.FindByPaths(resolvedPaths)
 		if err != nil {
 			log.Warn(ctx, "Error reading files from DB", "playlist", pls.Name, err)
 			continue
 		}
+		// Build lookup map with library-qualified keys, normalized for comparison
 		existing := make(map[string]int, len(found))
 		for idx := range found {
-			existing[normalizePathForComparison(found[idx].Path)] = idx
+			// Normalize to lowercase for case-insensitive comparison
+			// Key format: "libraryID:path"
+			key := fmt.Sprintf("%d:%s", found[idx].LibraryID, strings.ToLower(found[idx].Path))
+			existing[key] = idx
 		}
-		for _, path := range paths {
-			idx, ok := existing[normalizePathForComparison(path)]
+		// Find media files in the order of the resolved paths, to keep playlist order
+		for _, path := range resolvedPaths {
+			idx, ok := existing[path]
 			if ok {
 				mfs = append(mfs, found[idx])
 			} else {
@@ -226,69 +240,150 @@ func (s *playlists) parseM3U(ctx context.Context, pls *model.Playlist, folder *m
 	return nil
 }

-// normalizePathForComparison normalizes a file path to NFC form and converts to lowercase
+// pathResolution holds the result of resolving a playlist path to a library-relative path.
-// for consistent comparison. This fixes Unicode normalization issues on macOS where
+type pathResolution struct {
-// Apple Music creates playlists with NFC-encoded paths but the filesystem uses NFD.
+	absolutePath string
-func normalizePathForComparison(path string) string {
+	libraryPath  string
-	return strings.ToLower(norm.NFC.String(path))
+	libraryID    int
+	valid        bool
 }

-// TODO This won't work for multiple libraries
+// ToQualifiedString converts the path resolution to a library-qualified string with forward slashes.
-func (s *playlists) normalizePaths(ctx context.Context, pls *model.Playlist, folder *model.Folder, lines []string) ([]string, error) {
+// Format: "libraryID:relativePath" with forward slashes for path separators.
-	libRegex, err := s.compileLibraryPaths(ctx)
+func (r pathResolution) ToQualifiedString() (string, error) {
+	if !r.valid {
+		return "", fmt.Errorf("invalid path resolution")
+	}
+	relativePath, err := filepath.Rel(r.libraryPath, r.absolutePath)
 	if err != nil {
-		return nil, err
+		return "", err
 	}
+	// Convert path separators to forward slashes
-	res := make([]string, 0, len(lines))
+	return fmt.Sprintf("%d:%s", r.libraryID, filepath.ToSlash(relativePath)), nil
-	for idx, line := range lines {
-		var libPath string
-		var filePath string
-
-		if folder != nil && !filepath.IsAbs(line) {
-			libPath = folder.LibraryPath
-			filePath = filepath.Join(folder.AbsolutePath(), line)
-		} else {
-			cleanLine := filepath.Clean(line)
-			if libPath = libRegex.FindString(cleanLine); libPath != "" {
-				filePath = cleanLine
-			}
-		}
-
-		if libPath != "" {
-			if rel, err := filepath.Rel(libPath, filePath); err == nil {
-				res = append(res, rel)
-			} else {
-				log.Debug(ctx, "Error getting relative path", "playlist", pls.Name, "path", line, "libPath", libPath,
-					"filePath", filePath, err)
-			}
-		} else {
-			log.Warn(ctx, "Path in playlist not found in any library", "path", line, "line", idx)
-		}
-	}
-	return slice.Map(res, filepath.ToSlash), nil
 }

-func (s *playlists) compileLibraryPaths(ctx context.Context) (*regexp.Regexp, error) {
+// libraryMatcher holds sorted libraries with cleaned paths for efficient path matching.
-	libs, err := s.ds.Library(ctx).GetAll()
+type libraryMatcher struct {
-	if err != nil {
+	libraries    model.Libraries
-		return nil, err
+	cleanedPaths []string
 	}

-	// Create regex patterns for each library path
+// findLibraryForPath finds which library contains the given absolute path.
-	patterns := make([]string, len(libs))
+// Returns library ID and path, or 0 and empty string if not found.
+func (lm *libraryMatcher) findLibraryForPath(absolutePath string) (int, string) {
+	// Check sorted libraries (longest path first) to find the best match
+	for i, cleanLibPath := range lm.cleanedPaths {
+		// Check if absolutePath is under this library path
+		if strings.HasPrefix(absolutePath, cleanLibPath) {
+			// Ensure it's a proper path boundary (not just a prefix)
+			if len(absolutePath) == len(cleanLibPath) || absolutePath[len(cleanLibPath)] == filepath.Separator {
+				return lm.libraries[i].ID, cleanLibPath
+			}
+		}
+	}
+	return 0, ""
+}
+
+// newLibraryMatcher creates a libraryMatcher with libraries sorted by path length (longest first).
+// This ensures correct matching when library paths are prefixes of each other.
+// Example: /music-classical must be checked before /music
+// Otherwise, /music-classical/track.mp3 would match /music instead of /music-classical
+func newLibraryMatcher(libs model.Libraries) *libraryMatcher {
+	// Sort libraries by path length (descending) to ensure longest paths match first.
+	slices.SortFunc(libs, func(i, j model.Library) int {
+		return cmp.Compare(len(j.Path), len(i.Path)) // Reverse order for descending
+	})
+
+	// Pre-clean all library paths once for efficient matching
+	cleanedPaths := make([]string, len(libs))
 	for i, lib := range libs {
-		cleanPath := filepath.Clean(lib.Path)
+		cleanedPaths[i] = filepath.Clean(lib.Path)
-		escapedPath := regexp.QuoteMeta(cleanPath)
-		patterns[i] = fmt.Sprintf("^%s(?:/|$)", escapedPath)
 	}
-	// Combine all patterns into a single regex
+	return &libraryMatcher{
-	combinedPattern := strings.Join(patterns, "|")
+		libraries:    libs,
-	re, err := regexp.Compile(combinedPattern)
+		cleanedPaths: cleanedPaths,
+	}
+}
+
+// pathResolver handles path resolution logic for playlist imports.
+type pathResolver struct {
+	matcher *libraryMatcher
+}
+
+// newPathResolver creates a pathResolver with libraries loaded from the datastore.
+func newPathResolver(ctx context.Context, ds model.DataStore) (*pathResolver, error) {
+	libs, err := ds.Library(ctx).GetAll()
 	if err != nil {
-		return nil, fmt.Errorf("compiling library paths `%s`: %w", combinedPattern, err)
+		return nil, err
 	}
-	return re, nil
+	matcher := newLibraryMatcher(libs)
+	return &pathResolver{matcher: matcher}, nil
+}
+
+// resolvePath determines the absolute path and library path for a playlist entry.
+// For absolute paths, it uses them directly.
+// For relative paths, it resolves them relative to the playlist's folder location.
+// Example: playlist at /music/playlists/test.m3u with line "../songs/abc.mp3"
+//
+//	resolves to /music/songs/abc.mp3
+func (r *pathResolver) resolvePath(line string, folder *model.Folder) pathResolution {
+	var absolutePath string
+	if folder != nil && !filepath.IsAbs(line) {
+		// Resolve relative path to absolute path based on playlist location
+		absolutePath = filepath.Clean(filepath.Join(folder.AbsolutePath(), line))
+	} else {
+		// Use absolute path directly after cleaning
+		absolutePath = filepath.Clean(line)
+	}
+
+	return r.findInLibraries(absolutePath)
+}
+
+// findInLibraries matches an absolute path against all known libraries and returns
+// a pathResolution with the library information. Returns an invalid resolution if
+// the path is not found in any library.
+func (r *pathResolver) findInLibraries(absolutePath string) pathResolution {
+	libID, libPath := r.matcher.findLibraryForPath(absolutePath)
+	if libID == 0 {
+		return pathResolution{valid: false}
+	}
+	return pathResolution{
+		absolutePath: absolutePath,
+		libraryPath:  libPath,
+		libraryID:    libID,
+		valid:        true,
+	}
+}
+
+// resolvePaths converts playlist file paths to library-qualified paths (format: "libraryID:relativePath").
+// For relative paths, it resolves them to absolute paths first, then determines which
+// library they belong to. This allows playlists to reference files across library boundaries.
+func (s *playlists) resolvePaths(ctx context.Context, folder *model.Folder, lines []string) ([]string, error) {
+	resolver, err := newPathResolver(ctx, s.ds)
+	if err != nil {
+		return nil, err
+	}
+
+	results := make([]string, 0, len(lines))
+	for idx, line := range lines {
+		resolution := resolver.resolvePath(line, folder)
+
+		if !resolution.valid {
+			log.Warn(ctx, "Path in playlist not found in any library", "path", line, "line", idx)
+			continue
+		}
+
+		qualifiedPath, err := resolution.ToQualifiedString()
+		if err != nil {
+			log.Debug(ctx, "Error getting library-qualified path", "path", line,
+				"libPath", resolution.libraryPath, "filePath", resolution.absolutePath, err)
+			continue
+		}
+
+		results = append(results, qualifiedPath)
+	}

+	return results, nil
 }

 func (s *playlists) updatePlaylist(ctx context.Context, newPls *model.Playlist) error {
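Two ideas carry this refactor: library roots are matched longest-first so "/music-classical" wins over "/music", and each resolved file is keyed as "libraryID:relativePath" so identical relative paths from different libraries stay distinct. An illustrative sketch of both, with a simplified library type (not navidrome's model.Library):

package main

import (
	"fmt"
	"path/filepath"
	"sort"
	"strings"
)

// library is a simplified stand-in for a music library root.
type library struct {
	ID   int
	Path string
}

// qualify resolves an absolute file path to a "libraryID:relativePath" key,
// trying the longest library root first and requiring a path-separator boundary.
func qualify(libs []library, absolutePath string) (string, bool) {
	sort.Slice(libs, func(i, j int) bool { return len(libs[i].Path) > len(libs[j].Path) })
	for _, lib := range libs {
		root := filepath.Clean(lib.Path)
		if absolutePath == root || strings.HasPrefix(absolutePath, root+string(filepath.Separator)) {
			rel, err := filepath.Rel(root, absolutePath)
			if err != nil {
				return "", false
			}
			return fmt.Sprintf("%d:%s", lib.ID, filepath.ToSlash(rel)), true
		}
	}
	return "", false
}

func main() {
	libs := []library{{1, "/music"}, {2, "/music-classical"}}
	fmt.Println(qualify(libs, "/music-classical/opera/track.mp3")) // "2:opera/track.mp3" true
	fmt.Println(qualify(libs, "/music/album/track.mp3"))           // "1:album/track.mp3" true
	fmt.Println(qualify(libs, "/videos/movie.mp4"))                // "" false
}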
core/playlists_internal_test.go (new file, 406 lines)
@@ -0,0 +1,406 @@
|
package core
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
|
||||||
|
"github.com/navidrome/navidrome/model"
|
||||||
|
"github.com/navidrome/navidrome/tests"
|
||||||
|
. "github.com/onsi/ginkgo/v2"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
)
|
||||||
|
|
||||||
|
var _ = Describe("libraryMatcher", func() {
|
||||||
|
var ds *tests.MockDataStore
|
||||||
|
var mockLibRepo *tests.MockLibraryRepo
|
||||||
|
ctx := context.Background()
|
||||||
|
|
||||||
|
BeforeEach(func() {
|
||||||
|
mockLibRepo = &tests.MockLibraryRepo{}
|
||||||
|
ds = &tests.MockDataStore{
|
||||||
|
MockedLibrary: mockLibRepo,
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
// Helper function to create a libraryMatcher from the mock datastore
|
||||||
|
createMatcher := func(ds model.DataStore) *libraryMatcher {
|
||||||
|
libs, err := ds.Library(ctx).GetAll()
|
||||||
|
Expect(err).ToNot(HaveOccurred())
|
||||||
|
return newLibraryMatcher(libs)
|
||||||
|
}
|
||||||
|
|
||||||
|
Describe("Longest library path matching", func() {
|
||||||
|
It("matches the longest library path when multiple libraries share a prefix", func() {
|
||||||
|
// Setup libraries with prefix conflicts
|
||||||
|
mockLibRepo.SetData([]model.Library{
|
||||||
|
{ID: 1, Path: "/music"},
|
||||||
|
{ID: 2, Path: "/music-classical"},
|
||||||
|
{ID: 3, Path: "/music-classical/opera"},
|
||||||
|
})
|
||||||
|
|
||||||
|
matcher := createMatcher(ds)
|
||||||
|
|
||||||
|
// Test that longest path matches first and returns correct library ID
|
||||||
|
testCases := []struct {
|
||||||
|
path string
|
||||||
|
expectedLibID int
|
||||||
|
expectedLibPath string
|
||||||
|
}{
|
||||||
|
{"/music-classical/opera/track.mp3", 3, "/music-classical/opera"},
|
||||||
|
{"/music-classical/track.mp3", 2, "/music-classical"},
|
||||||
|
{"/music/track.mp3", 1, "/music"},
|
||||||
|
{"/music-classical/opera/subdir/file.mp3", 3, "/music-classical/opera"},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tc := range testCases {
|
||||||
|
libID, libPath := matcher.findLibraryForPath(tc.path)
|
||||||
|
Expect(libID).To(Equal(tc.expectedLibID), "Path %s should match library ID %d, but got %d", tc.path, tc.expectedLibID, libID)
|
||||||
|
Expect(libPath).To(Equal(tc.expectedLibPath), "Path %s should match library path %s, but got %s", tc.path, tc.expectedLibPath, libPath)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
It("handles libraries with similar prefixes but different structures", func() {
|
||||||
|
mockLibRepo.SetData([]model.Library{
|
||||||
|
{ID: 1, Path: "/home/user/music"},
|
||||||
|
{ID: 2, Path: "/home/user/music-backup"},
|
||||||
|
})
|
||||||
|
|
||||||
|
matcher := createMatcher(ds)
|
||||||
|
|
||||||
|
// Test that music-backup library is matched correctly
|
||||||
|
libID, libPath := matcher.findLibraryForPath("/home/user/music-backup/track.mp3")
|
||||||
|
Expect(libID).To(Equal(2))
|
||||||
|
Expect(libPath).To(Equal("/home/user/music-backup"))
|
||||||
|
|
||||||
|
// Test that music library is still matched correctly
|
||||||
|
libID, libPath = matcher.findLibraryForPath("/home/user/music/track.mp3")
|
||||||
|
Expect(libID).To(Equal(1))
|
||||||
|
Expect(libPath).To(Equal("/home/user/music"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("matches path that is exactly the library root", func() {
|
||||||
|
mockLibRepo.SetData([]model.Library{
|
||||||
|
{ID: 1, Path: "/music"},
|
||||||
|
{ID: 2, Path: "/music-classical"},
|
||||||
|
})
|
||||||
|
|
||||||
|
matcher := createMatcher(ds)
|
||||||
|
|
||||||
|
// Exact library path should match
|
||||||
|
libID, libPath := matcher.findLibraryForPath("/music-classical")
|
||||||
|
Expect(libID).To(Equal(2))
|
||||||
|
Expect(libPath).To(Equal("/music-classical"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("handles complex nested library structures", func() {
|
||||||
|
mockLibRepo.SetData([]model.Library{
|
||||||
|
{ID: 1, Path: "/media"},
|
||||||
|
{ID: 2, Path: "/media/audio"},
|
||||||
|
{ID: 3, Path: "/media/audio/classical"},
|
||||||
|
{ID: 4, Path: "/media/audio/classical/baroque"},
|
||||||
|
})
|
||||||
|
|
||||||
|
matcher := createMatcher(ds)
|
||||||
|
|
||||||
|
testCases := []struct {
|
||||||
|
path string
|
||||||
|
expectedLibID int
|
||||||
|
expectedLibPath string
|
||||||
|
}{
|
||||||
|
{"/media/audio/classical/baroque/bach/track.mp3", 4, "/media/audio/classical/baroque"},
|
||||||
|
{"/media/audio/classical/mozart/track.mp3", 3, "/media/audio/classical"},
|
||||||
|
{"/media/audio/rock/track.mp3", 2, "/media/audio"},
|
||||||
|
{"/media/video/movie.mp4", 1, "/media"},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tc := range testCases {
|
||||||
|
libID, libPath := matcher.findLibraryForPath(tc.path)
|
||||||
|
Expect(libID).To(Equal(tc.expectedLibID), "Path %s should match library ID %d", tc.path, tc.expectedLibID)
|
||||||
|
Expect(libPath).To(Equal(tc.expectedLibPath), "Path %s should match library path %s", tc.path, tc.expectedLibPath)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("Edge cases", func() {
|
||||||
|
It("handles empty library list", func() {
|
||||||
|
mockLibRepo.SetData([]model.Library{})
|
||||||
|
|
||||||
|
matcher := createMatcher(ds)
|
||||||
|
Expect(matcher).ToNot(BeNil())
|
||||||
|
|
||||||
|
// Should not match anything
|
||||||
|
libID, libPath := matcher.findLibraryForPath("/music/track.mp3")
|
||||||
|
Expect(libID).To(Equal(0))
|
||||||
|
Expect(libPath).To(BeEmpty())
|
||||||
|
})
|
||||||
|
|
||||||
|
It("handles single library", func() {
|
||||||
|
mockLibRepo.SetData([]model.Library{
|
||||||
|
{ID: 1, Path: "/music"},
|
||||||
|
})
|
||||||
|
|
||||||
|
matcher := createMatcher(ds)
|
||||||
|
|
||||||
|
libID, libPath := matcher.findLibraryForPath("/music/track.mp3")
|
||||||
|
Expect(libID).To(Equal(1))
|
||||||
|
Expect(libPath).To(Equal("/music"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("handles libraries with special characters in paths", func() {
|
||||||
|
mockLibRepo.SetData([]model.Library{
|
||||||
|
{ID: 1, Path: "/music[test]"},
|
||||||
|
{ID: 2, Path: "/music(backup)"},
|
||||||
|
})
|
||||||
|
|
||||||
|
matcher := createMatcher(ds)
|
||||||
|
Expect(matcher).ToNot(BeNil())
|
||||||
|
|
||||||
|
// Special characters should match literally
|
||||||
|
libID, libPath := matcher.findLibraryForPath("/music[test]/track.mp3")
|
||||||
|
Expect(libID).To(Equal(1))
|
||||||
|
Expect(libPath).To(Equal("/music[test]"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("Path matching order", func() {
|
||||||
|
It("ensures longest paths match first", func() {
|
||||||
|
mockLibRepo.SetData([]model.Library{
|
||||||
|
{ID: 1, Path: "/a"},
|
||||||
|
{ID: 2, Path: "/ab"},
|
||||||
|
{ID: 3, Path: "/abc"},
|
||||||
|
})
|
||||||
|
|
||||||
|
matcher := createMatcher(ds)
|
||||||
|
|
||||||
|
// Verify that longer paths match correctly (not cut off by shorter prefix)
|
||||||
|
testCases := []struct {
|
||||||
|
path string
|
||||||
|
expectedLibID int
|
||||||
|
}{
|
||||||
|
{"/abc/file.mp3", 3},
|
||||||
|
{"/ab/file.mp3", 2},
|
||||||
|
{"/a/file.mp3", 1},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tc := range testCases {
|
||||||
|
libID, _ := matcher.findLibraryForPath(tc.path)
|
||||||
|
Expect(libID).To(Equal(tc.expectedLibID), "Path %s should match library ID %d", tc.path, tc.expectedLibID)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
var _ = Describe("pathResolver", func() {
|
||||||
|
var ds *tests.MockDataStore
|
||||||
|
var mockLibRepo *tests.MockLibraryRepo
|
||||||
|
var resolver *pathResolver
|
||||||
|
ctx := context.Background()
|
||||||
|
|
||||||
|
BeforeEach(func() {
|
||||||
|
mockLibRepo = &tests.MockLibraryRepo{}
|
||||||
|
ds = &tests.MockDataStore{
|
||||||
|
MockedLibrary: mockLibRepo,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Setup test libraries
|
||||||
|
mockLibRepo.SetData([]model.Library{
|
||||||
|
{ID: 1, Path: "/music"},
|
||||||
|
{ID: 2, Path: "/music-classical"},
|
||||||
|
{ID: 3, Path: "/podcasts"},
|
||||||
|
})
|
||||||
|
|
||||||
|
var err error
|
||||||
|
resolver, err = newPathResolver(ctx, ds)
|
||||||
|
Expect(err).ToNot(HaveOccurred())
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("resolvePath", func() {
|
||||||
|
It("resolves absolute paths", func() {
|
||||||
|
resolution := resolver.resolvePath("/music/artist/album/track.mp3", nil)
|
||||||
|
|
||||||
|
Expect(resolution.valid).To(BeTrue())
|
||||||
|
Expect(resolution.libraryID).To(Equal(1))
|
||||||
|
Expect(resolution.libraryPath).To(Equal("/music"))
|
||||||
|
Expect(resolution.absolutePath).To(Equal("/music/artist/album/track.mp3"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("resolves relative paths when folder is provided", func() {
|
||||||
|
folder := &model.Folder{
|
||||||
|
Path: "playlists",
|
||||||
|
LibraryPath: "/music",
|
||||||
|
LibraryID: 1,
|
||||||
|
}
|
||||||
|
|
||||||
|
resolution := resolver.resolvePath("../artist/album/track.mp3", folder)
|
||||||
|
|
||||||
|
Expect(resolution.valid).To(BeTrue())
|
||||||
|
Expect(resolution.libraryID).To(Equal(1))
|
||||||
|
Expect(resolution.absolutePath).To(Equal("/music/artist/album/track.mp3"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("returns invalid resolution for paths outside any library", func() {
|
||||||
|
resolution := resolver.resolvePath("/outside/library/track.mp3", nil)
|
||||||
|
|
||||||
|
Expect(resolution.valid).To(BeFalse())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("resolvePath", func() {
|
||||||
|
Context("With absolute paths", func() {
|
||||||
|
It("resolves path within a library", func() {
|
||||||
|
resolution := resolver.resolvePath("/music/track.mp3", nil)
|
||||||
|
|
||||||
|
Expect(resolution.valid).To(BeTrue())
|
||||||
|
Expect(resolution.libraryID).To(Equal(1))
|
||||||
|
Expect(resolution.libraryPath).To(Equal("/music"))
|
||||||
|
Expect(resolution.absolutePath).To(Equal("/music/track.mp3"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("resolves path to the longest matching library", func() {
|
||||||
|
resolution := resolver.resolvePath("/music-classical/track.mp3", nil)
|
||||||
|
|
||||||
|
Expect(resolution.valid).To(BeTrue())
|
||||||
|
Expect(resolution.libraryID).To(Equal(2))
|
||||||
|
Expect(resolution.libraryPath).To(Equal("/music-classical"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("returns invalid resolution for path outside libraries", func() {
|
||||||
|
resolution := resolver.resolvePath("/videos/movie.mp4", nil)
|
||||||
|
|
||||||
|
Expect(resolution.valid).To(BeFalse())
|
||||||
|
})
|
||||||
|
|
||||||
|
It("cleans the path before matching", func() {
|
||||||
|
resolution := resolver.resolvePath("/music//artist/../artist/track.mp3", nil)
|
||||||
|
|
||||||
|
Expect(resolution.valid).To(BeTrue())
|
||||||
|
Expect(resolution.absolutePath).To(Equal("/music/artist/track.mp3"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("With relative paths", func() {
|
||||||
|
It("resolves relative path within same library", func() {
|
||||||
|
folder := &model.Folder{
|
||||||
|
Path: "playlists",
|
||||||
|
LibraryPath: "/music",
|
||||||
|
LibraryID: 1,
|
||||||
|
}
|
||||||
|
|
||||||
|
resolution := resolver.resolvePath("../songs/track.mp3", folder)
|
||||||
|
|
||||||
|
Expect(resolution.valid).To(BeTrue())
|
||||||
|
Expect(resolution.libraryID).To(Equal(1))
|
||||||
|
Expect(resolution.absolutePath).To(Equal("/music/songs/track.mp3"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("resolves relative path to different library", func() {
|
||||||
|
folder := &model.Folder{
|
||||||
|
Path: "playlists",
|
||||||
|
LibraryPath: "/music",
|
||||||
|
LibraryID: 1,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Path goes up and into a different library
|
||||||
|
resolution := resolver.resolvePath("../../podcasts/episode.mp3", folder)
|
||||||
|
|
||||||
|
Expect(resolution.valid).To(BeTrue())
|
||||||
|
Expect(resolution.libraryID).To(Equal(3))
|
||||||
|
Expect(resolution.libraryPath).To(Equal("/podcasts"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("uses matcher to find correct library for resolved path", func() {
|
||||||
|
folder := &model.Folder{
|
||||||
|
Path: "playlists",
|
||||||
|
LibraryPath: "/music",
|
||||||
|
LibraryID: 1,
|
||||||
|
}
|
||||||
|
|
||||||
|
// This relative path resolves to music-classical library
|
||||||
|
resolution := resolver.resolvePath("../../music-classical/track.mp3", folder)
|
||||||
|
|
||||||
|
Expect(resolution.valid).To(BeTrue())
|
||||||
|
Expect(resolution.libraryID).To(Equal(2))
|
||||||
|
Expect(resolution.libraryPath).To(Equal("/music-classical"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("returns invalid for relative paths escaping all libraries", func() {
|
||||||
|
folder := &model.Folder{
|
||||||
|
Path: "playlists",
|
||||||
|
LibraryPath: "/music",
|
||||||
|
LibraryID: 1,
|
||||||
|
}
|
||||||
|
|
||||||
|
resolution := resolver.resolvePath("../../../../etc/passwd", folder)
|
||||||
|
|
||||||
|
Expect(resolution.valid).To(BeFalse())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("Cross-library resolution scenarios", func() {
|
||||||
|
It("handles playlist in library A referencing file in library B", func() {
|
||||||
|
// Playlist is in /music/playlists
|
||||||
|
folder := &model.Folder{
|
||||||
|
Path: "playlists",
|
||||||
|
LibraryPath: "/music",
|
||||||
|
LibraryID: 1,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Relative path that goes to /podcasts library
|
||||||
|
resolution := resolver.resolvePath("../../podcasts/show/episode.mp3", folder)
|
||||||
|
|
||||||
|
Expect(resolution.valid).To(BeTrue())
|
||||||
|
Expect(resolution.libraryID).To(Equal(3), "Should resolve to podcasts library")
|
||||||
|
Expect(resolution.libraryPath).To(Equal("/podcasts"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("prefers longer library paths when resolving", func() {
|
||||||
|
// Ensure /music-classical is matched instead of /music
|
||||||
|
resolution := resolver.resolvePath("/music-classical/baroque/track.mp3", nil)
|
||||||
|
|
||||||
|
Expect(resolution.valid).To(BeTrue())
|
||||||
|
Expect(resolution.libraryID).To(Equal(2), "Should match /music-classical, not /music")
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
var _ = Describe("pathResolution", func() {
|
||||||
|
Describe("ToQualifiedString", func() {
|
||||||
|
It("converts valid resolution to qualified string with forward slashes", func() {
|
||||||
|
resolution := pathResolution{
|
||||||
|
absolutePath: "/music/artist/album/track.mp3",
|
||||||
|
libraryPath: "/music",
|
||||||
|
libraryID: 1,
|
||||||
|
valid: true,
|
||||||
|
}
|
||||||
|
|
||||||
|
qualifiedStr, err := resolution.ToQualifiedString()
|
||||||
|
|
||||||
|
Expect(err).ToNot(HaveOccurred())
|
||||||
|
Expect(qualifiedStr).To(Equal("1:artist/album/track.mp3"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("handles Windows-style paths by converting to forward slashes", func() {
|
||||||
|
resolution := pathResolution{
|
||||||
|
absolutePath: "/music/artist/album/track.mp3",
|
||||||
|
libraryPath: "/music",
|
||||||
|
libraryID: 2,
|
||||||
|
valid: true,
|
||||||
|
}
|
||||||
|
|
||||||
|
qualifiedStr, err := resolution.ToQualifiedString()
|
||||||
|
|
||||||
|
Expect(err).ToNot(HaveOccurred())
|
||||||
|
// Should always use forward slashes regardless of OS
|
||||||
|
Expect(qualifiedStr).To(ContainSubstring("2:"))
|
||||||
|
Expect(qualifiedStr).ToNot(ContainSubstring("\\"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("returns error for invalid resolution", func() {
|
||||||
|
resolution := pathResolution{valid: false}
|
||||||
|
|
||||||
|
_, err := resolution.ToQualifiedString()
|
||||||
|
|
||||||
|
Expect(err).To(HaveOccurred())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
@@ -1,4 +1,4 @@
-package core
+package core_test

 import (
 	"context"
@@ -9,6 +9,7 @@ import (

 	"github.com/navidrome/navidrome/conf"
 	"github.com/navidrome/navidrome/conf/configtest"
+	"github.com/navidrome/navidrome/core"
 	"github.com/navidrome/navidrome/model"
 	"github.com/navidrome/navidrome/model/criteria"
 	"github.com/navidrome/navidrome/model/request"
@@ -20,7 +21,7 @@ import (

 var _ = Describe("Playlists", func() {
 	var ds *tests.MockDataStore
-	var ps Playlists
+	var ps core.Playlists
 	var mockPlsRepo mockedPlaylistRepo
 	var mockLibRepo *tests.MockLibraryRepo
 	ctx := context.Background()
@@ -33,16 +34,16 @@ var _ = Describe("Playlists", func() {
 			MockedLibrary: mockLibRepo,
 		}
 		ctx = request.WithUser(ctx, model.User{ID: "123"})
-		// Path should be libPath, but we want to match the root folder referenced in the m3u, which is `/`
-		mockLibRepo.SetData([]model.Library{{ID: 1, Path: "/"}})
 	})

 	Describe("ImportFile", func() {
 		var folder *model.Folder
 		BeforeEach(func() {
-			ps = NewPlaylists(ds)
+			ps = core.NewPlaylists(ds)
 			ds.MockedMediaFile = &mockedMediaFileRepo{}
 			libPath, _ := os.Getwd()
+			// Set up library with the actual library path that matches the folder
+			mockLibRepo.SetData([]model.Library{{ID: 1, Path: libPath}})
 			folder = &model.Folder{
 				ID:        "1",
 				LibraryID: 1,
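The later test hunks in this file exercise Unicode normalization: Apple Music writes M3U entries in NFC ("è" as one code point), while the macOS filesystem, and therefore the stored paths, uses NFD ("e" plus a combining accent). A small self-contained illustration of why both sides must be normalized to the same form before comparing; this mirrors the approach taken in the diff but is a sketch, not the production code path:

package main

import (
	"fmt"
	"strings"

	"golang.org/x/text/unicode/norm"
)

func main() {
	fromPlaylist := norm.NFC.String("artist/Michèle/song.mp3") // as written by Apple Music
	fromDatabase := norm.NFD.String("artist/Michèle/song.mp3") // as stored via the macOS filesystem

	// Different byte sequences, even though they render identically.
	fmt.Println(fromPlaylist == fromDatabase) // false

	// Normalizing both to NFD (and lowercasing) makes the lookup key stable.
	key := func(p string) string { return strings.ToLower(norm.NFD.String(p)) }
	fmt.Println(key(fromPlaylist) == key(fromDatabase)) // true
}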
@ -112,6 +113,224 @@ var _ = Describe("Playlists", func() {
|
|||||||
Expect(err.Error()).To(ContainSubstring("line 19, column 1: invalid character '\\n'"))
|
Expect(err.Error()).To(ContainSubstring("line 19, column 1: invalid character '\\n'"))
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
|
Describe("Cross-library relative paths", func() {
|
||||||
|
var tmpDir, plsDir, songsDir string
|
||||||
|
|
||||||
|
BeforeEach(func() {
|
||||||
|
// Create temp directory structure
|
||||||
|
tmpDir = GinkgoT().TempDir()
|
||||||
|
plsDir = tmpDir + "/playlists"
|
||||||
|
songsDir = tmpDir + "/songs"
|
||||||
|
Expect(os.Mkdir(plsDir, 0755)).To(Succeed())
|
||||||
|
Expect(os.Mkdir(songsDir, 0755)).To(Succeed())
|
||||||
|
|
||||||
|
// Setup two different libraries with paths matching our temp structure
|
||||||
|
mockLibRepo.SetData([]model.Library{
|
||||||
|
{ID: 1, Path: songsDir},
|
||||||
|
{ID: 2, Path: plsDir},
|
||||||
|
})
|
||||||
|
|
||||||
|
// Create a mock media file repository that returns files for both libraries
|
||||||
|
// Note: The paths are relative to their respective library roots
|
||||||
|
ds.MockedMediaFile = &mockedMediaFileFromListRepo{
|
||||||
|
data: []string{
|
||||||
|
"abc.mp3", // This is songs/abc.mp3 relative to songsDir
|
||||||
|
"def.mp3", // This is playlists/def.mp3 relative to plsDir
|
||||||
|
},
|
||||||
|
}
|
||||||
|
ps = core.NewPlaylists(ds)
|
||||||
|
})
|
||||||
|
|
||||||
|
It("handles relative paths that reference files in other libraries", func() {
|
||||||
|
// Create a temporary playlist file with relative path
|
||||||
|
plsContent := "#PLAYLIST:Cross Library Test\n../songs/abc.mp3\ndef.mp3"
|
||||||
|
plsFile := plsDir + "/test.m3u"
|
||||||
|
Expect(os.WriteFile(plsFile, []byte(plsContent), 0600)).To(Succeed())
|
||||||
|
|
||||||
|
// Playlist is in the Playlists library folder
|
||||||
|
// Important: Path should be relative to LibraryPath, and Name is the folder name
|
||||||
|
plsFolder := &model.Folder{
|
||||||
|
ID: "2",
|
||||||
|
LibraryID: 2,
|
||||||
|
LibraryPath: plsDir,
|
||||||
|
Path: "",
|
||||||
|
Name: "",
|
||||||
|
}
|
||||||
|
|
||||||
|
pls, err := ps.ImportFile(ctx, plsFolder, "test.m3u")
|
||||||
|
Expect(err).ToNot(HaveOccurred())
|
||||||
|
Expect(pls.Tracks).To(HaveLen(2))
|
||||||
|
Expect(pls.Tracks[0].Path).To(Equal("abc.mp3")) // From songsDir library
|
||||||
|
Expect(pls.Tracks[1].Path).To(Equal("def.mp3")) // From plsDir library
|
||||||
|
})
|
||||||
|
|
||||||
|
It("ignores paths that point outside all libraries", func() {
|
||||||
|
// Create a temporary playlist file with path outside libraries
|
||||||
|
plsContent := "#PLAYLIST:Outside Test\n../../outside.mp3\nabc.mp3"
|
||||||
|
plsFile := plsDir + "/test.m3u"
|
||||||
|
Expect(os.WriteFile(plsFile, []byte(plsContent), 0600)).To(Succeed())
|
||||||
|
|
||||||
|
plsFolder := &model.Folder{
|
||||||
|
ID: "2",
|
||||||
|
LibraryID: 2,
|
||||||
|
LibraryPath: plsDir,
|
||||||
|
Path: "",
|
||||||
|
Name: "",
|
||||||
|
}
|
||||||
|
|
||||||
|
pls, err := ps.ImportFile(ctx, plsFolder, "test.m3u")
|
||||||
|
Expect(err).ToNot(HaveOccurred())
|
||||||
|
// Should only find abc.mp3, not outside.mp3
|
||||||
|
Expect(pls.Tracks).To(HaveLen(1))
|
||||||
|
Expect(pls.Tracks[0].Path).To(Equal("abc.mp3"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("handles relative paths with multiple '../' components", func() {
|
||||||
|
// Create a nested structure: tmpDir/playlists/subfolder/test.m3u
|
||||||
|
subFolder := plsDir + "/subfolder"
|
||||||
|
Expect(os.Mkdir(subFolder, 0755)).To(Succeed())
|
||||||
|
|
||||||
|
// Create the media file in the subfolder directory
|
||||||
|
// The mock will return it as "def.mp3" relative to plsDir
|
||||||
|
ds.MockedMediaFile = &mockedMediaFileFromListRepo{
|
||||||
|
data: []string{
|
||||||
|
"abc.mp3", // From songsDir library
|
||||||
|
"def.mp3", // From plsDir library root
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
// From subfolder, ../../songs/abc.mp3 should resolve to songs library
|
||||||
|
// ../def.mp3 should resolve to plsDir/def.mp3
|
||||||
|
plsContent := "#PLAYLIST:Nested Test\n../../songs/abc.mp3\n../def.mp3"
|
||||||
|
plsFile := subFolder + "/test.m3u"
|
||||||
|
Expect(os.WriteFile(plsFile, []byte(plsContent), 0600)).To(Succeed())
|
||||||
|
|
||||||
|
// The folder: AbsolutePath = LibraryPath + Path + Name
|
||||||
|
// So for /playlists/subfolder: LibraryPath=/playlists, Path="", Name="subfolder"
|
||||||
|
plsFolder := &model.Folder{
|
||||||
|
ID: "2",
|
||||||
|
LibraryID: 2,
|
||||||
|
LibraryPath: plsDir,
|
||||||
|
Path: "", // Empty because subfolder is directly under library root
|
||||||
|
Name: "subfolder", // The folder name
|
||||||
|
}
|
||||||
|
|
||||||
|
pls, err := ps.ImportFile(ctx, plsFolder, "test.m3u")
|
||||||
|
Expect(err).ToNot(HaveOccurred())
|
||||||
|
Expect(pls.Tracks).To(HaveLen(2))
|
||||||
|
Expect(pls.Tracks[0].Path).To(Equal("abc.mp3")) // From songsDir library
|
||||||
|
Expect(pls.Tracks[1].Path).To(Equal("def.mp3")) // From plsDir library root
|
||||||
|
})
|
||||||
|
|
||||||
|
It("correctly resolves libraries when one path is a prefix of another", func() {
|
||||||
|
// This tests the bug where /music would match before /music-classical
|
||||||
|
// Create temp directory structure with prefix conflict
|
||||||
|
tmpDir := GinkgoT().TempDir()
|
||||||
|
musicDir := tmpDir + "/music"
|
||||||
|
musicClassicalDir := tmpDir + "/music-classical"
|
||||||
|
Expect(os.Mkdir(musicDir, 0755)).To(Succeed())
|
||||||
|
Expect(os.Mkdir(musicClassicalDir, 0755)).To(Succeed())
|
||||||
|
|
||||||
|
// Setup two libraries where one is a prefix of the other
|
||||||
|
mockLibRepo.SetData([]model.Library{
|
||||||
|
{ID: 1, Path: musicDir}, // /tmp/xxx/music
|
||||||
|
{ID: 2, Path: musicClassicalDir}, // /tmp/xxx/music-classical
|
||||||
|
})
|
||||||
|
|
||||||
|
// Mock will return tracks from both libraries
|
||||||
|
ds.MockedMediaFile = &mockedMediaFileFromListRepo{
|
||||||
|
data: []string{
|
||||||
|
"rock.mp3", // From music library
|
||||||
|
"bach.mp3", // From music-classical library
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create playlist in music library that references music-classical
|
||||||
|
plsContent := "#PLAYLIST:Cross Prefix Test\nrock.mp3\n../music-classical/bach.mp3"
|
||||||
|
plsFile := musicDir + "/test.m3u"
|
||||||
|
Expect(os.WriteFile(plsFile, []byte(plsContent), 0600)).To(Succeed())
|
||||||
|
|
||||||
|
plsFolder := &model.Folder{
|
||||||
|
ID: "1",
|
||||||
|
LibraryID: 1,
|
||||||
|
LibraryPath: musicDir,
|
||||||
|
Path: "",
|
||||||
|
Name: "",
|
||||||
|
}
|
||||||
|
|
||||||
|
pls, err := ps.ImportFile(ctx, plsFolder, "test.m3u")
|
||||||
|
Expect(err).ToNot(HaveOccurred())
|
||||||
|
Expect(pls.Tracks).To(HaveLen(2))
|
||||||
|
Expect(pls.Tracks[0].Path).To(Equal("rock.mp3")) // From music library
|
||||||
|
Expect(pls.Tracks[1].Path).To(Equal("bach.mp3")) // From music-classical library (not music!)
|
||||||
|
})
|
||||||
|
|
||||||
|
It("correctly handles identical relative paths from different libraries", func() {
|
||||||
|
// This tests the bug where two libraries have files at the same relative path
|
||||||
|
// and only one appears in the playlist
|
||||||
|
tmpDir := GinkgoT().TempDir()
|
||||||
|
musicDir := tmpDir + "/music"
|
||||||
|
classicalDir := tmpDir + "/classical"
|
||||||
|
Expect(os.Mkdir(musicDir, 0755)).To(Succeed())
|
||||||
|
Expect(os.Mkdir(classicalDir, 0755)).To(Succeed())
|
||||||
|
Expect(os.MkdirAll(musicDir+"/album", 0755)).To(Succeed())
|
||||||
|
Expect(os.MkdirAll(classicalDir+"/album", 0755)).To(Succeed())
|
||||||
|
// Create placeholder files so paths resolve correctly
|
||||||
|
Expect(os.WriteFile(musicDir+"/album/track.mp3", []byte{}, 0600)).To(Succeed())
|
||||||
|
Expect(os.WriteFile(classicalDir+"/album/track.mp3", []byte{}, 0600)).To(Succeed())
|
||||||
|
|
||||||
|
// Both libraries have a file at "album/track.mp3"
|
||||||
|
mockLibRepo.SetData([]model.Library{
|
||||||
|
{ID: 1, Path: musicDir},
|
||||||
|
{ID: 2, Path: classicalDir},
|
||||||
|
})
|
||||||
|
|
||||||
|
// Mock returns files with same relative path but different IDs and library IDs
|
||||||
|
// Keys use the library-qualified format: "libraryID:path"
|
||||||
|
ds.MockedMediaFile = &mockedMediaFileRepo{
|
||||||
|
data: map[string]model.MediaFile{
|
||||||
|
"1:album/track.mp3": {ID: "music-track", Path: "album/track.mp3", LibraryID: 1, Title: "Rock Song"},
|
||||||
|
"2:album/track.mp3": {ID: "classical-track", Path: "album/track.mp3", LibraryID: 2, Title: "Classical Piece"},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
// Recreate playlists service to pick up new mock
|
||||||
|
ps = core.NewPlaylists(ds)
|
||||||
|
|
||||||
|
// Create playlist in music library that references both tracks
|
||||||
|
plsContent := "#PLAYLIST:Same Path Test\nalbum/track.mp3\n../classical/album/track.mp3"
|
||||||
|
plsFile := musicDir + "/test.m3u"
|
||||||
|
Expect(os.WriteFile(plsFile, []byte(plsContent), 0600)).To(Succeed())
|
||||||
|
|
||||||
|
plsFolder := &model.Folder{
|
||||||
|
ID: "1",
|
||||||
|
LibraryID: 1,
|
||||||
|
LibraryPath: musicDir,
|
||||||
|
Path: "",
|
||||||
|
Name: "",
|
||||||
|
}
|
||||||
|
|
||||||
|
pls, err := ps.ImportFile(ctx, plsFolder, "test.m3u")
|
||||||
|
Expect(err).ToNot(HaveOccurred())
|
||||||
|
|
||||||
|
// Should have BOTH tracks, not just one
|
||||||
|
Expect(pls.Tracks).To(HaveLen(2), "Playlist should contain both tracks with same relative path")
|
||||||
|
|
||||||
|
// Verify we got tracks from DIFFERENT libraries (the key fix!)
|
||||||
|
// Collect the library IDs
|
||||||
|
libIDs := make(map[int]bool)
|
||||||
|
for _, track := range pls.Tracks {
|
||||||
|
libIDs[track.LibraryID] = true
|
||||||
|
}
|
||||||
|
Expect(libIDs).To(HaveLen(2), "Tracks should come from two different libraries")
|
||||||
|
Expect(libIDs[1]).To(BeTrue(), "Should have track from library 1")
|
||||||
|
Expect(libIDs[2]).To(BeTrue(), "Should have track from library 2")
|
||||||
|
|
||||||
|
// Both tracks should have the same relative path
|
||||||
|
Expect(pls.Tracks[0].Path).To(Equal("album/track.mp3"))
|
||||||
|
Expect(pls.Tracks[1].Path).To(Equal("album/track.mp3"))
|
||||||
|
})
|
||||||
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
Describe("ImportM3U", func() {
|
Describe("ImportM3U", func() {
|
||||||
@ -119,7 +338,7 @@ var _ = Describe("Playlists", func() {
|
|||||||
BeforeEach(func() {
|
BeforeEach(func() {
|
||||||
repo = &mockedMediaFileFromListRepo{}
|
repo = &mockedMediaFileFromListRepo{}
|
||||||
ds.MockedMediaFile = repo
|
ds.MockedMediaFile = repo
|
||||||
ps = NewPlaylists(ds)
|
ps = core.NewPlaylists(ds)
|
||||||
mockLibRepo.SetData([]model.Library{{ID: 1, Path: "/music"}, {ID: 2, Path: "/new"}})
|
mockLibRepo.SetData([]model.Library{{ID: 1, Path: "/music"}, {ID: 2, Path: "/new"}})
|
||||||
ctx = request.WithUser(ctx, model.User{ID: "123"})
|
ctx = request.WithUser(ctx, model.User{ID: "123"})
|
||||||
})
|
})
|
||||||
@ -206,53 +425,23 @@ var _ = Describe("Playlists", func() {
|
|||||||
Expect(pls.Tracks[0].Path).To(Equal("abc/tEsT1.Mp3"))
|
Expect(pls.Tracks[0].Path).To(Equal("abc/tEsT1.Mp3"))
|
||||||
})
|
})
|
||||||
|
|
||||||
It("handles Unicode normalization when comparing paths", func() {
|
It("handles Unicode normalization when comparing paths (NFD vs NFC)", func() {
|
||||||
// Test case for Apple Music playlists that use NFC encoding vs macOS filesystem NFD
|
// Simulate macOS filesystem: stores paths in NFD (decomposed) form
|
||||||
// The character "è" can be represented as NFC (single codepoint) or NFD (e + combining accent)
|
// "è" (U+00E8) in NFC becomes "e" + "◌̀" (U+0065 + U+0300) in NFD
|
||||||
|
nfdPath := "artist/Mich" + string([]rune{'e', '\u0300'}) + "le/song.mp3" // NFD: e + combining grave
|
||||||
const pathWithAccents = "artist/Michèle Desrosiers/album/Noël.m4a"
|
|
||||||
|
|
||||||
// Simulate a database entry with NFD encoding (as stored by macOS filesystem)
|
|
||||||
nfdPath := norm.NFD.String(pathWithAccents)
|
|
||||||
repo.data = []string{nfdPath}
|
repo.data = []string{nfdPath}
|
||||||
|
|
||||||
// Simulate an Apple Music M3U playlist entry with NFC encoding
|
// Simulate Apple Music M3U: uses NFC (composed) form
|
||||||
nfcPath := norm.NFC.String("/music/" + pathWithAccents)
|
nfcPath := "/music/artist/Mich\u00E8le/song.mp3" // NFC: single è character
|
||||||
m3u := strings.Join([]string{
|
m3u := nfcPath + "\n"
|
||||||
nfcPath,
|
|
||||||
}, "\n")
|
|
||||||
f := strings.NewReader(m3u)
|
f := strings.NewReader(m3u)
|
||||||
|
|
||||||
pls, err := ps.ImportM3U(ctx, f)
|
pls, err := ps.ImportM3U(ctx, f)
|
||||||
Expect(err).ToNot(HaveOccurred())
|
Expect(err).ToNot(HaveOccurred())
|
||||||
Expect(pls.Tracks).To(HaveLen(1), "Should find the track despite Unicode normalization differences")
|
Expect(pls.Tracks).To(HaveLen(1))
|
||||||
|
// Should match despite different Unicode normalization forms
|
||||||
Expect(pls.Tracks[0].Path).To(Equal(nfdPath))
|
Expect(pls.Tracks[0].Path).To(Equal(nfdPath))
|
||||||
})
|
})
|
||||||
})
|
|
||||||
|
|
||||||
Describe("normalizePathForComparison", func() {
|
|
||||||
It("normalizes Unicode characters to NFC form and converts to lowercase", func() {
|
|
||||||
// Test with NFD (decomposed) input - as would come from macOS filesystem
|
|
||||||
nfdPath := norm.NFD.String("Michèle") // Explicitly convert to NFD form
|
|
||||||
normalized := normalizePathForComparison(nfdPath)
|
|
||||||
Expect(normalized).To(Equal("michèle"))
|
|
||||||
|
|
||||||
// Test with NFC (composed) input - as would come from Apple Music M3U
|
|
||||||
nfcPath := "Michèle" // This might be in NFC form
|
|
||||||
normalizedNfc := normalizePathForComparison(nfcPath)
|
|
||||||
|
|
||||||
// Ensure the two paths are not equal in their original forms
|
|
||||||
Expect(nfdPath).ToNot(Equal(nfcPath))
|
|
||||||
|
|
||||||
// Both should normalize to the same result
|
|
||||||
Expect(normalized).To(Equal(normalizedNfc))
|
|
||||||
})
|
|
||||||
|
|
||||||
It("handles paths with mixed case and Unicode characters", func() {
|
|
||||||
path := "Artist/Noël Coward/Album/Song.mp3"
|
|
||||||
normalized := normalizePathForComparison(path)
|
|
||||||
Expect(normalized).To(Equal("artist/noël coward/album/song.mp3"))
|
|
||||||
})
|
|
||||||
})
|
})
|
||||||
|
|
||||||
Describe("InPlaylistsPath", func() {
|
Describe("InPlaylistsPath", func() {
|
||||||
@ -269,27 +458,27 @@ var _ = Describe("Playlists", func() {
|
|||||||
|
|
||||||
It("returns true if PlaylistsPath is empty", func() {
|
It("returns true if PlaylistsPath is empty", func() {
|
||||||
conf.Server.PlaylistsPath = ""
|
conf.Server.PlaylistsPath = ""
|
||||||
Expect(InPlaylistsPath(folder)).To(BeTrue())
|
Expect(core.InPlaylistsPath(folder)).To(BeTrue())
|
||||||
})
|
})
|
||||||
|
|
||||||
It("returns true if PlaylistsPath is any (**/**)", func() {
|
It("returns true if PlaylistsPath is any (**/**)", func() {
|
||||||
conf.Server.PlaylistsPath = "**/**"
|
conf.Server.PlaylistsPath = "**/**"
|
||||||
Expect(InPlaylistsPath(folder)).To(BeTrue())
|
Expect(core.InPlaylistsPath(folder)).To(BeTrue())
|
||||||
})
|
})
|
||||||
|
|
||||||
It("returns true if folder is in PlaylistsPath", func() {
|
It("returns true if folder is in PlaylistsPath", func() {
|
||||||
conf.Server.PlaylistsPath = "other/**:playlists/**"
|
conf.Server.PlaylistsPath = "other/**:playlists/**"
|
||||||
Expect(InPlaylistsPath(folder)).To(BeTrue())
|
Expect(core.InPlaylistsPath(folder)).To(BeTrue())
|
||||||
})
|
})
|
||||||
|
|
||||||
It("returns false if folder is not in PlaylistsPath", func() {
|
It("returns false if folder is not in PlaylistsPath", func() {
|
||||||
conf.Server.PlaylistsPath = "other"
|
conf.Server.PlaylistsPath = "other"
|
||||||
Expect(InPlaylistsPath(folder)).To(BeFalse())
|
Expect(core.InPlaylistsPath(folder)).To(BeFalse())
|
||||||
})
|
})
|
||||||
|
|
||||||
It("returns true if for a playlist in root of MusicFolder if PlaylistsPath is '.'", func() {
|
It("returns true if for a playlist in root of MusicFolder if PlaylistsPath is '.'", func() {
|
||||||
conf.Server.PlaylistsPath = "."
|
conf.Server.PlaylistsPath = "."
|
||||||
Expect(InPlaylistsPath(folder)).To(BeFalse())
|
Expect(core.InPlaylistsPath(folder)).To(BeFalse())
|
||||||
|
|
||||||
folder2 := model.Folder{
|
folder2 := model.Folder{
|
||||||
LibraryPath: "/music",
|
LibraryPath: "/music",
|
||||||
@ -297,22 +486,47 @@ var _ = Describe("Playlists", func() {
|
|||||||
Name: ".",
|
Name: ".",
|
||||||
}
|
}
|
||||||
|
|
||||||
Expect(InPlaylistsPath(folder2)).To(BeTrue())
|
Expect(core.InPlaylistsPath(folder2)).To(BeTrue())
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
-// mockedMediaFileRepo's FindByPaths method returns a list of MediaFiles with the same paths as the input
+// mockedMediaFileRepo's FindByPaths method returns MediaFiles for the given paths.
+// If data map is provided, looks up files by key; otherwise creates them from paths.
type mockedMediaFileRepo struct {
	model.MediaFileRepository
+	data map[string]model.MediaFile
}

func (r *mockedMediaFileRepo) FindByPaths(paths []string) (model.MediaFiles, error) {
	var mfs model.MediaFiles

+	// If data map provided, look up files
+	if r.data != nil {
+		for _, path := range paths {
+			if mf, ok := r.data[path]; ok {
+				mfs = append(mfs, mf)
+			}
+		}
+		return mfs, nil
+	}
+
+	// Otherwise, create MediaFiles from paths
	for idx, path := range paths {
+		// Strip library qualifier if present (format: "libraryID:path")
+		actualPath := path
+		libraryID := 1
+		if parts := strings.SplitN(path, ":", 2); len(parts) == 2 {
+			if id, err := strconv.Atoi(parts[0]); err == nil {
+				libraryID = id
+				actualPath = parts[1]
+			}
+		}
+
		mfs = append(mfs, model.MediaFile{
			ID:   strconv.Itoa(idx),
-			Path: path,
+			Path:      actualPath,
+			LibraryID: libraryID,
		})
	}
	return mfs, nil
@@ -324,13 +538,38 @@ type mockedMediaFileFromListRepo struct {
	data []string
}

-func (r *mockedMediaFileFromListRepo) FindByPaths([]string) (model.MediaFiles, error) {
+func (r *mockedMediaFileFromListRepo) FindByPaths(paths []string) (model.MediaFiles, error) {
	var mfs model.MediaFiles
-	for idx, path := range r.data {
-		mfs = append(mfs, model.MediaFile{
-			ID:   strconv.Itoa(idx),
-			Path: path,
-		})
+	for idx, dataPath := range r.data {
+		// Normalize the data path to NFD (simulates macOS filesystem storage)
+		normalizedDataPath := norm.NFD.String(dataPath)
+
+		for _, requestPath := range paths {
+			// Strip library qualifier if present (format: "libraryID:path")
+			actualPath := requestPath
+			libraryID := 1
+			if parts := strings.SplitN(requestPath, ":", 2); len(parts) == 2 {
+				if id, err := strconv.Atoi(parts[0]); err == nil {
+					libraryID = id
+					actualPath = parts[1]
+				}
+			}
+
+			// The request path should already be normalized to NFD by production code
+			// before calling FindByPaths (to match DB storage)
+			normalizedRequestPath := norm.NFD.String(actualPath)
+
+			// Case-insensitive comparison (like SQL's "collate nocase")
+			if strings.EqualFold(normalizedRequestPath, normalizedDataPath) {
+				mfs = append(mfs, model.MediaFile{
+					ID:        strconv.Itoa(idx),
+					Path:      dataPath, // Return original path from DB
+					LibraryID: libraryID,
+				})
+				break
+			}
+		}
	}
	return mfs, nil
}
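The mock above mirrors how macOS stores file names in decomposed Unicode form (NFD) while requests may arrive precomposed (NFC), and how SQLite's "collate nocase" ignores case. A minimal, self-contained sketch of that comparison, assuming only the standard golang.org/x/text/unicode/norm package (samePath is an illustrative helper, not a function from this codebase):

package main

import (
	"fmt"
	"strings"

	"golang.org/x/text/unicode/norm"
)

// samePath reports whether two paths name the same file, ignoring
// Unicode normalization form (NFC vs NFD) and letter case.
func samePath(a, b string) bool {
	return strings.EqualFold(norm.NFD.String(a), norm.NFD.String(b))
}

func main() {
	nfc := "Béla Fleck/album.mp3"      // "é" as a single precomposed rune (NFC)
	nfd := "Be\u0301la Fleck/album.mp3" // "e" + combining acute accent (NFD)
	fmt.Println(samePath(nfc, nfd))     // true
}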
@@ -32,6 +32,7 @@ type Submission struct {
}

type nowPlayingEntry struct {
+	ctx      context.Context
	userId   string
	track    *model.MediaFile
	position int
@@ -220,15 +221,17 @@ func (p *playTracker) NowPlaying(ctx context.Context, playerId string, playerNam
	}
	player, _ := request.PlayerFrom(ctx)
	if player.ScrobbleEnabled {
-		p.enqueueNowPlaying(playerId, user.ID, mf, position)
+		p.enqueueNowPlaying(ctx, playerId, user.ID, mf, position)
	}
	return nil
}

-func (p *playTracker) enqueueNowPlaying(playerId string, userId string, track *model.MediaFile, position int) {
+func (p *playTracker) enqueueNowPlaying(ctx context.Context, playerId string, userId string, track *model.MediaFile, position int) {
	p.npMu.Lock()
	defer p.npMu.Unlock()
+	ctx = context.WithoutCancel(ctx) // Prevent cancellation from affecting background processing
	p.npQueue[playerId] = nowPlayingEntry{
+		ctx:      ctx,
		userId:   userId,
		track:    track,
		position: position,
@@ -267,7 +270,7 @@ func (p *playTracker) nowPlayingWorker() {

		// Process entries without holding lock
		for _, entry := range entries {
-			p.dispatchNowPlaying(context.Background(), entry.userId, entry.track, entry.position)
+			p.dispatchNowPlaying(entry.ctx, entry.userId, entry.track, entry.position)
		}
	}
}
@@ -345,8 +348,14 @@ func (p *playTracker) incPlay(ctx context.Context, track *model.MediaFile, times
		}
		for _, artist := range track.Participants[model.RoleArtist] {
			err = tx.Artist(ctx).IncPlayCount(artist.ID, timestamp)
+			if err != nil {
+				return err
+			}
		}
-		return err
+		if conf.Server.EnableScrobbleHistory {
+			return tx.Scrobble(ctx).RecordScrobble(track.ID, timestamp)
+		}
+		return nil
	})
}

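The enqueueNowPlaying change relies on context.WithoutCancel (Go 1.21+): the derived context keeps the request's values (user, player) but is never canceled when the HTTP request finishes, so the background worker can still read them later. A small illustrative sketch, where ctxKey is a stand-in for the request-scoped keys used in the real code:

package main

import (
	"context"
	"fmt"
)

type ctxKey string // stand-in for the request-scoped keys used in the real code

func main() {
	parent, cancel := context.WithCancel(context.Background())
	parent = context.WithValue(parent, ctxKey("user"), "testuser")

	// Detach from cancellation but keep the values.
	bg := context.WithoutCancel(parent)

	cancel() // the request ends

	fmt.Println(parent.Err())             // context.Canceled
	fmt.Println(bg.Err())                 // <nil> - background work is unaffected
	fmt.Println(bg.Value(ctxKey("user"))) // testuser - values still flow through
}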
@@ -61,7 +61,7 @@ var _ = Describe("PlayTracker", func() {

	BeforeEach(func() {
		DeferCleanup(configtest.SetupConfig())
-		ctx = context.Background()
+		ctx = GinkgoT().Context()
		ctx = request.WithUser(ctx, model.User{ID: "u-1"})
		ctx = request.WithPlayer(ctx, model.Player{ScrobbleEnabled: true})
		ds = &tests.MockDataStore{}
@@ -170,6 +170,17 @@ var _ = Describe("PlayTracker", func() {
			Expect(err).ToNot(HaveOccurred())
			Expect(eventBroker.getEvents()).To(BeEmpty())
		})

+		It("passes user to scrobbler via context (fix for issue #4787)", func() {
+			ctx = request.WithUser(ctx, model.User{ID: "u-1", UserName: "testuser"})
+			ctx = request.WithPlayer(ctx, model.Player{ScrobbleEnabled: true})
+
+			err := tracker.NowPlaying(ctx, "player-1", "player-one", "123", 0)
+			Expect(err).ToNot(HaveOccurred())
+			Eventually(func() bool { return fake.GetNowPlayingCalled() }).Should(BeTrue())
+			// Verify the username was passed through async dispatch via context
+			Eventually(func() string { return fake.GetUsername() }).Should(Equal("testuser"))
+		})
	})

	Describe("GetNowPlaying", func() {
@@ -177,9 +188,9 @@ var _ = Describe("PlayTracker", func() {
			track2 := track
			track2.ID = "456"
			_ = ds.MediaFile(ctx).Put(&track2)
-			ctx = request.WithUser(context.Background(), model.User{UserName: "user-1"})
+			ctx = request.WithUser(GinkgoT().Context(), model.User{UserName: "user-1"})
			_ = tracker.NowPlaying(ctx, "player-1", "player-one", "123", 0)
-			ctx = request.WithUser(context.Background(), model.User{UserName: "user-2"})
+			ctx = request.WithUser(GinkgoT().Context(), model.User{UserName: "user-2"})
			_ = tracker.NowPlaying(ctx, "player-2", "player-two", "456", 0)

			playing, err := tracker.GetNowPlaying(ctx)
@@ -291,6 +302,38 @@ var _ = Describe("PlayTracker", func() {
			Expect(artist1.PlayCount).To(Equal(int64(1)))
			Expect(artist2.PlayCount).To(Equal(int64(1)))
		})

+		Context("Scrobble History", func() {
+			It("records scrobble in repository", func() {
+				conf.Server.EnableScrobbleHistory = true
+				ctx = request.WithUser(ctx, model.User{ID: "u-1", UserName: "user-1"})
+				ts := time.Now()
+
+				err := tracker.Submit(ctx, []Submission{{TrackID: "123", Timestamp: ts}})
+
+				Expect(err).ToNot(HaveOccurred())
+
+				mockDS := ds.(*tests.MockDataStore)
+				mockScrobble := mockDS.Scrobble(ctx).(*tests.MockScrobbleRepo)
+				Expect(mockScrobble.RecordedScrobbles).To(HaveLen(1))
+				Expect(mockScrobble.RecordedScrobbles[0].MediaFileID).To(Equal("123"))
+				Expect(mockScrobble.RecordedScrobbles[0].UserID).To(Equal("u-1"))
+				Expect(mockScrobble.RecordedScrobbles[0].SubmissionTime).To(Equal(ts))
+			})
+
+			It("does not record scrobble when history is disabled", func() {
+				conf.Server.EnableScrobbleHistory = false
+				ctx = request.WithUser(ctx, model.User{ID: "u-1", UserName: "user-1"})
+				ts := time.Now()
+
+				err := tracker.Submit(ctx, []Submission{{TrackID: "123", Timestamp: ts}})
+
+				Expect(err).ToNot(HaveOccurred())
+				mockDS := ds.(*tests.MockDataStore)
+				mockScrobble := mockDS.Scrobble(ctx).(*tests.MockScrobbleRepo)
+				Expect(mockScrobble.RecordedScrobbles).To(HaveLen(0))
+			})
+		})
	})

	Describe("Plugin scrobbler logic", func() {
@@ -352,7 +395,7 @@ var _ = Describe("PlayTracker", func() {
		var mockedBS *mockBufferedScrobbler

		BeforeEach(func() {
-			ctx = context.Background()
+			ctx = GinkgoT().Context()
			ctx = request.WithUser(ctx, model.User{ID: "u-1"})
			ctx = request.WithPlayer(ctx, model.Player{ScrobbleEnabled: true})
			ds = &tests.MockDataStore{}
@@ -396,6 +439,7 @@ type fakeScrobbler struct {
	nowPlayingCalled atomic.Bool
	ScrobbleCalled   atomic.Bool
	userID           atomic.Pointer[string]
+	username         atomic.Pointer[string]
	track            atomic.Pointer[model.MediaFile]
	position         atomic.Int32
	LastScrobble     atomic.Pointer[Scrobble]
@@ -421,6 +465,13 @@ func (f *fakeScrobbler) GetPosition() int {
	return int(f.position.Load())
}

+func (f *fakeScrobbler) GetUsername() string {
+	if p := f.username.Load(); p != nil {
+		return *p
+	}
+	return ""
+}
+
func (f *fakeScrobbler) IsAuthorized(ctx context.Context, userId string) bool {
	return f.Error == nil && f.Authorized
}
@@ -431,6 +482,16 @@ func (f *fakeScrobbler) NowPlaying(ctx context.Context, userId string, track *mo
		return f.Error
	}
	f.userID.Store(&userId)
+	// Capture username from context (this is what plugin scrobblers do)
+	username, _ := request.UsernameFrom(ctx)
+	if username == "" {
+		if u, ok := request.UserFrom(ctx); ok {
+			username = u.UserName
+		}
+	}
+	if username != "" {
+		f.username.Store(&username)
+	}
	f.track.Store(track)
	f.position.Store(int32(position))
	return nil
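The fakeScrobbler stores everything it observes in atomic values so the test's Eventually(...) polls can read them from another goroutine without a data race. A minimal sketch of that pattern, using only sync/atomic (fakeSink and Record are illustrative names, not part of the codebase):

package main

import (
	"fmt"
	"sync/atomic"
)

// fakeSink records the last value it was given, safe for concurrent readers.
type fakeSink struct {
	called atomic.Bool
	last   atomic.Pointer[string]
}

func (f *fakeSink) Record(v string) {
	f.last.Store(&v)
	f.called.Store(true)
}

// Last returns the most recently recorded value, or "" if none.
func (f *fakeSink) Last() string {
	if p := f.last.Load(); p != nil {
		return *p
	}
	return ""
}

func main() {
	var f fakeSink
	go f.Record("testuser")
	for !f.called.Load() { // a test would use Eventually(...) instead of spinning
	}
	fmt.Println(f.Last()) // testuser
}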
db/migrations/20251206013022_create_scrobbles_table.sql (new file, 20 lines)
@@ -0,0 +1,20 @@
+-- +goose Up
+-- +goose StatementBegin
+CREATE TABLE scrobbles(
+	media_file_id VARCHAR(255) NOT NULL
+		REFERENCES media_file(id)
+			ON DELETE CASCADE
+			ON UPDATE CASCADE,
+	user_id VARCHAR(255) NOT NULL
+		REFERENCES user(id)
+			ON DELETE CASCADE
+			ON UPDATE CASCADE,
+	submission_time INTEGER NOT NULL
+);
+CREATE INDEX scrobbles_date ON scrobbles (submission_time);
+-- +goose StatementEnd
+
+-- +goose Down
+-- +goose StatementBegin
+DROP TABLE scrobbles;
+-- +goose StatementEnd
go.mod (35 changed lines)
@@ -39,13 +39,13 @@ require (
	github.com/knqyf263/go-plugin v0.9.0
	github.com/kr/pretty v0.3.1
	github.com/lestrrat-go/jwx/v2 v2.1.6
-	github.com/maruel/natural v1.2.1
+	github.com/maruel/natural v1.3.0
	github.com/matoous/go-nanoid/v2 v2.1.0
	github.com/mattn/go-sqlite3 v1.14.32
	github.com/microcosm-cc/bluemonday v1.0.27
	github.com/mileusna/useragent v1.3.5
-	github.com/onsi/ginkgo/v2 v2.27.2
+	github.com/onsi/ginkgo/v2 v2.27.3
-	github.com/onsi/gomega v1.38.2
+	github.com/onsi/gomega v1.38.3
	github.com/pelletier/go-toml/v2 v2.2.4
	github.com/pocketbase/dbx v1.11.0
	github.com/pressly/goose/v3 v3.26.0
@@ -54,21 +54,22 @@ require (
	github.com/robfig/cron/v3 v3.0.1
	github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06
	github.com/sirupsen/logrus v1.9.3
-	github.com/spf13/cobra v1.10.1
+	github.com/spf13/cobra v1.10.2
	github.com/spf13/viper v1.21.0
	github.com/stretchr/testify v1.11.1
-	github.com/tetratelabs/wazero v1.10.1
+	github.com/tetratelabs/wazero v1.11.0
	github.com/unrolled/secure v1.17.0
	github.com/xrash/smetrics v0.0.0-20250705151800-55b8f293f342
	go.uber.org/goleak v1.3.0
-	golang.org/x/exp v0.0.0-20251113190631-e25ba8c21ef6
+	golang.org/x/exp v0.0.0-20251209150349-8475f28825e9
-	golang.org/x/image v0.33.0
+	golang.org/x/image v0.34.0
-	golang.org/x/net v0.47.0
+	golang.org/x/net v0.48.0
-	golang.org/x/sync v0.18.0
+	golang.org/x/sync v0.19.0
-	golang.org/x/sys v0.38.0
+	golang.org/x/sys v0.39.0
-	golang.org/x/text v0.31.0
+	golang.org/x/term v0.38.0
+	golang.org/x/text v0.32.0
	golang.org/x/time v0.14.0
-	google.golang.org/protobuf v1.36.10
+	google.golang.org/protobuf v1.36.11
	gopkg.in/yaml.v3 v3.0.1
)

@@ -90,7 +91,7 @@ require (
	github.com/goccy/go-json v0.10.5 // indirect
	github.com/goccy/go-yaml v1.18.0 // indirect
	github.com/google/go-cmp v0.7.0 // indirect
-	github.com/google/pprof v0.0.0-20251114195745-4902fdda35c8 // indirect
+	github.com/google/pprof v0.0.0-20251213031049-b05bdaca462f // indirect
	github.com/google/subcommands v1.2.0 // indirect
	github.com/gorilla/css v1.0.1 // indirect
	github.com/hashicorp/errwrap v1.1.0 // indirect
@@ -128,10 +129,10 @@ require (
	go.uber.org/multierr v1.11.0 // indirect
	go.yaml.in/yaml/v2 v2.4.2 // indirect
	go.yaml.in/yaml/v3 v3.0.4 // indirect
-	golang.org/x/crypto v0.45.0 // indirect
+	golang.org/x/crypto v0.46.0 // indirect
-	golang.org/x/mod v0.30.0 // indirect
+	golang.org/x/mod v0.31.0 // indirect
-	golang.org/x/telemetry v0.0.0-20251111182119-bc8e575c7b54 // indirect
+	golang.org/x/telemetry v0.0.0-20251203150158-8fff8a5912fc // indirect
-	golang.org/x/tools v0.39.0 // indirect
+	golang.org/x/tools v0.40.0 // indirect
	gopkg.in/ini.v1 v1.67.0 // indirect
	gopkg.in/natefinch/npipe.v2 v2.0.0-20160621034901-c1b8fa8bdcce // indirect
)
go.sum (70 changed lines)
@@ -99,8 +99,8 @@ github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/go-pipeline v0.0.0-20230411140531-6cbedfc1d3fc h1:hd+uUVsB1vdxohPneMrhGH2YfQuH5hRIK9u4/XCeUtw=
github.com/google/go-pipeline v0.0.0-20230411140531-6cbedfc1d3fc/go.mod h1:SL66SJVysrh7YbDCP9tH30b8a9o/N2HeiQNUm85EKhc=
-github.com/google/pprof v0.0.0-20251114195745-4902fdda35c8 h1:3DsUAV+VNEQa2CUVLxCY3f87278uWfIDhJnbdvDjvmE=
+github.com/google/pprof v0.0.0-20251213031049-b05bdaca462f h1:HU1RgM6NALf/KW9HEY6zry3ADbDKcmpQ+hJedoNGQYQ=
-github.com/google/pprof v0.0.0-20251114195745-4902fdda35c8/go.mod h1:I6V7YzU0XDpsHqbsyrghnFZLO1gwK6NPTNvmetQIk9U=
+github.com/google/pprof v0.0.0-20251213031049-b05bdaca462f/go.mod h1:67FPmZWbr+KDT/VlpWtw6sO9XSjpJmLuHpoLmWiTGgY=
github.com/google/subcommands v1.2.0 h1:vWQspBTo2nEqTUFita5/KeEWlUL8kQObDFbub/EN9oE=
github.com/google/subcommands v1.2.0/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
@@ -162,8 +162,8 @@ github.com/lestrrat-go/jwx/v2 v2.1.6 h1:hxM1gfDILk/l5ylers6BX/Eq1m/pnxe9NBwW6lVf
github.com/lestrrat-go/jwx/v2 v2.1.6/go.mod h1:Y722kU5r/8mV7fYDifjug0r8FK8mZdw0K0GpJw/l8pU=
github.com/lestrrat-go/option v1.0.1 h1:oAzP2fvZGQKWkvHa1/SAcFolBEca1oN+mQ7eooNBEYU=
github.com/lestrrat-go/option v1.0.1/go.mod h1:5ZHFbivi4xwXxhxY9XHDe2FHo6/Z7WWmtT7T5nBBp3I=
-github.com/maruel/natural v1.2.1 h1:G/y4pwtTA07lbQsMefvsmEO0VN0NfqpxprxXDM4R/4o=
+github.com/maruel/natural v1.3.0 h1:VsmCsBmEyrR46RomtgHs5hbKADGRVtliHTyCOLFBpsg=
-github.com/maruel/natural v1.2.1/go.mod h1:v+Rfd79xlw1AgVBjbO0BEQmptqb5HvL/k9GRHB7ZKEg=
+github.com/maruel/natural v1.3.0/go.mod h1:v+Rfd79xlw1AgVBjbO0BEQmptqb5HvL/k9GRHB7ZKEg=
github.com/matoous/go-nanoid/v2 v2.1.0 h1:P64+dmq21hhWdtvZfEAofnvJULaRR1Yib0+PnU669bE=
github.com/matoous/go-nanoid/v2 v2.1.0/go.mod h1:KlbGNQ+FhrUNIHUxZdL63t7tl4LaPkZNpUULS8H4uVM=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
@@ -186,10 +186,10 @@ github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdh
github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
github.com/ogier/pflag v0.0.1 h1:RW6JSWSu/RkSatfcLtogGfFgpim5p7ARQ10ECk5O750=
github.com/ogier/pflag v0.0.1/go.mod h1:zkFki7tvTa0tafRvTBIZTvzYyAu6kQhPZFnshFFPE+g=
-github.com/onsi/ginkgo/v2 v2.27.2 h1:LzwLj0b89qtIy6SSASkzlNvX6WktqurSHwkk2ipF/Ns=
+github.com/onsi/ginkgo/v2 v2.27.3 h1:ICsZJ8JoYafeXFFlFAG75a7CxMsJHwgKwtO+82SE9L8=
-github.com/onsi/ginkgo/v2 v2.27.2/go.mod h1:ArE1D/XhNXBXCBkKOLkbsb2c81dQHCRcF5zwn/ykDRo=
+github.com/onsi/ginkgo/v2 v2.27.3/go.mod h1:ArE1D/XhNXBXCBkKOLkbsb2c81dQHCRcF5zwn/ykDRo=
-github.com/onsi/gomega v1.38.2 h1:eZCjf2xjZAqe+LeWvKb5weQ+NcPwX84kqJ0cZNxok2A=
+github.com/onsi/gomega v1.38.3 h1:eTX+W6dobAYfFeGC2PV6RwXRu/MyT+cQguijutvkpSM=
-github.com/onsi/gomega v1.38.2/go.mod h1:W2MJcYxRGV63b418Ai34Ud0hEdTVXq9NW9+Sx6uXf3k=
+github.com/onsi/gomega v1.38.3/go.mod h1:ZCU1pkQcXDO5Sl9/VVEGlDyp+zm0m1cmeG5TOzLgdh4=
github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4=
github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY=
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
@@ -244,8 +244,8 @@ github.com/spf13/afero v1.15.0 h1:b/YBCLWAJdFWJTN9cLhiXXcD7mzKn9Dm86dNnfyQw1I=
github.com/spf13/afero v1.15.0/go.mod h1:NC2ByUVxtQs4b3sIUphxK0NioZnmxgyCrfzeuq8lxMg=
github.com/spf13/cast v1.10.0 h1:h2x0u2shc1QuLHfxi+cTJvs30+ZAHOGRic8uyGTDWxY=
github.com/spf13/cast v1.10.0/go.mod h1:jNfB8QC9IA6ZuY2ZjDp0KtFO2LZZlg4S/7bzP6qqeHo=
-github.com/spf13/cobra v1.10.1 h1:lJeBwCfmrnXthfAupyUTzJ/J4Nc1RsHC/mSRU2dll/s=
+github.com/spf13/cobra v1.10.2 h1:DMTTonx5m65Ic0GOoRY2c16WCbHxOOw6xxezuLaBpcU=
-github.com/spf13/cobra v1.10.1/go.mod h1:7SmJGaTHFVBY0jW4NXGluQoLvhqFQM+6XSKD+P4XaB0=
+github.com/spf13/cobra v1.10.2/go.mod h1:7C1pvHqHw5A4vrJfjNwvOdzYu0Gml16OCs2GRiTUUS4=
github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk=
github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
@@ -265,8 +265,8 @@ github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
-github.com/tetratelabs/wazero v1.10.1 h1:2DugeJf6VVk58KTPszlNfeeN8AhhpwcZqkJj2wwFuH8=
+github.com/tetratelabs/wazero v1.11.0 h1:+gKemEuKCTevU4d7ZTzlsvgd1uaToIDtlQlmNbwqYhA=
-github.com/tetratelabs/wazero v1.10.1/go.mod h1:DRm5twOQ5Gr1AoEdSi0CLjDQF1J9ZAuyqFIjl1KKfQU=
+github.com/tetratelabs/wazero v1.11.0/go.mod h1:eV28rsN8Q+xwjogd7f4/Pp4xFxO7uOGbLcD/LzB1wiU=
github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY=
github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA=
@@ -298,20 +298,20 @@ golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliY
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
-golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
+golang.org/x/crypto v0.46.0 h1:cKRW/pmt1pKAfetfu+RCEvjvZkA9RimPbh7bhFjGVBU=
-golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
+golang.org/x/crypto v0.46.0/go.mod h1:Evb/oLKmMraqjZ2iQTwDwvCtJkczlDuTmdJXoZVzqU0=
-golang.org/x/exp v0.0.0-20251113190631-e25ba8c21ef6 h1:zfMcR1Cs4KNuomFFgGefv5N0czO2XZpUbxGUy8i8ug0=
+golang.org/x/exp v0.0.0-20251209150349-8475f28825e9 h1:MDfG8Cvcqlt9XXrmEiD4epKn7VJHZO84hejP9Jmp0MM=
-golang.org/x/exp v0.0.0-20251113190631-e25ba8c21ef6/go.mod h1:46edojNIoXTNOhySWIWdix628clX9ODXwPsQuG6hsK0=
+golang.org/x/exp v0.0.0-20251209150349-8475f28825e9/go.mod h1:EPRbTFwzwjXj9NpYyyrvenVh9Y+GFeEvMNh7Xuz7xgU=
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
-golang.org/x/image v0.33.0 h1:LXRZRnv1+zGd5XBUVRFmYEphyyKJjQjCRiOuAP3sZfQ=
+golang.org/x/image v0.34.0 h1:33gCkyw9hmwbZJeZkct8XyR11yH889EQt/QH4VmXMn8=
-golang.org/x/image v0.33.0/go.mod h1:DD3OsTYT9chzuzTQt+zMcOlBHgfoKQb1gry8p76Y1sc=
+golang.org/x/image v0.34.0/go.mod h1:2RNFBZRB+vnwwFil8GkMdRvrJOFd1AzdZI6vOY+eJVU=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
-golang.org/x/mod v0.30.0 h1:fDEXFVZ/fmCKProc/yAXXUijritrDzahmwwefnjoPFk=
+golang.org/x/mod v0.31.0 h1:HaW9xtz0+kOcWKwli0ZXy79Ix+UW/vOfmWI5QVd2tgI=
-golang.org/x/mod v0.30.0/go.mod h1:lAsf5O2EvJeSFMiBxXDki7sCgAxEUcZHXoXMKT4GJKc=
+golang.org/x/mod v0.31.0/go.mod h1:43JraMp9cGx1Rx3AqioxrbrhNsLl2l/iNAvuBkrezpg=
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
@@ -323,8 +323,8 @@ golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
-golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
+golang.org/x/net v0.48.0 h1:zyQRTTrjc33Lhh0fBgT/H3oZq9WuvRR5gPC70xpDiQU=
-golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
+golang.org/x/net v0.48.0/go.mod h1:+ndRgGjkh8FGtu1w1FGbEC31if4VrNVMuKTgcAAnQRY=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -332,8 +332,8 @@ golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
-golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=
+golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=
-golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
+golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180926160741-c2ed4eda69e7/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -350,11 +350,11 @@ golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
-golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
+golang.org/x/sys v0.39.0 h1:CvCKL8MeisomCi6qNZ+wbb0DN9E5AATixKsvNtMoMFk=
-golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
+golang.org/x/sys v0.39.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
-golang.org/x/telemetry v0.0.0-20251111182119-bc8e575c7b54 h1:E2/AqCUMZGgd73TQkxUMcMla25GB9i/5HOdLr+uH7Vo=
+golang.org/x/telemetry v0.0.0-20251203150158-8fff8a5912fc h1:bH6xUXay0AIFMElXG2rQ4uiE+7ncwtiOdPfYK1NK2XA=
-golang.org/x/telemetry v0.0.0-20251111182119-bc8e575c7b54/go.mod h1:hKdjCMrbv9skySur+Nek8Hd0uJ0GuxJIoIX2payrIdQ=
+golang.org/x/telemetry v0.0.0-20251203150158-8fff8a5912fc/go.mod h1:hKdjCMrbv9skySur+Nek8Hd0uJ0GuxJIoIX2payrIdQ=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
@@ -363,6 +363,8 @@ golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
+golang.org/x/term v0.38.0 h1:PQ5pkm/rLO6HnxFR7N2lJHOZX6Kez5Y1gDSJla6jo7Q=
+golang.org/x/term v0.38.0/go.mod h1:bSEAKrOT1W+VSu9TSCMtoGEOUcKxOKgl3LE5QEF/xVg=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
@@ -373,8 +375,8 @@ golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
-golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
+golang.org/x/text v0.32.0 h1:ZD01bjUt1FQ9WJ0ClOL5vxgxOI/sVCNgX1YtKwcY0mU=
-golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
+golang.org/x/text v0.32.0/go.mod h1:o/rUWzghvpD5TXrTIBuJU77MTaN0ljMWE47kxGJQ7jY=
golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI=
golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
@@ -384,12 +386,12 @@ golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
-golang.org/x/tools v0.39.0 h1:ik4ho21kwuQln40uelmciQPp9SipgNDdrafrYA4TmQQ=
+golang.org/x/tools v0.40.0 h1:yLkxfA+Qnul4cs9QA3KnlFu0lVmd8JJfoq+E41uSutA=
-golang.org/x/tools v0.39.0/go.mod h1:JnefbkDPyD8UU2kI5fuf8ZX4/yUeh9W877ZeBONxUqQ=
+golang.org/x/tools v0.40.0/go.mod h1:Ik/tzLRlbscWpqqMRjyWYDisX8bG13FrdXp3o4Sr9lc=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
-google.golang.org/protobuf v1.36.10 h1:AYd7cD/uASjIL6Q9LiTjz8JLcrh/88q5UObnmY3aOOE=
+google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE=
-google.golang.org/protobuf v1.36.10/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco=
+google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
@@ -38,6 +38,7 @@ type DataStore interface {
	User(ctx context.Context) UserRepository
	UserProps(ctx context.Context) UserPropsRepository
	ScrobbleBuffer(ctx context.Context) ScrobbleBufferRepository
+	Scrobble(ctx context.Context) ScrobbleRepository

	Resource(ctx context.Context, model interface{}) ResourceRepository

model/scrobble.go (new file, 13 lines)
@@ -0,0 +1,13 @@
+package model
+
+import "time"
+
+type Scrobble struct {
+	MediaFileID    string
+	UserID         string
+	SubmissionTime time.Time
+}
+
+type ScrobbleRepository interface {
+	RecordScrobble(mediaFileID string, submissionTime time.Time) error
+}
@@ -42,7 +42,9 @@ func (u User) HasLibraryAccess(libraryID int) bool {
type Users []User

type UserRepository interface {
+	ResourceRepository
	CountAll(...QueryOptions) (int64, error)
+	Delete(id string) error
	Get(id string) (*User, error)
	Put(*User) error
	UpdateLastLoginAt(id string) error
@@ -512,6 +512,70 @@ var _ = Describe("AlbumRepository", func() {
			// Clean up the test album created for this test
			_, _ = albumRepo.executeSQL(squirrel.Delete("album").Where(squirrel.Eq{"id": album.ID}))
		})

+		It("removes stale role associations when artist role changes", func() {
+			// Regression test for issue #4242: Composers displayed in albumartist list
+			// This happens when an artist's role changes (e.g., was both albumartist and composer,
+			// now only composer) and the old role association isn't properly removed.
+
+			// Create an artist that will have changing roles
+			artist := &model.Artist{
+				ID:              "role-change-artist-1",
+				Name:            "Role Change Artist",
+				OrderArtistName: "role change artist",
+			}
+			err := createArtistWithLibrary(artistRepo, artist, 1)
+			Expect(err).ToNot(HaveOccurred())
+
+			// Create album with artist as both albumartist and composer
+			album := &model.Album{
+				LibraryID:     1,
+				ID:            "test-album-role-change",
+				Name:          "Test Album Role Change",
+				AlbumArtistID: "role-change-artist-1",
+				AlbumArtist:   "Role Change Artist",
+				Participants: model.Participants{
+					model.RoleAlbumArtist: {
+						{Artist: model.Artist{ID: "role-change-artist-1", Name: "Role Change Artist"}},
+					},
+					model.RoleComposer: {
+						{Artist: model.Artist{ID: "role-change-artist-1", Name: "Role Change Artist"}},
+					},
+				},
+			}
+
+			err = albumRepo.Put(album)
+			Expect(err).ToNot(HaveOccurred())
+
+			// Verify initial state: artist has both albumartist and composer roles
+			expected := []albumArtistRecord{
+				{ArtistID: "role-change-artist-1", Role: "albumartist", SubRole: ""},
+				{ArtistID: "role-change-artist-1", Role: "composer", SubRole: ""},
+			}
+			verifyAlbumArtists(album.ID, expected)
+
+			// Now update album so artist is ONLY a composer (remove albumartist role)
+			album.Participants = model.Participants{
+				model.RoleComposer: {
+					{Artist: model.Artist{ID: "role-change-artist-1", Name: "Role Change Artist"}},
+				},
+			}
+
+			err = albumRepo.Put(album)
+			Expect(err).ToNot(HaveOccurred())
+
+			// Verify that the albumartist role was removed - only composer should remain
+			// This is the key test: before the fix, the albumartist role would remain
+			// causing composers to appear in the albumartist filter
+			expectedAfter := []albumArtistRecord{
+				{ArtistID: "role-change-artist-1", Role: "composer", SubRole: ""},
+			}
+			verifyAlbumArtists(album.ID, expectedAfter)
+
+			// Clean up
+			_, _ = artistRepo.executeSQL(squirrel.Delete("artist").Where(squirrel.Eq{"id": artist.ID}))
+			_, _ = albumRepo.executeSQL(squirrel.Delete("album").Where(squirrel.Eq{"id": album.ID}))
+		})
	})
})

@@ -95,45 +95,82 @@ func (r folderRepository) CountAll(opt ...model.QueryOptions) (int64, error) {
}

func (r folderRepository) GetFolderUpdateInfo(lib model.Library, targetPaths ...string) (map[string]model.FolderUpdateInfo, error) {
+	// If no specific paths, return all folders in the library
+	if len(targetPaths) == 0 {
+		return r.getFolderUpdateInfoAll(lib)
+	}
+
+	// Check if any path is root (return all folders)
+	for _, targetPath := range targetPaths {
+		if targetPath == "" || targetPath == "." {
+			return r.getFolderUpdateInfoAll(lib)
+		}
+	}
+
+	// Process paths in batches to avoid SQLite's expression tree depth limit (max 1000).
+	// Each path generates ~3 conditions, so batch size of 100 keeps us well under the limit.
+	const batchSize = 100
+	result := make(map[string]model.FolderUpdateInfo)
+
+	for batch := range slices.Chunk(targetPaths, batchSize) {
+		batchResult, err := r.getFolderUpdateInfoBatch(lib, batch)
+		if err != nil {
+			return nil, err
+		}
+		for id, info := range batchResult {
+			result[id] = info
+		}
+	}
+
+	return result, nil
+}
+
+// getFolderUpdateInfoAll returns update info for all non-missing folders in the library
+func (r folderRepository) getFolderUpdateInfoAll(lib model.Library) (map[string]model.FolderUpdateInfo, error) {
+	where := And{
+		Eq{"library_id": lib.ID},
+		Eq{"missing": false},
+	}
+	return r.queryFolderUpdateInfo(where)
+}
+
+// getFolderUpdateInfoBatch returns update info for a batch of target paths and their descendants
+func (r folderRepository) getFolderUpdateInfoBatch(lib model.Library, targetPaths []string) (map[string]model.FolderUpdateInfo, error) {
	where := And{
		Eq{"library_id": lib.ID},
		Eq{"missing": false},
	}
-	// If specific paths are requested, include those folders and all their descendants
-	if len(targetPaths) > 0 {
-		// Collect folder IDs for exact target folders and path conditions for descendants
-		folderIDs := make([]string, 0, len(targetPaths))
-		pathConditions := make(Or, 0, len(targetPaths)*2)
-		for _, targetPath := range targetPaths {
-			if targetPath == "" || targetPath == "." {
-				// Root path - include everything in this library
-				pathConditions = Or{}
-				folderIDs = nil
-				break
-			}
-			// Clean the path to normalize it. Paths stored in the folder table do not have leading/trailing slashes.
-			cleanPath := strings.TrimPrefix(targetPath, string(os.PathSeparator))
-			cleanPath = filepath.Clean(cleanPath)
-			// Include the target folder itself by ID
-			folderIDs = append(folderIDs, model.FolderID(lib, cleanPath))
-			// Include all descendants: folders whose path field equals or starts with the target path
-			// Note: Folder.Path is the directory path, so children have path = targetPath
-			pathConditions = append(pathConditions, Eq{"path": cleanPath})
-			pathConditions = append(pathConditions, Like{"path": cleanPath + "/%"})
-		}
-		// Combine conditions: exact folder IDs OR descendant path patterns
-		if len(folderIDs) > 0 {
-			where = append(where, Or{Eq{"id": folderIDs}, pathConditions})
-		} else if len(pathConditions) > 0 {
-			where = append(where, pathConditions)
-		}
-	}
+	// Collect folder IDs for exact target folders and path conditions for descendants
+	folderIDs := make([]string, 0, len(targetPaths))
+	pathConditions := make(Or, 0, len(targetPaths)*2)
+
+	for _, targetPath := range targetPaths {
+		// Clean the path to normalize it. Paths stored in the folder table do not have leading/trailing slashes.
+		cleanPath := strings.TrimPrefix(targetPath, string(os.PathSeparator))
+		cleanPath = filepath.Clean(cleanPath)
+
+		// Include the target folder itself by ID
+		folderIDs = append(folderIDs, model.FolderID(lib, cleanPath))
+
+		// Include all descendants: folders whose path field equals or starts with the target path
+		// Note: Folder.Path is the directory path, so children have path = targetPath
+		pathConditions = append(pathConditions, Eq{"path": cleanPath})
+		pathConditions = append(pathConditions, Like{"path": cleanPath + "/%"})
+	}
+
+	// Combine conditions: exact folder IDs OR descendant path patterns
+	if len(folderIDs) > 0 {
+		where = append(where, Or{Eq{"id": folderIDs}, pathConditions})
+	} else if len(pathConditions) > 0 {
+		where = append(where, pathConditions)
+	}
+
+	return r.queryFolderUpdateInfo(where)
+}
+
+// queryFolderUpdateInfo executes the query and returns the result map
+func (r folderRepository) queryFolderUpdateInfo(where And) (map[string]model.FolderUpdateInfo, error) {
	sq := r.newSelect().Columns("id", "updated_at", "hash").Where(where)
	var res []struct {
		ID string
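The batching above leans on slices.Chunk (Go 1.23+), which yields successive sub-slices of at most the given size, so each generated WHERE clause stays under SQLite's expression-tree depth limit. A tiny standalone sketch of the same batching pattern:

package main

import (
	"fmt"
	"slices"
)

func main() {
	paths := []string{"a", "b", "c", "d", "e", "f", "g"}
	const batchSize = 3 // the real code uses 100

	// Each iteration receives a sub-slice of at most batchSize elements,
	// e.g. [a b c], [d e f], [g] - one query would be issued per batch.
	for batch := range slices.Chunk(paths, batchSize) {
		fmt.Println(batch)
	}
}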
@@ -4,6 +4,8 @@ import (
	"context"
	"fmt"
	"slices"
+	"strconv"
+	"strings"
	"sync"
	"time"

@@ -193,12 +195,43 @@ func (r *mediaFileRepository) GetCursor(options ...model.QueryOptions) (model.Me
	}, nil
}

+// FindByPaths finds media files by their paths.
+// The paths can be library-qualified (format: "libraryID:path") or unqualified ("path").
+// Library-qualified paths search within the specified library, while unqualified paths
+// search across all libraries for backward compatibility.
func (r *mediaFileRepository) FindByPaths(paths []string) (model.MediaFiles, error) {
-	sel := r.newSelect().Columns("*").Where(Eq{"path collate nocase": paths})
+	query := Or{}
+	for _, path := range paths {
+		parts := strings.SplitN(path, ":", 2)
+		if len(parts) == 2 {
+			// Library-qualified path: "libraryID:path"
+			libraryID, err := strconv.Atoi(parts[0])
+			if err != nil {
+				// Invalid format, skip
+				continue
+			}
+			relativePath := parts[1]
+			query = append(query, And{
+				Eq{"path collate nocase": relativePath},
+				Eq{"library_id": libraryID},
+			})
+		} else {
+			// Unqualified path: search across all libraries
+			query = append(query, Eq{"path collate nocase": path})
+		}
+	}
+
+	if len(query) == 0 {
+		return model.MediaFiles{}, nil
+	}
+
+	sel := r.newSelect().Columns("*").Where(query)
	var res dbMediaFiles
	if err := r.queryAll(sel, &res); err != nil {
		return nil, err
	}
	return res.toModels(), nil
}

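Several places in this change parse the same "libraryID:path" form. A hedged sketch of that parsing as a standalone helper (splitLibraryPath is a hypothetical name, not a function in the codebase), with the same fallback behaviour as the repository above:

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// splitLibraryPath splits a possibly library-qualified path ("libraryID:path").
// It returns ok=false for unqualified paths, which callers treat as
// "search across all libraries" for backward compatibility.
func splitLibraryPath(p string) (libraryID int, path string, ok bool) {
	parts := strings.SplitN(p, ":", 2)
	if len(parts) != 2 {
		return 0, p, false
	}
	id, err := strconv.Atoi(parts[0])
	if err != nil {
		return 0, p, false
	}
	return id, parts[1], true
}

func main() {
	fmt.Println(splitLibraryPath("2:Artist/Album/01.mp3")) // 2 Artist/Album/01.mp3 true
	fmt.Println(splitLibraryPath("Artist/Album/01.mp3"))   // 0 Artist/Album/01.mp3 false
}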
@@ -89,6 +89,10 @@ func (s *SQLStore) ScrobbleBuffer(ctx context.Context) model.ScrobbleBufferRepos
	return NewScrobbleBufferRepository(ctx, s.getDBXBuilder())
}

+func (s *SQLStore) Scrobble(ctx context.Context) model.ScrobbleRepository {
+	return NewScrobbleRepository(ctx, s.getDBXBuilder())
+}
+
func (s *SQLStore) Resource(ctx context.Context, m interface{}) model.ResourceRepository {
	switch m.(type) {
	case model.User:
persistence/scrobble_repository.go (new file, 34 lines)
@@ -0,0 +1,34 @@
+package persistence
+
+import (
+	"context"
+	"time"
+
+	. "github.com/Masterminds/squirrel"
+	"github.com/navidrome/navidrome/model"
+	"github.com/pocketbase/dbx"
+)
+
+type scrobbleRepository struct {
+	sqlRepository
+}
+
+func NewScrobbleRepository(ctx context.Context, db dbx.Builder) model.ScrobbleRepository {
+	r := &scrobbleRepository{}
+	r.ctx = ctx
+	r.db = db
+	r.tableName = "scrobbles"
+	return r
+}
+
+func (r *scrobbleRepository) RecordScrobble(mediaFileID string, submissionTime time.Time) error {
+	userID := loggedUser(r.ctx).ID
+	values := map[string]interface{}{
+		"media_file_id":   mediaFileID,
+		"user_id":         userID,
+		"submission_time": submissionTime.Unix(),
+	}
+	insert := Insert(r.tableName).SetMap(values)
+	_, err := r.executeSQL(insert)
+	return err
+}
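Tying the pieces together: the play tracker only writes to this new table when the EnableScrobbleHistory flag is on, calling RecordScrobble inside the same transaction that bumps play counts. A hedged sketch of that call path, using only the identifiers visible in this diff (recordHistory itself is an illustrative helper, not part of the codebase, and the conditional mirrors the change to incPlay above):

package example

import (
	"context"
	"time"

	"github.com/navidrome/navidrome/conf"
	"github.com/navidrome/navidrome/model"
)

// recordHistory persists a scrobble only when the new EnableScrobbleHistory
// flag is set; the user is taken from the repository's context, so only the
// track ID and timestamp are passed explicitly.
func recordHistory(ctx context.Context, tx model.DataStore, trackID string, ts time.Time) error {
	if !conf.Server.EnableScrobbleHistory {
		return nil
	}
	return tx.Scrobble(ctx).RecordScrobble(trackID, ts)
}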
persistence/scrobble_repository_test.go (new file, 84 lines)
@@ -0,0 +1,84 @@
+package persistence
+
+import (
+	"context"
+	"time"
+
+	"github.com/navidrome/navidrome/log"
+	"github.com/navidrome/navidrome/model"
+	"github.com/navidrome/navidrome/model/id"
+	"github.com/navidrome/navidrome/model/request"
+	. "github.com/onsi/ginkgo/v2"
+	. "github.com/onsi/gomega"
+	"github.com/pocketbase/dbx"
+)
+
+var _ = Describe("ScrobbleRepository", func() {
+	var repo model.ScrobbleRepository
+	var rawRepo sqlRepository
+	var ctx context.Context
+	var fileID string
+	var userID string
+
+	BeforeEach(func() {
+		fileID = id.NewRandom()
+		userID = id.NewRandom()
+		ctx = request.WithUser(log.NewContext(GinkgoT().Context()), model.User{ID: userID, UserName: "johndoe", IsAdmin: true})
+		db := GetDBXBuilder()
+		repo = NewScrobbleRepository(ctx, db)
+
+		rawRepo = sqlRepository{
+			ctx:       ctx,
+			tableName: "scrobbles",
+			db:        db,
+		}
+	})
+
+	AfterEach(func() {
+		_, _ = rawRepo.db.Delete("scrobbles", dbx.HashExp{"media_file_id": fileID}).Execute()
+		_, _ = rawRepo.db.Delete("media_file", dbx.HashExp{"id": fileID}).Execute()
+		_, _ = rawRepo.db.Delete("user", dbx.HashExp{"id": userID}).Execute()
+	})
+
+	Describe("RecordScrobble", func() {
+		It("records a scrobble event", func() {
+			submissionTime := time.Now().UTC()
+
+			// Insert User
+			_, err := rawRepo.db.Insert("user", dbx.Params{
+				"id":         userID,
+				"user_name":  "user",
+				"password":   "pw",
+				"created_at": time.Now(),
+				"updated_at": time.Now(),
+			}).Execute()
+			Expect(err).ToNot(HaveOccurred())
+
+			// Insert MediaFile
+			_, err = rawRepo.db.Insert("media_file", dbx.Params{
+				"id":         fileID,
+				"path":       "path",
+				"created_at": time.Now(),
+				"updated_at": time.Now(),
+			}).Execute()
+			Expect(err).ToNot(HaveOccurred())
+
+			err = repo.RecordScrobble(fileID, submissionTime)
+			Expect(err).ToNot(HaveOccurred())
+
+			// Verify insertion
+			var scrobble struct {
+				MediaFileID    string `db:"media_file_id"`
+				UserID         string `db:"user_id"`
+				SubmissionTime int64  `db:"submission_time"`
+			}
+			err = rawRepo.db.Select("*").From("scrobbles").
+				Where(dbx.HashExp{"media_file_id": fileID, "user_id": userID}).
+				One(&scrobble)
+			Expect(err).ToNot(HaveOccurred())
+			Expect(scrobble.MediaFileID).To(Equal(fileID))
+			Expect(scrobble.UserID).To(Equal(userID))
+			Expect(scrobble.SubmissionTime).To(Equal(submissionTime.Unix()))
+		})
+	})
+})
@@ -51,8 +51,10 @@ func unmarshalParticipants(data string) (model.Participants, error) {
 }
 
 func (r sqlRepository) updateParticipants(itemID string, participants model.Participants) error {
-	ids := participants.AllIDs()
-	sqd := Delete(r.tableName + "_artists").Where(And{Eq{r.tableName + "_id": itemID}, NotEq{"artist_id": ids}})
+	// Delete all existing participant entries for this item.
+	// This ensures stale role associations are removed when an artist's role changes
+	// (e.g., an artist was both albumartist and composer, but is now only composer).
+	sqd := Delete(r.tableName + "_artists").Where(Eq{r.tableName + "_id": itemID})
 	_, err := r.executeSQL(sqd)
 	if err != nil {
 		return err
@@ -3,6 +3,7 @@ package plugins
 import (
 	"context"
 	"errors"
+	"time"
 
 	"github.com/navidrome/navidrome/conf"
 	"github.com/navidrome/navidrome/conf/configtest"
@@ -23,6 +24,7 @@ var _ = Describe("Adapter Media Agent", func() {
 	// Ensure plugins folder is set to testdata
 	DeferCleanup(configtest.SetupConfig())
 	conf.Server.Plugins.Folder = testDataDir
+	conf.Server.DevPluginCompilationTimeout = 2 * time.Minute
 
 	mgr = createManager(nil, metrics.NewNoopInstance())
 	mgr.ScanPlugins()
File diff suppressed because it is too large
@@ -27,15 +27,16 @@
  "playDate": "Laste Ludita",
  "channels": "Kanaloj",
  "createdAt": "Dato de aligo",
- "grouping": "",
+ "grouping": "Grupo",
  "mood": "Humoro",
- "participants": "",
+ "participants": "Aldonaj partoprenantoj",
  "tags": "Aldonaj Etikedoj",
  "mappedTags": "Mapigitaj etikedoj",
  "rawTags": "Krudaj etikedoj",
- "bitDepth": "",
+ "bitDepth": "Bitprofundo",
- "sampleRate": "",
+ "sampleRate": "Elprena rapido",
- "missing": ""
+ "missing": "Mankaj",
+ "libraryName": "Biblioteko"
  },
  "actions": {
  "addToQueue": "Ludi Poste",
@@ -44,7 +45,8 @@
  "shuffleAll": "Miksu Ĉiujn",
  "download": "Elŝuti",
  "playNext": "Ludu Poste",
- "info": "Akiri Informon"
+ "info": "Akiri Informon",
+ "showInPlaylist": "Montri en Ludlisto"
  }
 },
 "album": {
@@ -68,14 +70,15 @@
  "releaseDate": "Publikiĝis",
  "releases": "Publikiĝo |||| Publikiĝoj",
  "released": "Publikiĝis",
- "recordLabel": "",
+ "recordLabel": "Eldonejo",
- "catalogNum": "",
+ "catalogNum": "Kataloga Numero",
  "releaseType": "Tipo",
- "grouping": "",
+ "grouping": "Grupo",
- "media": "",
+ "media": "Aŭdvidaĵo",
  "mood": "Humoro",
- "date": "",
+ "date": "Registraĵa Dato",
- "missing": ""
+ "missing": "Mankaj",
+ "libraryName": "Biblioteko"
  },
  "actions": {
  "playAll": "Ludi",
@@ -107,8 +110,8 @@
  "rating": "Takso",
  "genre": "Ĝenro",
  "size": "Grando",
- "role": "",
+ "role": "Rolo",
- "missing": ""
+ "missing": "Mankaj"
  },
  "roles": {
  "albumartist": "Albuma Artisto |||| Albumaj Artistoj",
@@ -117,13 +120,19 @@
  "conductor": "Dirigento |||| Dirigentoj",
  "lyricist": "Kantoteksisto |||| Kantotekstistoj",
  "arranger": "Aranĝisto |||| Aranĝistoj",
- "producer": "",
+ "producer": "Produktisto |||| Produktistoj",
- "director": "",
+ "director": "Direktoro |||| Direktoroj",
- "engineer": "",
+ "engineer": "Inĝeniero |||| Inĝenieroj",
  "mixer": "Miksisto |||| Miksistoj",
  "remixer": "Remiksisto |||| Remiksistoj",
- "djmixer": "",
+ "djmixer": "Dĵ-a Miksisto |||| Dĵ-a Miksistoj",
- "performer": ""
+ "performer": "Plenumisto |||| Plenumistoj",
+ "maincredit": "Albuma Artisto aŭ Artisto |||| Albumaj Artistoj aŭ Artistoj"
+ },
+ "actions": {
+ "shuffle": "Miksi",
+ "radio": "Radio",
+ "topSongs": "Plej Luditaj Kantoj"
  }
 },
 "user": {
@@ -140,10 +149,12 @@
  "currentPassword": "Nuna Pasvorto",
  "newPassword": "Nova Pasvorto",
  "token": "Ĵetono",
- "lastAccessAt": "Lasta Atingo"
+ "lastAccessAt": "Lasta Atingo",
+ "libraries": "Bibliotekoj"
  },
  "helperTexts": {
- "name": "Ŝanĝoj de via nomo nur ĝisdatiĝs je via sekvanta ensaluto"
+ "name": "Ŝanĝoj de via nomo nur ĝisdatiĝs je via sekvanta ensaluto",
+ "libraries": "Elekti specifajn bibliotekojn por ĉi tiu uzanto, aŭ lasi malplena por uzi defaŭltajn bibliotekojn"
  },
  "notifications": {
  "created": "Uzanto farita",
@@ -152,7 +163,12 @@
  },
  "message": {
  "listenBrainzToken": "Enigi vian uzantan ĵetonon de ListenBrainz.",
- "clickHereForToken": "Alkakli ĉi tie por akiri vian ĵetonon"
+ "clickHereForToken": "Alkakli ĉi tie por akiri vian ĵetonon",
+ "selectAllLibraries": "Elekti ĉiujn bibliotekojn",
+ "adminAutoLibraries": "Administrantoj aŭtomate havas aliron al ĉiuj bibliotekoj"
+ },
+ "validation": {
+ "librariesRequired": "Almenaŭ unu biblioteko devas esti elektita por neadministrantoj"
  }
 },
 "player": {
@@ -197,11 +213,16 @@
  "export": "Eksporti",
  "makePublic": "Publikigi",
  "makePrivate": "Malpublikigi",
- "saveQueue": ""
+ "saveQueue": "Konservi Ludvicon al Ludlisto",
+ "searchOrCreate": "Serĉi ludlistojn aŭ tajpi por krei novan...",
+ "pressEnterToCreate": "Premu je Enter por krei novan ludliston",
+ "removeFromSelection": "Forigi de elekto"
  },
  "message": {
  "duplicate_song": "Aldoni duobligitajn kantojn",
- "song_exist": "Estas duoblaĵoj kiuj aldoniĝas al la kantolisto. Ĉu vi ŝatus aldoni la duoblaĵojn aŭ pasigi ilin?"
+ "song_exist": "Estas duoblaĵoj kiuj aldoniĝas al la kantolisto. Ĉu vi ŝatus aldoni la duoblaĵojn aŭ pasigi ilin?",
+ "noPlaylistsFound": "Neniuj ludlistoj trovitaj",
+ "noPlaylists": "Neniuj ludlistoj haveblaj"
  }
 },
 "radio": {
@@ -235,20 +256,78 @@
  }
 },
 "missing": {
- "name": "",
+ "name": "Manka Dosiero |||| Mankaj Dosieroj",
  "fields": {
- "path": "",
+ "path": "Vojo",
- "size": "",
+ "size": "Grando",
- "updatedAt": ""
+ "updatedAt": "Malaperis je",
+ "libraryName": "Biblioteko"
  },
  "actions": {
- "remove": "",
+ "remove": "Forigi",
- "remove_all": ""
+ "remove_all": "Forigi Ĉiujn"
  },
  "notifications": {
- "removed": ""
+ "removed": "Manka(j) dosiero(j) forigite"
  },
- "empty": ""
+ "empty": "Neniuj Mankaj Dosieroj"
+ },
+ "library": {
+ "name": "Biblioteko |||| Bibliotekoj",
+ "fields": {
+ "name": "Nomo",
+ "path": "Vojo",
+ "remotePath": "Fora Vojo",
+ "lastScanAt": "Plej Lasta Skano",
+ "songCount": "Kantoj",
+ "albumCount": "Albumoj",
+ "artistCount": "Artistoj",
+ "totalSongs": "Kantoj",
+ "totalAlbums": "Albumoj",
+ "totalArtists": "Artistoj",
+ "totalFolders": "Dosierujoj",
+ "totalFiles": "Dosieroj",
+ "totalMissingFiles": "Mankaj Dosieroj",
+ "totalSize": "Totala Grando",
+ "totalDuration": "Daŭro",
+ "defaultNewUsers": "Defaŭlto por Novaj Uzantoj",
+ "createdAt": "Farite je",
+ "updatedAt": "Ĝisdatiĝis je"
+ },
+ "sections": {
+ "basic": "Bazaj Informoj",
+ "statistics": "Statistikaĵoj"
+ },
+ "actions": {
+ "scan": "Skani Bibliotekon",
+ "manageUsers": "Agordi Uzantan Aliron",
+ "viewDetails": "Montri Informojn",
+ "quickScan": "Rapida Skano",
+ "fullScan": "Plena Skano"
+ },
+ "notifications": {
+ "created": "Biblioteko kreiĝis sukcese",
+ "updated": "Biblioteko ĝisdatiĝis sukcese",
+ "deleted": "Biblioteko foriĝis sukcese",
+ "scanStarted": "Biblioteka skano komenciĝis",
+ "scanCompleted": "Biblioteka skano finiĝis",
+ "quickScanStarted": "Rapida skano komenciĝis",
+ "fullScanStarted": "Plena skano komenciĝis",
+ "scanError": "Eraro de skana komenco. Kontrolu la protokolojn"
+ },
+ "validation": {
+ "nameRequired": "Biblioteka nomo estas necesa",
+ "pathRequired": "Biblioteka vojo estas necesa",
+ "pathNotDirectory": "Biblioteka vojo devas esti dosierujo",
+ "pathNotFound": "Biblioteka vojo ne trovite",
+ "pathNotAccessible": "Biblioteka vojo ne estas alirebla",
+ "pathInvalid": "Nevalida biblioteka vojo"
+ },
+ "messages": {
+ "deleteConfirm": "Ĉu vi certas, ke vi volas forigi ĉi tiun bibliotekon? Ĉi tio forigos ĉiujn rilatajn datumojn kaj uzantan aliron.",
+ "scanInProgress": "Skano progresas...",
+ "noLibrariesAssigned": "Neniuj bibliotekoj asignitaj por ĉi tiu uzanto"
+ }
+ }
  }
 },
 "ra": {
@@ -427,10 +506,12 @@
  "shareFailure": "Eraro de kopio de ligilo %{url} al la tondujo",
  "downloadDialogTitle": "Elŝuti %{resource} '%{name}' (%{size})",
  "shareCopyToClipboard": "Kopii al la tondujo: Ctrl+C, Enter",
- "remove_missing_title": "",
+ "remove_missing_title": "Forigi mankajn dosierojn",
  "remove_missing_content": "Ĉu vi certas, ke vi volas forigi la elektitajn mankajn dosierojn de la datumbazo? Ĉi tio forigos eterne ĉiujn referencojn de ili, inkluzive iliajn ludkvantojn kaj taksojn.",
- "remove_all_missing_title": "",
+ "remove_all_missing_title": "Forigi ĉiujn mankajn dosierojn",
- "remove_all_missing_content": ""
+ "remove_all_missing_content": "Ĉu vi certas, ke vi volas forigi ĉiujn mankajn dosierojn de la datumbazo? Ĉi tio permanante forigos ĉiujn referencojn al ili, inkluzive iliajn ludnombrojn kaj taksojn.",
+ "noSimilarSongsFound": "Neniuj similaj kantoj trovitaj",
+ "noTopSongsFound": "Neniuj plej luditaj kantoj trovitaj"
  },
  "menu": {
  "library": "Biblioteko",
@@ -453,13 +534,19 @@
  "album": "Uzi Albuman Songajnon",
  "track": "Uzi Kantan Songajnon"
  },
- "lastfmNotConfigured": ""
+ "lastfmNotConfigured": "API-ŝlosilo de Last.fm ne agordita"
  }
 },
 "albumList": "Albumoj",
 "about": "Pri",
 "playlists": "Ludlistoj",
- "sharedPlaylists": "Diskonigitaj Ludistoj"
+ "sharedPlaylists": "Diskonigitaj Ludistoj",
+ "librarySelector": {
+ "allLibraries": "Ĉiuj Bibliotekoj (%{count})",
+ "multipleLibraries": "%{selected} el %{total} Bibliotekoj",
+ "selectLibraries": "Elekti Bibliotekojn",
+ "none": "Neniu"
+ }
 },
 "player": {
 "playListsText": "Atendovico",
@@ -491,11 +578,26 @@
  "homepage": "Hejmpaĝo",
  "source": "Fontkodo",
  "featureRequests": "Trajta peto",
- "lastInsightsCollection": "",
+ "lastInsightsCollection": "Plej lasta kolekto de datumoj",
  "insights": {
  "disabled": "Malebligita",
- "waiting": ""
+ "waiting": "Atendante"
  }
+ },
+ "tabs": {
+ "about": "Pri",
+ "config": "Agordo"
+ },
+ "config": {
+ "configName": "Agorda Nomo",
+ "environmentVariable": "Medivariablo",
+ "currentValue": "Nuna Valoro",
+ "configurationFile": "Agorda Dosiero",
+ "exportToml": "Eksporti Agordojn (TOML)",
+ "exportSuccess": "Agordoj eksportiĝis al la tondujo en TOML-a formato",
+ "exportFailed": "Malsukcesis kopii agordojn",
+ "devFlagsHeader": "Programadaj Flagoj (povas ŝanĝiĝi/foriĝi)",
+ "devFlagsComment": "Ĉi tiuj estas eksperimentaj agordoj kaj eble foriĝos en estontaj versioj"
  }
 },
 "activity": {
@@ -505,9 +607,10 @@
  "fullScan": "Plena Skanado",
  "serverUptime": "Servila daŭro de funkciado",
  "serverDown": "SENKONEKTA",
- "scanType": "",
+ "scanType": "Plej Lasta Skano",
- "status": "",
+ "status": "Skana Eraro",
- "elapsedTime": ""
+ "elapsedTime": "Pasinta Tempo",
+ "selectiveScan": "Selektema"
  },
  "help": {
  "title": "Navidrome klavkomando",
@@ -519,8 +622,13 @@
  "next_song": "Sekva kanto",
  "vol_up": "Pli volumo",
  "vol_down": "Malpli volumo",
- "toggle_love": "Baskuli la stelon de nuna kanto",
+ "toggle_love": "Aldoni ĉi tiun kanton al plej ŝatataj",
  "current_song": "Iri al Nuna Kanto"
  }
+ },
+ "nowPlaying": {
+ "title": "Nun Ludanta",
+ "empty": "Nenio ludas",
+ "minutesAgo": "Antaŭ %{smart_count} minuto |||| Antaŭ %{smart_count} minutoj"
  }
 }
@@ -302,6 +302,8 @@
  },
  "actions": {
  "scan": "Arakatu liburutegia",
+ "quickScan": "Araketa bizkorra",
+ "fullScan": "Araketa sakona",
  "manageUsers": "Kudeatu erabiltzaileen sarbidea",
  "viewDetails": "Ikusi xehetasunak"
  },
@@ -310,6 +312,9 @@
  "updated": "Liburutegia ondo eguneratu da",
  "deleted": "Liburutegia ondo ezabatu da",
  "scanStarted": "Liburutegiaren araketa hasi da",
+ "quickScanStarted": "Araketa bizkorra hasi da",
+ "fullScanStarted": "Araketa sakona hasi da",
+ "scanError": "Errorea araketa abiaraztean. Aztertu erregistroak",
  "scanCompleted": "Liburutegiaren araketa amaitu da"
  },
  "validation": {
@@ -459,7 +464,7 @@
  "bad_item": "Elementu okerra",
  "item_doesnt_exist": "Elementua ez dago",
  "http_error": "Errorea zerbitzariarekin komunikatzerakoan",
- "data_provider_error": "Errorea datuen hornitzailean. Berrikusi kontsola xehetasun gehiagorako.",
+ "data_provider_error": "Errorea datuen hornitzailean. Aztertu kontsola xehetasun gehiagorako.",
  "i18n_error": "Ezin izan dira zehaztutako hizkuntzaren itzulpenak kargatu",
  "canceled": "Ekintza bertan behera utzi da",
  "logged_out": "Saioa amaitu da, konektatu berriro.",
@@ -600,8 +605,9 @@
  "activity": {
  "title": "Ekintzak",
  "totalScanned": "Arakatutako karpeta guztiak",
- "quickScan": "Arakatze azkarra",
+ "quickScan": "Arakatze bizkorra",
  "fullScan": "Arakatze sakona",
+ "selectiveScan": "Arakatze selektiboa",
  "serverUptime": "Zerbitzariak piztuta daraman denbora",
  "serverDown": "LINEAZ KANPO",
  "scanType": "Mota",
@@ -31,7 +31,7 @@
  "mood": "Tunnelma",
  "participants": "Lisäosallistujat",
  "tags": "Lisätunnisteet",
- "mappedTags": "Mäpättyt tunnisteet",
+ "mappedTags": "Mäpätyt tunnisteet",
  "rawTags": "Raakatunnisteet",
  "bitDepth": "Bittisyvyys",
  "sampleRate": "Näytteenottotaajuus",
@@ -301,14 +301,19 @@
  "actions": {
  "scan": "Skannaa kirjasto",
  "manageUsers": "Hallitse käyttäjien pääsyä",
- "viewDetails": "Näytä tiedot"
+ "viewDetails": "Näytä tiedot",
+ "quickScan": "Nopea skannaus",
+ "fullScan": "Täysi skannaus"
  },
  "notifications": {
  "created": "Kirjasto luotu onnistuneesti",
  "updated": "Kirjasto päivitetty onnistuneesti",
  "deleted": "Kirjasto poistettu onnistuneesti",
  "scanStarted": "Kirjaston skannaus aloitettu",
- "scanCompleted": "Kirjaston skannaus valmistunut"
+ "scanCompleted": "Kirjaston skannaus valmistunut",
+ "quickScanStarted": "Nopea skannaus aloitettu",
+ "fullScanStarted": "Täysi skannaus aloitettu",
+ "scanError": "Virhe skannauksen käynnistyksessä. Tarkista lokit"
  },
  "validation": {
  "nameRequired": "Kirjaston nimi vaaditaan",
@@ -319,7 +324,7 @@
  "pathInvalid": "Virheellinen kirjaston polku"
  },
  "messages": {
- "deleteConfirm": "Oletko varma, että haluat poistaa tämän kirjaston? Tämä poistaa kaikki liittyvät tiedot ja käyttäjien pääsyn.",
+ "deleteConfirm": "Haluatko varmasti poistaa tämän kirjaston? Kaikki siihen liittyvät tiedot ja käyttäjien pääsy poistetaan.",
  "scanInProgress": "Skannaus käynnissä...",
  "noLibrariesAssigned": "Tälle käyttäjälle ei ole määritetty kirjastoja"
  }
@@ -336,7 +341,7 @@
  "username": "Käyttäjänimi",
  "password": "Salasana",
  "sign_in": "Kirjaudu",
- "sign_in_error": "Autentikointi epäonnistui. Yritä uudelleen",
+ "sign_in_error": "Kirjautuminen epäonnistui. Yritä uudelleen",
  "logout": "Kirjaudu ulos",
  "insightsCollectionNote": "Navidrome kerää anonyymejä käyttötietoja auttaakseen parantamaan\nprojektia. Paina [tästä] saadaksesi lisätietoa\nja halutessasi kieltäytyä"
  },
@@ -346,7 +351,7 @@
  "required": "Pakollinen",
  "minLength": "Pitää vähintään olla %{min} merkkiä",
  "maxLength": "Saa olla enintään %{max} merkkiä",
- "minValue": "pitää olla vähintään %{min}",
+ "minValue": "Pitää olla vähintään %{min}",
  "maxValue": "Saa olla enentään %{max}",
  "number": "Pitää olla numero",
  "email": "Pitää olla oikea sähköpostiosoite",
@@ -440,7 +445,7 @@
  },
  "navigation": {
  "no_results": "Ei tuloksia",
- "no_more_results": "Sivunumero %{page} on rajojen ulkopuolella. Kokeile edellinen sivu.",
+ "no_more_results": "Sivunumeroa %{page} ei löydy. Yritä edellistä sivua.",
  "page_out_of_boundaries": "Sivunumero %{page} on rajojen ulkopuolella",
  "page_out_from_end": "Viimeinen sivu, ei voi edetä",
  "page_out_from_begin": "Ensimmäinen sivu, ei voi palata",
@@ -522,7 +527,7 @@
  "desktop_notifications": "Työpöytäilmoitukset",
  "lastfmScrobbling": "Kuuntelutottumuksen lähetys Last.fm-palveluun",
  "listenBrainzScrobbling": "Kuuntelutottumuksen lähetys ListenBrainz-palveluun",
- "replaygain": "RepleyGain -tila",
+ "replaygain": "ReplayGain -tila",
  "preAmp": "ReplayGain esivahvistus (dB)",
  "gain": {
  "none": "Pois käytöstä",
@@ -554,7 +559,7 @@
  "previousTrackText": "Edellinen kappale",
  "reloadText": "Päivitä",
  "volumeText": "Äänenvoimakkuus",
- "toggleLyricText": "Toggle lyric",
+ "toggleLyricText": "Näytä/piilota sanat",
  "toggleMiniModeText": "Minimoi",
  "destroyText": "Poista",
  "downloadText": "Lataa",
@@ -604,7 +609,8 @@
  "serverDown": "SAMMUTETTU",
  "scanType": "Tyyppi",
  "status": "Skannausvirhe",
- "elapsedTime": "Kulunut aika"
+ "elapsedTime": "Kulunut aika",
+ "selectiveScan": "Valikoiva"
  },
  "help": {
  "title": "Navidrome pikapainikkeet",
@@ -612,7 +618,7 @@
  "show_help": "Näytä tämä apuvalikko",
  "toggle_menu": "Menuvalikko päälle ja pois",
  "toggle_play": "Toista / Tauko",
- "prev_song": "Esellinen kappale",
+ "prev_song": "Edellinen kappale",
  "next_song": "Seuraava kappale",
  "vol_up": "Kovemmalle",
  "vol_down": "Hiljemmalle",
@@ -301,14 +301,19 @@
  "actions": {
  "scan": "Escanear Biblioteca",
  "manageUsers": "Xestionar acceso das usuarias",
- "viewDetails": "Ver detalles"
+ "viewDetails": "Ver detalles",
+ "quickScan": "Escaneado rápido",
+ "fullScan": "Escaneado completo"
  },
  "notifications": {
  "created": "Biblioteca creada correctamente",
  "updated": "Biblioteca actualizada correctamente",
  "deleted": "Biblioteca eliminada correctamente",
  "scanStarted": "Comezou o escaneo da biblioteca",
- "scanCompleted": "Completouse o escaneado da biblioteca"
+ "scanCompleted": "Completouse o escaneado da biblioteca",
+ "quickScanStarted": "Iniciado o escaneado rápido",
+ "fullScanStarted": "Iniciado o escaneado completo",
+ "scanError": "Erro ao escanear. Comproba o rexistro"
  },
  "validation": {
  "nameRequired": "Requírese un nome para a biblioteca",
@@ -604,7 +609,8 @@
  "serverDown": "SEN CONEXIÓN",
  "scanType": "Tipo",
  "status": "Erro de escaneado",
- "elapsedTime": "Tempo transcurrido"
+ "elapsedTime": "Tempo transcurrido",
+ "selectiveScan": "Selectivo"
  },
  "help": {
  "title": "Atallos de Navidrome",
@@ -301,14 +301,19 @@
  "actions": {
  "scan": "Scan bibliotheek",
  "manageUsers": "Beheer gebruikerstoegang",
- "viewDetails": "Bekijk details"
+ "viewDetails": "Bekijk details",
+ "quickScan": "Snelle scan",
+ "fullScan": "Volledige scan"
  },
  "notifications": {
  "created": "Bibliotheek succesvol aangemaakt",
  "updated": "Bibliotheek succesvol bijgewerkt",
  "deleted": "Bibliotheek succesvol verwijderd",
  "scanStarted": "Bibliotheekscan is gestart",
- "scanCompleted": "Bibliotheekscan is voltooid"
+ "scanCompleted": "Bibliotheekscan is voltooid",
+ "quickScanStarted": "Snelle scan gestart",
+ "fullScanStarted": "Volledige scan gestart",
+ "scanError": "Fout bij start van scan. Check de logs"
  },
  "validation": {
  "nameRequired": "Bibliotheek naam is vereist",
@@ -604,7 +609,8 @@
  "serverDown": "Offline",
  "scanType": "Type",
  "status": "Scan fout",
- "elapsedTime": "Verlopen tijd"
+ "elapsedTime": "Verlopen tijd",
+ "selectiveScan": "Selectief"
  },
  "help": {
  "title": "Navidrome sneltoetsen",
@@ -18,8 +18,6 @@
  "size": "Filstørrelse",
  "updatedAt": "Oppdatert",
  "bitRate": "Bit rate",
- "bitDepth": "Bit depth",
- "channels": "Kanaler",
  "discSubtitle": "Disk Undertittel",
  "starred": "Favoritt",
  "comment": "Kommentar",
@@ -27,13 +25,18 @@
  "quality": "Kvalitet",
  "bpm": "BPM",
  "playDate": "Sist Avspilt",
+ "channels": "Kanaler",
  "createdAt": "Lagt til",
  "grouping": "Gruppering",
  "mood": "Stemning",
  "participants": "Ytterlige deltakere",
  "tags": "Ytterlige Tags",
  "mappedTags": "Kartlagte tags",
- "rawTags": "Rå tags"
+ "rawTags": "Rå tags",
+ "bitDepth": "Bit depth",
+ "sampleRate": "",
+ "missing": "",
+ "libraryName": ""
  },
  "actions": {
  "addToQueue": "Avspill senere",
@@ -42,7 +45,8 @@
  "shuffleAll": "Shuffle Alle",
  "download": "Last ned",
  "playNext": "Avspill neste",
- "info": "Få Info"
+ "info": "Få Info",
+ "showInPlaylist": ""
  }
 },
 "album": {
@@ -53,36 +57,38 @@
  "duration": "Tid",
  "songCount": "Sanger",
  "playCount": "Avspillinger",
- "size": "Størrelse",
  "name": "Navn",
  "genre": "Sjanger",
  "compilation": "Samling",
  "year": "År",
- "date": "Inspillingsdato",
- "originalDate": "Original",
- "releaseDate": "Utgitt",
- "releases": "Utgivelse |||| Utgivelser",
- "released": "Utgitt",
  "updatedAt": "Oppdatert",
  "comment": "Kommentar",
  "rating": "Rangering",
  "createdAt": "Lagt Til",
+ "size": "Størrelse",
+ "originalDate": "Original",
+ "releaseDate": "Utgitt",
+ "releases": "Utgivelse |||| Utgivelser",
+ "released": "Utgitt",
  "recordLabel": "Plateselskap",
  "catalogNum": "Katalognummer",
  "releaseType": "Type",
  "grouping": "Gruppering",
  "media": "Media",
- "mood": "Stemning"
+ "mood": "Stemning",
+ "date": "Inspillingsdato",
+ "missing": "",
+ "libraryName": ""
  },
  "actions": {
  "playAll": "Avspill",
  "playNext": "Avspill Neste",
  "addToQueue": "Avspill Senere",
- "share": "Del",
  "shuffle": "Shuffle",
  "addToPlaylist": "Legg til i spilleliste",
  "download": "Last ned",
- "info": "Få Info"
+ "info": "Få Info",
+ "share": "Del"
  },
  "lists": {
  "all": "Alle",
@@ -100,11 +106,12 @@
  "name": "Navn",
  "albumCount": "Album Antall",
  "songCount": "Song Antall",
- "size": "Størrelse",
  "playCount": "Avspillinger",
  "rating": "Rangering",
  "genre": "Sjanger",
- "role": "Rolle"
+ "size": "Størrelse",
+ "role": "Rolle",
+ "missing": ""
  },
  "roles": {
  "albumartist": "Album Artist |||| Album Artister",
@@ -119,7 +126,13 @@
  "mixer": "Mixer |||| Mixers",
  "remixer": "Remixer |||| Remixers",
  "djmixer": "DJ Mixer |||| DJ Mixers",
- "performer": "Performer |||| Performers"
+ "performer": "Performer |||| Performers",
+ "maincredit": ""
+ },
+ "actions": {
+ "shuffle": "",
+ "radio": "",
+ "topSongs": ""
  }
 },
 "user": {
@@ -128,7 +141,6 @@
  "userName": "Brukernavn",
  "isAdmin": "Admin",
  "lastLoginAt": "Sist Pålogging",
- "lastAccessAt": "Sist Tilgang",
  "updatedAt": "Oppdatert",
  "name": "Navn",
  "password": "Passord",
@@ -136,10 +148,13 @@
  "changePassword": "Bytt Passord?",
  "currentPassword": "Nåværende Passord",
  "newPassword": "Nytt Passord",
- "token": "Token"
+ "token": "Token",
+ "lastAccessAt": "Sist Tilgang",
+ "libraries": ""
  },
  "helperTexts": {
- "name": "Navnendringer vil ikke være synlig før neste pålogging"
+ "name": "Navnendringer vil ikke være synlig før neste pålogging",
+ "libraries": ""
  },
  "notifications": {
  "created": "Bruker opprettet",
@@ -148,7 +163,12 @@
  },
  "message": {
  "listenBrainzToken": "Fyll inn din ListenBrainz bruker token.",
- "clickHereForToken": "Klikk her for å hente din token"
+ "clickHereForToken": "Klikk her for å hente din token",
+ "selectAllLibraries": "",
+ "adminAutoLibraries": ""
+ },
+ "validation": {
+ "librariesRequired": ""
  }
 },
 "player": {
@@ -192,11 +212,17 @@
  "addNewPlaylist": "Opprett \"%{name}\"",
  "export": "Eksporter",
  "makePublic": "Gjør Offentlig",
- "makePrivate": "Gjør Privat"
+ "makePrivate": "Gjør Privat",
+ "saveQueue": "",
+ "searchOrCreate": "",
+ "pressEnterToCreate": "",
+ "removeFromSelection": ""
  },
  "message": {
  "duplicate_song": "Legg til Duplikater",
- "song_exist": "Duplikater har blitt lagt til i spillelisten. Ønsker du å legge til duplikater eller hoppe over de?"
+ "song_exist": "Duplikater har blitt lagt til i spillelisten. Ønsker du å legge til duplikater eller hoppe over de?",
+ "noPlaylistsFound": "",
+ "noPlaylists": ""
  }
 },
 "radio": {
@@ -218,7 +244,6 @@
  "username": "Delt Av",
  "url": "URL",
  "description": "Beskrivelse",
- "downloadable": "Tillat Nedlastinger?",
  "contents": "Innhold",
  "expiresAt": "Utløper",
  "lastVisitedAt": "Sist Besøkt",
@@ -226,24 +251,82 @@
  "format": "Format",
  "maxBitRate": "Maks. Bit Rate",
  "updatedAt": "Oppdatert",
- "createdAt": "Opprettet"
+ "createdAt": "Opprettet",
- },
+ "downloadable": "Tillat Nedlastinger?"
- "notifications": {},
+ }
- "actions": {}
 },
 "missing": {
  "name": "Manglende Fil|||| Manglende Filer",
- "empty": "Ingen Manglende Filer",
  "fields": {
  "path": "Filsti",
  "size": "Størrelse",
- "updatedAt": "Ble borte"
+ "updatedAt": "Ble borte",
+ "libraryName": ""
  },
  "actions": {
- "remove": "Fjern"
+ "remove": "Fjern",
+ "remove_all": ""
  },
  "notifications": {
  "removed": "Manglende fil(er) fjernet"
+ },
+ "empty": "Ingen Manglende Filer"
+ },
+ "library": {
+ "name": "",
+ "fields": {
+ "name": "",
+ "path": "",
+ "remotePath": "",
+ "lastScanAt": "",
+ "songCount": "",
+ "albumCount": "",
+ "artistCount": "",
+ "totalSongs": "",
+ "totalAlbums": "",
+ "totalArtists": "",
+ "totalFolders": "",
+ "totalFiles": "",
+ "totalMissingFiles": "",
+ "totalSize": "",
+ "totalDuration": "",
+ "defaultNewUsers": "",
+ "createdAt": "",
+ "updatedAt": ""
+ },
+ "sections": {
+ "basic": "",
+ "statistics": ""
+ },
+ "actions": {
+ "scan": "",
+ "manageUsers": "",
+ "viewDetails": "",
+ "quickScan": "",
+ "fullScan": ""
+ },
+ "notifications": {
+ "created": "",
+ "updated": "",
+ "deleted": "Biblioteket slettet",
+ "scanStarted": "Skanning startet",
+ "scanCompleted": "",
+ "quickScanStarted": "",
+ "fullScanStarted": "",
+ "scanError": "Error starte skanning. Sjekk loggene"
+ },
+ "validation": {
+ "nameRequired": "",
+ "pathRequired": "",
+ "pathNotDirectory": "",
+ "pathNotFound": "",
+ "pathNotAccessible": "",
+ "pathInvalid": ""
+ },
+ "messages": {
+ "deleteConfirm": "",
+ "scanInProgress": "",
+ "noLibrariesAssigned": ""
+ }
  }
  }
 },
@@ -282,7 +365,6 @@
  "add": "Legg Til",
  "back": "Tilbake",
  "bulk_actions": "1 element valgt |||| %{smart_count} elementer valgt",
- "bulk_actions_mobile": "1 |||| %{smart_count}",
  "cancel": "Avbryt",
  "clear_input_value": "Nullstill verdi",
  "clone": "Klone",
@@ -306,6 +388,7 @@
  "close_menu": "Lukk meny",
  "unselect": "Avvelg",
  "skip": "Hopp over",
+ "bulk_actions_mobile": "1 |||| %{smart_count}",
  "share": "Del",
  "download": "Last Ned"
  },
@@ -400,31 +483,35 @@
  "noPlaylistsAvailable": "Ingen tilgjengelig",
  "delete_user_title": "Slett bruker '%{name}'",
  "delete_user_content": "Er du sikker på at du vil slette denne brukeren og all tilhørlig data (inkludert spillelister og preferanser)?",
- "remove_missing_title": "Fjern manglende filer",
- "remove_missing_content": "Er du sikker på at du ønsker å fjerne de valgte manglende filene fra databasen? Dette vil permanent fjerne alle referanser til de, inkludert antall avspillinger og rangeringer.",
  "notifications_blocked": "Du har blokkert notifikasjoner for denne nettsiden i din nettleser.",
  "notifications_not_available": "Denne nettleseren støtter ikke skrivebordsnotifikasjoner, eller så er du ikke tilkoblet Navidrome via https.",
  "lastfmLinkSuccess": "Last.fm er tilkoblet og scrobbling er aktivert",
  "lastfmLinkFailure": "Last.fm kunne ikke koble til",
  "lastfmUnlinkSuccess": "Last.fm er avkoblet og scrobbling er deaktivert",
  "lastfmUnlinkFailure": "Last.fm kunne ikke avkobles",
- "listenBrainzLinkSuccess": "ListenBrainz er koblet til og scrobbling er aktivert som bruker: %{user}",
- "listenBrainzLinkFailure": "ListenBrainz kunne ikke koble til: %{error}",
- "listenBrainzUnlinkSuccess": "ListenBrainz er avkoblet og scrobbling er deaktivert",
- "listenBrainzUnlinkFailure": "ListenBrainz kunne ikke avkobles",
  "openIn": {
  "lastfm": "Åpne i Last.fm",
  "musicbrainz": "Åpne i MusicBrainz"
  },
  "lastfmLink": "Les Mer...",
+ "listenBrainzLinkSuccess": "ListenBrainz er koblet til og scrobbling er aktivert som bruker: %{user}",
+ "listenBrainzLinkFailure": "ListenBrainz kunne ikke koble til: %{error}",
+ "listenBrainzUnlinkSuccess": "ListenBrainz er avkoblet og scrobbling er deaktivert",
+ "listenBrainzUnlinkFailure": "ListenBrainz kunne ikke avkobles",
+ "downloadOriginalFormat": "Last ned i originalformat",
  "shareOriginalFormat": "Del i originalformat",
  "shareDialogTitle": "Del %{resource} '%{name}'",
  "shareBatchDialogTitle": "Del 1 %{resource} |||| Del %{smart_count} %{resource}",
- "shareCopyToClipboard": "Kopier til utklippstavle: Ctrl+C, Enter",
  "shareSuccess": "URL kopiert til utklippstavle: %{url}",
  "shareFailure": "Error ved kopiering av URL %{url} til utklippstavle",
  "downloadDialogTitle": "Last ned %{resource} '%{name}' (%{size})",
- "downloadOriginalFormat": "Last ned i originalformat"
+ "shareCopyToClipboard": "Kopier til utklippstavle: Ctrl+C, Enter",
+ "remove_missing_title": "Fjern manglende filer",
+ "remove_missing_content": "Er du sikker på at du ønsker å fjerne de valgte manglende filene fra databasen? Dette vil permanent fjerne alle referanser til de, inkludert antall avspillinger og rangeringer.",
+ "remove_all_missing_title": "",
+ "remove_all_missing_content": "",
+ "noSimilarSongsFound": "",
+ "noTopSongsFound": ""
  },
  "menu": {
  "library": "Bibliotek",
@@ -438,7 +525,6 @@
  "language": "Språk",
  "defaultView": "Standardvisning",
  "desktop_notifications": "Skrivebordsnotifikasjoner",
- "lastfmNotConfigured": "Last.fm API-Key er ikke konfigurert",
  "lastfmScrobbling": "Scrobble til Last.fm",
  "listenBrainzScrobbling": "Scrobble til ListenBrainz",
  "replaygain": "ReplayGain Mode",
@@ -447,13 +533,20 @@
  "none": "Deaktivert",
  "album": "Bruk Album Gain",
  "track": "Bruk Track Gain"
- }
+ },
+ "lastfmNotConfigured": "Last.fm API-Key er ikke konfigurert"
  }
 },
 "albumList": "Album",
+ "about": "Om",
 "playlists": "Spillelister",
 "sharedPlaylists": "Delte Spillelister",
- "about": "Om"
+ "librarySelector": {
+ "allLibraries": "",
+ "multipleLibraries": "",
+ "selectLibraries": "",
+ "none": ""
+ }
 },
 "player": {
 "playListsText": "Spill Av Kø",
@@ -490,6 +583,21 @@
  "disabled": "Deaktivert",
  "waiting": "Venter"
  }
+ },
+ "tabs": {
+ "about": "",
+ "config": ""
+ },
+ "config": {
+ "configName": "",
+ "environmentVariable": "",
+ "currentValue": "",
+ "configurationFile": "",
+ "exportToml": "",
+ "exportSuccess": "",
+ "exportFailed": "",
+ "devFlagsHeader": "",
+ "devFlagsComment": ""
  }
 },
 "activity": {
@@ -498,7 +606,11 @@
  "quickScan": "Hurtigskann",
  "fullScan": "Full Skann",
  "serverUptime": "Server Oppetid",
- "serverDown": "OFFLINE"
+ "serverDown": "OFFLINE",
+ "scanType": "",
+ "status": "",
+ "elapsedTime": "",
+ "selectiveScan": "Utvalgt"
  },
  "help": {
  "title": "Navidrome Hurtigtaster",
@@ -508,10 +620,15 @@
  "toggle_play": "Avspill / Pause",
  "prev_song": "Forrige Sang",
  "next_song": "Neste Sang",
- "current_song": "Gå til Nåværende Sang",
  "vol_up": "Volum Opp",
  "vol_down": "Volum Ned",
- "toggle_love": "Legg til spor i favoritter"
+ "toggle_love": "Legg til spor i favoritter",
+ "current_song": "Gå til Nåværende Sang"
  }
+ },
+ "nowPlaying": {
+ "title": "",
+ "empty": "",
+ "minutesAgo": ""
  }
 }
@@ -301,14 +301,19 @@
  "actions": {
  "scan": "Kütüphaneyi Tara",
  "manageUsers": "Kullanıcı Erişimini Yönet",
- "viewDetails": "Ayrıntıları Görüntüle"
+ "viewDetails": "Ayrıntıları Görüntüle",
+ "quickScan": "Hızlı Tarama",
+ "fullScan": "Tam Tarama"
  },
  "notifications": {
  "created": "Kütüphane başarıyla oluşturuldu",
  "updated": "Kütüphane başarıyla güncellendi",
  "deleted": "Kütüphane başarıyla silindi",
  "scanStarted": "Kütüphane taraması başladı",
- "scanCompleted": "Kütüphane taraması tamamlandı"
+ "scanCompleted": "Kütüphane taraması tamamlandı",
+ "quickScanStarted": "Hızlı tarama başlatıldı",
+ "fullScanStarted": "Tam tarama başlatıldı",
+ "scanError": "Tarama başlatılırken hata oluştu. Günlükleri kontrol edin."
  },
  "validation": {
  "nameRequired": "Kütüphane adı gereklidir",
@@ -604,7 +609,8 @@
  "serverDown": "ÇEVRİMDIŞI",
  "scanType": "Tür",
  "status": "Tarama Hatası",
- "elapsedTime": "Geçen Süre"
+ "elapsedTime": "Geçen Süre",
+ "selectiveScan": "Seçmeli"
  },
  "help": {
  "title": "Navidrome Kısayolları",
@@ -14,6 +14,12 @@ import (
 	"github.com/navidrome/navidrome/model"
 )
 
+const (
+	// argLengthThreshold is the threshold for switching from command-line args to file-based target passing.
+	// Set conservatively at 24KB to support Windows (~32KB limit) with margin for env vars.
+	argLengthThreshold = 24 * 1024
+)
+
 // scannerExternal is a scanner that runs an external process to do the scanning. It is used to avoid
 // memory leaks or retention in the main process, as the scanner can consume a lot of memory. The
 // external process will be spawned with the same executable as the current process, and will run
@@ -45,10 +51,14 @@ func (s *scannerExternal) scan(ctx context.Context, fullScan bool, targets []mod
 
 	// Add targets if provided
 	if len(targets) > 0 {
-		for _, target := range targets {
-			args = append(args, "-t", target.String())
+		targetArgs, cleanup, err := targetArguments(ctx, targets, argLengthThreshold)
+		if err != nil {
+			progress <- &ProgressInfo{Error: err.Error()}
+			return
 		}
-		log.Debug(ctx, "Spawning external scanner process with targets", "fullScan", fullScan, "path", exe, "targets", targets)
+		defer cleanup()
+		log.Debug(ctx, "Spawning external scanner process with target file", "fullScan", fullScan, "path", exe, "numTargets", len(targets))
+		args = append(args, targetArgs...)
 	} else {
 		log.Debug(ctx, "Spawning external scanner process", "fullScan", fullScan, "path", exe)
 	}
@@ -98,4 +108,62 @@ func (s *scannerExternal) wait(cmd *exec.Cmd, out *io.PipeWriter) {
 	_ = out.Close()
 }
 
+// targetArguments builds command-line arguments for the given scan targets.
+// If the estimated argument length exceeds a threshold, it writes the targets to a temp file
+// and returns the --target-file argument instead.
+// Returns the arguments, a cleanup function to remove any temp file created, and an error if any.
+func targetArguments(ctx context.Context, targets []model.ScanTarget, lengthThreshold int) ([]string, func(), error) {
+	var args []string
+
+	// Estimate argument length to decide whether to use file-based approach
+	argLength := estimateArgLength(targets)
+
+	if argLength > lengthThreshold {
+		// Write targets to temp file and pass via --target-file
+		targetFile, err := writeTargetsToFile(targets)
+		if err != nil {
+			return nil, nil, fmt.Errorf("failed to write targets to file: %w", err)
+		}
+		args = append(args, "--target-file", targetFile)
+		return args, func() {
+			os.Remove(targetFile) // Clean up temp file
+		}, nil
+	}
+
+	// Use command-line arguments for small target lists
+	for _, target := range targets {
+		args = append(args, "-t", target.String())
+	}
+	return args, func() {}, nil
+}
+
+// estimateArgLength estimates the total length of command-line arguments for the given targets.
+func estimateArgLength(targets []model.ScanTarget) int {
+	length := 0
+	for _, target := range targets {
+		// Each target adds: "-t " + target string + space
+		length += 3 + len(target.String()) + 1
+	}
+	return length
+}
+
+// writeTargetsToFile writes the targets to a temporary file, one per line.
+// Returns the path to the temp file, which the caller should clean up.
+func writeTargetsToFile(targets []model.ScanTarget) (string, error) {
+	tmpFile, err := os.CreateTemp("", "navidrome-scan-targets-*.txt")
+	if err != nil {
+		return "", fmt.Errorf("failed to create temp file: %w", err)
+	}
+	defer tmpFile.Close()
+
+	for _, target := range targets {
+		if _, err := fmt.Fprintln(tmpFile, target.String()); err != nil {
+			os.Remove(tmpFile.Name())
+			return "", fmt.Errorf("failed to write to temp file: %w", err)
+		}
+	}
+
+	return tmpFile.Name(), nil
+}
+
 var _ scanner = (*scannerExternal)(nil)
|
|||||||
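Note: the contract between the parent and the spawned scanner process is now either repeated -t flags or a single --target-file flag pointing at a plain-text file with one target per line, each rendered as "<libraryID>:<folderPath>" (the new tests below rely on exactly that format). The following is a minimal, self-contained sketch of the same decision logic, using a stand-in target type rather than Navidrome's model.ScanTarget:

package main

import (
    "fmt"
    "os"
)

// fakeTarget stands in for model.ScanTarget in this sketch only.
type fakeTarget struct {
    LibraryID  int
    FolderPath string
}

// String mirrors the "<libraryID>:<folderPath>" form asserted by the tests.
func (t fakeTarget) String() string { return fmt.Sprintf("%d:%s", t.LibraryID, t.FolderPath) }

func main() {
    targets := []fakeTarget{{1, "Music/Rock"}, {2, "Music/Jazz"}}
    const threshold = 24 * 1024 // same 24KB limit as argLengthThreshold

    // Estimate the combined "-t <target> " length, as estimateArgLength does.
    length := 0
    for _, t := range targets {
        length += 3 + len(t.String()) + 1
    }

    var args []string
    if length > threshold {
        // Large lists: write one target per line to a temp file, pass it via --target-file.
        f, err := os.CreateTemp("", "scan-targets-*.txt")
        if err != nil {
            panic(err)
        }
        defer os.Remove(f.Name())
        for _, t := range targets {
            fmt.Fprintln(f, t.String())
        }
        f.Close()
        args = []string{"--target-file", f.Name()}
    } else {
        // Small lists: repeated -t flags on the command line.
        for _, t := range targets {
            args = append(args, "-t", t.String())
        }
    }
    fmt.Println(args)
}

With two short targets this prints the -t form; lower the threshold (or grow the list) and the same code switches to the temp-file form.
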
scanner/external_test.go (new file, 160 lines)
@@ -0,0 +1,160 @@
+package scanner
+
+import (
+    "context"
+    "os"
+    "strings"
+
+    "github.com/navidrome/navidrome/model"
+    . "github.com/onsi/ginkgo/v2"
+    . "github.com/onsi/gomega"
+)
+
+var _ = Describe("targetArguments", func() {
+    var ctx context.Context
+
+    BeforeEach(func() {
+        ctx = GinkgoT().Context()
+    })
+
+    Context("with small target list", func() {
+        It("returns command-line arguments for single target", func() {
+            targets := []model.ScanTarget{
+                {LibraryID: 1, FolderPath: "Music/Rock"},
+            }
+
+            args, cleanup, err := targetArguments(ctx, targets, argLengthThreshold)
+            Expect(err).ToNot(HaveOccurred())
+            defer cleanup()
+            Expect(args).To(Equal([]string{"-t", "1:Music/Rock"}))
+        })
+
+        It("returns command-line arguments for multiple targets", func() {
+            targets := []model.ScanTarget{
+                {LibraryID: 1, FolderPath: "Music/Rock"},
+                {LibraryID: 2, FolderPath: "Music/Jazz"},
+                {LibraryID: 3, FolderPath: "Classical"},
+            }
+
+            args, cleanup, err := targetArguments(ctx, targets, argLengthThreshold)
+            Expect(err).ToNot(HaveOccurred())
+            defer cleanup()
+            Expect(args).To(Equal([]string{
+                "-t", "1:Music/Rock",
+                "-t", "2:Music/Jazz",
+                "-t", "3:Classical",
+            }))
+        })
+
+        It("handles targets with special characters", func() {
+            targets := []model.ScanTarget{
+                {LibraryID: 1, FolderPath: "Music/Rock & Roll"},
+                {LibraryID: 2, FolderPath: "Music/Jazz (Modern)"},
+            }
+
+            args, cleanup, err := targetArguments(ctx, targets, argLengthThreshold)
+            Expect(err).ToNot(HaveOccurred())
+            defer cleanup()
+            Expect(args).To(Equal([]string{
+                "-t", "1:Music/Rock & Roll",
+                "-t", "2:Music/Jazz (Modern)",
+            }))
+        })
+    })
+
+    Context("with large target list exceeding threshold", func() {
+        It("returns --target-file argument when exceeding threshold", func() {
+            // Create enough targets to exceed the threshold
+            var targets []model.ScanTarget
+            for i := 1; i <= 600; i++ {
+                targets = append(targets, model.ScanTarget{
+                    LibraryID:  1,
+                    FolderPath: "Music/VeryLongFolderPathToSimulateRealScenario/SubFolder",
+                })
+            }
+
+            args, cleanup, err := targetArguments(ctx, targets, argLengthThreshold)
+            Expect(err).ToNot(HaveOccurred())
+            defer cleanup()
+            Expect(args).To(HaveLen(2))
+            Expect(args[0]).To(Equal("--target-file"))
+
+            // Verify the file exists and has correct format
+            filePath := args[1]
+            Expect(filePath).To(ContainSubstring("navidrome-scan-targets-"))
+            Expect(filePath).To(HaveSuffix(".txt"))
+
+            // Verify file actually exists
+            _, err = os.Stat(filePath)
+            Expect(err).ToNot(HaveOccurred())
+        })
+
+        It("creates temp file with correct format", func() {
+            // Use custom threshold to easily exceed it
+            targets := []model.ScanTarget{
+                {LibraryID: 1, FolderPath: "Music/Rock"},
+                {LibraryID: 2, FolderPath: "Music/Jazz"},
+                {LibraryID: 3, FolderPath: "Classical"},
+            }
+
+            // Set threshold very low to force file usage
+            args, cleanup, err := targetArguments(ctx, targets, 10)
+            Expect(err).ToNot(HaveOccurred())
+            defer cleanup()
+            Expect(args[0]).To(Equal("--target-file"))
+
+            // Verify file exists with correct format
+            filePath := args[1]
+            Expect(filePath).To(ContainSubstring("navidrome-scan-targets-"))
+            Expect(filePath).To(HaveSuffix(".txt"))
+
+            // Verify file content
+            content, err := os.ReadFile(filePath)
+            Expect(err).ToNot(HaveOccurred())
+            lines := strings.Split(strings.TrimSpace(string(content)), "\n")
+            Expect(lines).To(HaveLen(3))
+            Expect(lines[0]).To(Equal("1:Music/Rock"))
+            Expect(lines[1]).To(Equal("2:Music/Jazz"))
+            Expect(lines[2]).To(Equal("3:Classical"))
+        })
+    })
+
+    Context("edge cases", func() {
+        It("handles empty target list", func() {
+            var targets []model.ScanTarget
+
+            args, cleanup, err := targetArguments(ctx, targets, argLengthThreshold)
+            Expect(err).ToNot(HaveOccurred())
+            defer cleanup()
+            Expect(args).To(BeEmpty())
+        })
+
+        It("uses command-line args when exactly at threshold", func() {
+            // Create targets that are exactly at threshold
+            targets := []model.ScanTarget{
+                {LibraryID: 1, FolderPath: "Music"},
+            }
+
+            // Estimate length should be 11 bytes
+            estimatedLength := estimateArgLength(targets)
+
+            args, cleanup, err := targetArguments(ctx, targets, estimatedLength)
+            Expect(err).ToNot(HaveOccurred())
+            defer cleanup()
+            Expect(args).To(Equal([]string{"-t", "1:Music"}))
+        })
+
+        It("uses file when one byte over threshold", func() {
+            targets := []model.ScanTarget{
+                {LibraryID: 1, FolderPath: "Music"},
+            }
+
+            // Set threshold just below the estimated length
+            estimatedLength := estimateArgLength(targets)
+            args, cleanup, err := targetArguments(ctx, targets, estimatedLength-1)
+            Expect(err).ToNot(HaveOccurred())
+            defer cleanup()
+            Expect(args[0]).To(Equal("--target-file"))
+        })
+    })
+})

@@ -76,6 +76,12 @@ func newScanJob(ctx context.Context, ds model.DataStore, cw artwork.CacheWarmer,
         log.Error(ctx, "Error getting fs for library", "library", lib.Name, "path", lib.Path, err)
         return nil, fmt.Errorf("getting fs for library: %w", err)
     }
+
+    // Ensure FullScanInProgress reflects the current scan request.
+    // This is important when resuming an interrupted quick scan as a full scan:
+    // the DB may have FullScanInProgress=false, but we need it true for isOutdated() to work correctly.
+    lib.FullScanInProgress = lib.FullScanInProgress || fullScan
+
     return &scanJob{
         lib: lib,
         fs:  fsys,

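Note: that single assignment encodes the resumption policy exercised by the tests added below. A tiny standalone check (not Navidrome code) of how the persisted FullScanInProgress flag combines with the incoming fullScan request:

package main

import "fmt"

func main() {
    // inProgress: FullScanInProgress as persisted in the DB for the library.
    // requested:  the fullScan flag of the scan that is now starting or resuming.
    cases := []struct{ inProgress, requested bool }{
        {false, false}, // quick scan after a clean state stays quick
        {false, true},  // interrupted quick scan resumed as full scan becomes full
        {true, false},  // interrupted full scan resumed as quick scan stays full
        {true, true},   // full scan resumed as full scan stays full
    }
    for _, c := range cases {
        effective := c.inProgress || c.requested // same expression as in newScanJob
        fmt.Printf("inProgress=%-5v requested=%-5v -> full scan=%v\n", c.inProgress, c.requested, effective)
    }
}
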
@@ -675,6 +675,155 @@ var _ = Describe("Scanner", Ordered, func() {
         })
     })
+
+    Describe("Interrupted scan resumption", func() {
+        var fsys storagetest.FakeFS
+        var help func(...map[string]any) *fstest.MapFile
+
+        BeforeEach(func() {
+            help = template(_t{"albumartist": "The Beatles", "album": "Help!", "year": 1965})
+            fsys = createFS(fstest.MapFS{
+                "The Beatles/Help!/01 - Help!.mp3":            help(track(1, "Help!")),
+                "The Beatles/Help!/02 - The Night Before.mp3": help(track(2, "The Night Before")),
+            })
+        })
+
+        simulateInterruptedScan := func(fullScan bool) {
+            // Call ScanBegin to properly set LastScanStartedAt and FullScanInProgress
+            // This simulates what would happen if a scan was interrupted (ScanBegin called but ScanEnd not)
+            Expect(ds.Library(ctx).ScanBegin(lib.ID, fullScan)).To(Succeed())
+
+            // Verify the update was persisted
+            reloaded, err := ds.Library(ctx).Get(lib.ID)
+            Expect(err).ToNot(HaveOccurred())
+            Expect(reloaded.LastScanStartedAt).ToNot(BeZero())
+            Expect(reloaded.FullScanInProgress).To(Equal(fullScan))
+        }
+
+        Context("when a quick scan is interrupted and resumed with a full scan request", func() {
+            BeforeEach(func() {
+                // First, complete a full scan to populate the database
+                Expect(runScanner(ctx, true)).To(Succeed())
+
+                // Verify files were imported
+                mfs, err := ds.MediaFile(ctx).GetAll()
+                Expect(err).ToNot(HaveOccurred())
+                Expect(mfs).To(HaveLen(2))
+
+                // Now simulate an interrupted quick scan
+                // (LastScanStartedAt is set, FullScanInProgress is false)
+                simulateInterruptedScan(false)
+            })
+
+            It("should rescan all folders when resumed as full scan", func() {
+                // Update a tag without changing the folder hash by preserving the original modtime.
+                // In a quick scan, this wouldn't be detected because the folder hash hasn't changed.
+                // But in a full scan, all files should be re-read regardless of hash.
+                origModTime := fsys.MapFS["The Beatles/Help!/01 - Help!.mp3"].ModTime
+                fsys.UpdateTags("The Beatles/Help!/01 - Help!.mp3", _t{"comment": "updated comment"}, origModTime)
+
+                // Resume with a full scan - this should process all folders
+                // even though folder hashes haven't changed
+                Expect(runScanner(ctx, true)).To(Succeed())
+
+                // Verify the comment was updated (which means the folder was processed and file re-imported)
+                mfs, err := ds.MediaFile(ctx).GetAll(model.QueryOptions{
+                    Filters: squirrel.Eq{"title": "Help!"},
+                })
+                Expect(err).ToNot(HaveOccurred())
+                Expect(mfs).To(HaveLen(1))
+                Expect(mfs[0].Comment).To(Equal("updated comment"))
+            })
+        })
+
+        Context("when a full scan is interrupted and resumed with a quick scan request", func() {
+            BeforeEach(func() {
+                // First, complete a full scan to populate the database
+                Expect(runScanner(ctx, true)).To(Succeed())
+
+                // Verify files were imported
+                mfs, err := ds.MediaFile(ctx).GetAll()
+                Expect(err).ToNot(HaveOccurred())
+                Expect(mfs).To(HaveLen(2))
+
+                // Now simulate an interrupted full scan
+                // (LastScanStartedAt is set, FullScanInProgress is true)
+                simulateInterruptedScan(true)
+            })
+
+            It("should continue as full scan even when quick scan is requested", func() {
+                // Update a tag without changing the folder hash by preserving the original modtime.
+                origModTime := fsys.MapFS["The Beatles/Help!/01 - Help!.mp3"].ModTime
+                fsys.UpdateTags("The Beatles/Help!/01 - Help!.mp3", _t{"comment": "full scan comment"}, origModTime)
+
+                // Request a quick scan - but because a full scan was in progress,
+                // it should continue as a full scan
+                Expect(runScanner(ctx, false)).To(Succeed())
+
+                // Verify the comment was updated (folder was processed despite unchanged hash)
+                mfs, err := ds.MediaFile(ctx).GetAll(model.QueryOptions{
+                    Filters: squirrel.Eq{"title": "Help!"},
+                })
+                Expect(err).ToNot(HaveOccurred())
+                Expect(mfs).To(HaveLen(1))
+                Expect(mfs[0].Comment).To(Equal("full scan comment"))
+            })
+        })
+
+        Context("when no scan was in progress", func() {
+            BeforeEach(func() {
+                // First, complete a full scan to populate the database
+                Expect(runScanner(ctx, true)).To(Succeed())
+
+                // Verify files were imported
+                mfs, err := ds.MediaFile(ctx).GetAll()
+                Expect(err).ToNot(HaveOccurred())
+                Expect(mfs).To(HaveLen(2))
+
+                // Library should have LastScanStartedAt cleared after successful scan
+                updatedLib, err := ds.Library(ctx).Get(lib.ID)
+                Expect(err).ToNot(HaveOccurred())
+                Expect(updatedLib.LastScanStartedAt).To(BeZero())
+                Expect(updatedLib.FullScanInProgress).To(BeFalse())
+            })
+
+            It("should respect the full scan flag for new scans", func() {
+                // Update a tag without changing the folder hash by preserving the original modtime.
+                origModTime := fsys.MapFS["The Beatles/Help!/01 - Help!.mp3"].ModTime
+                fsys.UpdateTags("The Beatles/Help!/01 - Help!.mp3", _t{"comment": "new full scan"}, origModTime)
+
+                // Start a new full scan
+                Expect(runScanner(ctx, true)).To(Succeed())
+
+                // Verify the comment was updated
+                mfs, err := ds.MediaFile(ctx).GetAll(model.QueryOptions{
+                    Filters: squirrel.Eq{"title": "Help!"},
+                })
+                Expect(err).ToNot(HaveOccurred())
+                Expect(mfs).To(HaveLen(1))
+                Expect(mfs[0].Comment).To(Equal("new full scan"))
+            })
+
+            It("should not rescan unchanged folders during quick scan", func() {
+                // Update a tag without changing the folder hash by preserving the original modtime.
+                // This simulates editing tags in a file (e.g., with a tag editor) without modifying its timestamp.
+                // In a quick scan, this should NOT be detected because the folder hash remains unchanged.
+                origModTime := fsys.MapFS["The Beatles/Help!/01 - Help!.mp3"].ModTime
+                fsys.UpdateTags("The Beatles/Help!/01 - Help!.mp3", _t{"comment": "should not appear"}, origModTime)
+
+                // Do a quick scan - unchanged folders should be skipped
+                Expect(runScanner(ctx, false)).To(Succeed())
+
+                // Verify the comment was NOT updated (folder was skipped)
+                mfs, err := ds.MediaFile(ctx).GetAll(model.QueryOptions{
+                    Filters: squirrel.Eq{"title": "Help!"},
+                })
+                Expect(err).ToNot(HaveOccurred())
+                Expect(mfs).To(HaveLen(1))
+                Expect(mfs[0].Comment).To(BeEmpty())
+            })
+        })
+    })

     Describe("RefreshStats", func() {
         var refreshStatsCalls []bool
         var fsys storagetest.FakeFS

@@ -107,7 +107,7 @@ func secureMiddleware() func(http.Handler) http.Handler {
         FrameDeny:               true,
         ReferrerPolicy:          "same-origin",
         PermissionsPolicy:       "autoplay=(), camera=(), microphone=(), usb=()",
-        CustomFrameOptionsValue: conf.Server.HTTPSecurityHeaders.CustomFrameOptionsValue,
+        CustomFrameOptionsValue: conf.Server.HTTPHeaders.FrameOptions,
         //ContentSecurityPolicy: "script-src 'self' 'unsafe-inline'",
     })
     return sec.Handler

@@ -118,11 +118,7 @@ func (api *Router) routes() http.Handler {
         hr(r, "getAlbumList2", api.GetAlbumList2)
         h(r, "getStarred", api.GetStarred)
         h(r, "getStarred2", api.GetStarred2)
-        if conf.Server.EnableNowPlaying {
-            h(r, "getNowPlaying", api.GetNowPlaying)
-        } else {
-            h501(r, "getNowPlaying")
-        }
+        h(r, "getNowPlaying", api.GetNowPlaying)
         h(r, "getRandomSongs", api.GetRandomSongs)
         h(r, "getSongsByGenre", api.GetSongsByGenre)
     })

@@ -2,11 +2,13 @@ package subsonic

 import (
     "net/http"
+    "strings"

     "github.com/navidrome/navidrome/conf"
     "github.com/navidrome/navidrome/model"
     "github.com/navidrome/navidrome/model/request"
     "github.com/navidrome/navidrome/server/subsonic/responses"
+    "github.com/navidrome/navidrome/utils/req"
     "github.com/navidrome/navidrome/utils/slice"
 )

@@ -35,7 +37,13 @@ func (api *Router) GetUser(r *http.Request) (*responses.Subsonic, error) {
     if !ok {
         return nil, newError(responses.ErrorGeneric, "Internal error")
     }
+    username, err := req.Params(r).String("username")
+    if err != nil {
+        return nil, err
+    }
+    if !strings.EqualFold(username, loggedUser.UserName) {
+        return nil, newError(responses.ErrorAuthorizationFail)
+    }
     response := newResponse()
     user := buildUserResponse(loggedUser)
     response.User = &user

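Note: the net effect of this hunk is that getUser now requires a username parameter and only answers for the authenticated user's own name, compared case-insensitively. A minimal sketch of that check in isolation (plain Go with the router and request plumbing stubbed out, not the actual handler):

package main

import (
    "errors"
    "fmt"
    "strings"
)

var errNotAuthorized = errors.New("user is not authorized for this operation")

// canGetUser mirrors the new guard in GetUser: the requested username must match
// the logged-in user's name, ignoring case; anything else is rejected.
func canGetUser(loggedUser, requestedUser string) error {
    if requestedUser == "" {
        return errors.New("missing parameter: 'username'")
    }
    if !strings.EqualFold(requestedUser, loggedUser) {
        return errNotAuthorized
    }
    return nil
}

func main() {
    fmt.Println(canGetUser("testuser", "TestUser"))    // <nil>: same user, case-insensitive
    fmt.Println(canGetUser("testuser", "anotheruser")) // not authorized
    fmt.Println(canGetUser("testuser", ""))            // missing parameter
}
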
@@ -1,7 +1,7 @@
 package subsonic

 import (
-    "context"
+    "errors"
     "net/http/httptest"

     "github.com/navidrome/navidrome/conf"
@@ -43,8 +43,8 @@ var _ = Describe("Users", func() {
         }

         // Create request with user in context
-        req := httptest.NewRequest("GET", "/rest/getUser", nil)
-        ctx := request.WithUser(context.Background(), testUser)
+        req := httptest.NewRequest("GET", "/rest/getUser?username=testuser", nil)
+        ctx := request.WithUser(GinkgoT().Context(), testUser)
         req = req.WithContext(ctx)

         userResponse, err1 := router.GetUser(req)
@@ -116,4 +116,60 @@ var _ = Describe("Users", func() {
             Expect(response.Folder).To(ContainElements(int32(1), int32(2), int32(5)))
         })
     })
+
+    Describe("GetUser authorization", func() {
+        It("should allow user to request their own information", func() {
+            req := httptest.NewRequest("GET", "/rest/getUser?username=testuser", nil)
+            ctx := request.WithUser(GinkgoT().Context(), testUser)
+            req = req.WithContext(ctx)
+
+            response, err := router.GetUser(req)
+
+            Expect(err).ToNot(HaveOccurred())
+            Expect(response).ToNot(BeNil())
+            Expect(response.User).ToNot(BeNil())
+            Expect(response.User.Username).To(Equal("testuser"))
+        })
+
+        It("should deny user from requesting another user's information", func() {
+            req := httptest.NewRequest("GET", "/rest/getUser?username=anotheruser", nil)
+            ctx := request.WithUser(GinkgoT().Context(), testUser)
+            req = req.WithContext(ctx)
+
+            response, err := router.GetUser(req)
+
+            Expect(err).To(HaveOccurred())
+            Expect(response).To(BeNil())
+
+            var subErr subError
+            ok := errors.As(err, &subErr)
+            Expect(ok).To(BeTrue())
+            Expect(subErr.code).To(Equal(responses.ErrorAuthorizationFail))
+        })
+
+        It("should return error when username parameter is missing", func() {
+            req := httptest.NewRequest("GET", "/rest/getUser", nil)
+            ctx := request.WithUser(GinkgoT().Context(), testUser)
+            req = req.WithContext(ctx)
+
+            response, err := router.GetUser(req)
+
+            Expect(err).To(MatchError("missing parameter: 'username'"))
+            Expect(response).To(BeNil())
+        })
+
+        It("should return error when user context is missing", func() {
+            req := httptest.NewRequest("GET", "/rest/getUser?username=testuser", nil)
+
+            response, err := router.GetUser(req)
+
+            Expect(err).To(HaveOccurred())
+            Expect(response).To(BeNil())
+
+            var subErr subError
+            ok := errors.As(err, &subErr)
+            Expect(ok).To(BeTrue())
+            Expect(subErr.code).To(Equal(responses.ErrorGeneric))
+        })
+    })
 })

@@ -25,6 +25,7 @@ type MockDataStore struct {
     MockedTranscoding model.TranscodingRepository
     MockedUserProps model.UserPropsRepository
     MockedScrobbleBuffer model.ScrobbleBufferRepository
+    MockedScrobble model.ScrobbleRepository
     MockedRadio model.RadioRepository
     scrobbleBufferMu sync.Mutex
     repoMu sync.Mutex
@@ -208,12 +209,23 @@ func (db *MockDataStore) ScrobbleBuffer(ctx context.Context) model.ScrobbleBuffe
         if db.RealDS != nil {
             db.MockedScrobbleBuffer = db.RealDS.ScrobbleBuffer(ctx)
         } else {
-            db.MockedScrobbleBuffer = CreateMockedScrobbleBufferRepo()
+            db.MockedScrobbleBuffer = &MockedScrobbleBufferRepo{}
         }
     }
     return db.MockedScrobbleBuffer
 }

+func (db *MockDataStore) Scrobble(ctx context.Context) model.ScrobbleRepository {
+    if db.MockedScrobble == nil {
+        if db.RealDS != nil {
+            db.MockedScrobble = db.RealDS.Scrobble(ctx)
+        } else {
+            db.MockedScrobble = &MockScrobbleRepo{ctx: ctx}
+        }
+    }
+    return db.MockedScrobble
+}
+
 func (db *MockDataStore) Radio(ctx context.Context) model.RadioRepository {
     if db.MockedRadio == nil {
         if db.RealDS != nil {

tests/mock_scrobble_repo.go (new file, 24 lines)
@@ -0,0 +1,24 @@
+package tests
+
+import (
+    "context"
+    "time"
+
+    "github.com/navidrome/navidrome/model"
+    "github.com/navidrome/navidrome/model/request"
+)
+
+type MockScrobbleRepo struct {
+    RecordedScrobbles []model.Scrobble
+    ctx               context.Context
+}
+
+func (m *MockScrobbleRepo) RecordScrobble(fileID string, submissionTime time.Time) error {
+    user, _ := request.UserFrom(m.ctx)
+    m.RecordedScrobbles = append(m.RecordedScrobbles, model.Scrobble{
+        MediaFileID:    fileID,
+        UserID:         user.ID,
+        SubmissionTime: submissionTime,
+    })
+    return nil
+}

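Note: a hypothetical illustration of how a test could use a mock of this shape: call the code under test (which ends up invoking RecordScrobble) and then inspect RecordedScrobbles. This sketch is self-contained with stand-in types; it is not taken from the Navidrome test suite, and the real MockScrobbleRepo resolves the user from the request context rather than from a field like the one used here:

package main

import (
    "fmt"
    "time"
)

// scrobble and mockScrobbleRepo are stand-ins for model.Scrobble and tests.MockScrobbleRepo.
type scrobble struct {
    MediaFileID    string
    UserID         string
    SubmissionTime time.Time
}

type mockScrobbleRepo struct {
    RecordedScrobbles []scrobble
    userID            string // the real mock derives this from the request context
}

func (m *mockScrobbleRepo) RecordScrobble(fileID string, submissionTime time.Time) error {
    m.RecordedScrobbles = append(m.RecordedScrobbles, scrobble{
        MediaFileID:    fileID,
        UserID:         m.userID,
        SubmissionTime: submissionTime,
    })
    return nil
}

func main() {
    repo := &mockScrobbleRepo{userID: "user-1"}

    // Code under test would call RecordScrobble; the test then inspects RecordedScrobbles.
    _ = repo.RecordScrobble("mediafile-123", time.Now())

    fmt.Println(len(repo.RecordedScrobbles))           // 1
    fmt.Println(repo.RecordedScrobbles[0].MediaFileID) // mediafile-123
}
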
ui/package-lock.json (generated, 7 lines)
@@ -7173,10 +7173,11 @@
       "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="
     },
     "node_modules/js-yaml": {
-      "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz",
-      "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==",
+      "version": "4.1.1",
+      "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz",
+      "integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==",
       "dev": true,
+      "license": "MIT",
       "dependencies": {
         "argparse": "^2.0.1"
       },

@@ -1,12 +1,15 @@
 import React from 'react'
-import { TopToolbar, ExportButton } from 'react-admin'
+import { TopToolbar, ExportButton, useListContext } from 'react-admin'
 import DeleteMissingFilesButton from './DeleteMissingFilesButton.jsx'

-const MissingListActions = (props) => (
-  <TopToolbar {...props}>
-    <ExportButton />
-    <DeleteMissingFilesButton deleteAll />
-  </TopToolbar>
-)
+const MissingListActions = (props) => {
+  const { total } = useListContext()
+  return (
+    <TopToolbar {...props}>
+      <ExportButton maxResults={total} />
+      <DeleteMissingFilesButton deleteAll />
+    </TopToolbar>
+  )
+}

 export default MissingListActions

@@ -16,6 +16,7 @@ import {
   usePermissions,
 } from 'react-admin'
 import Switch from '@material-ui/core/Switch'
+import { makeStyles } from '@material-ui/core/styles'
 import { useMediaQuery } from '@material-ui/core'
 import {
   DurationField,
@@ -28,6 +29,12 @@ import {
 import PlaylistListActions from './PlaylistListActions'
 import ChangePublicStatusButton from './ChangePublicStatusButton'

+const useStyles = makeStyles((theme) => ({
+  button: {
+    color: theme.palette.type === 'dark' ? 'white' : undefined,
+  },
+}))
+
 const PlaylistFilter = (props) => {
   const { permissions } = usePermissions()
   return (
@@ -112,13 +119,24 @@ const ToggleAutoImport = ({ resource, source }) => {
   ) : null
 }

-const PlaylistListBulkActions = (props) => (
-  <>
-    <ChangePublicStatusButton public={true} {...props} />
-    <ChangePublicStatusButton public={false} {...props} />
-    <BulkDeleteButton {...props} />
-  </>
-)
+const PlaylistListBulkActions = (props) => {
+  const classes = useStyles()
+  return (
+    <>
+      <ChangePublicStatusButton
+        public={true}
+        {...props}
+        className={classes.button}
+      />
+      <ChangePublicStatusButton
+        public={false}
+        {...props}
+        className={classes.button}
+      />
+      <BulkDeleteButton {...props} className={classes.button} />
+    </>
+  )
+}

 const PlaylistList = (props) => {
   const isXsmall = useMediaQuery((theme) => theme.breakpoints.down('xs'))

@@ -1,4 +1,10 @@
 const stylesheet = `
+.react-jinke-music-player-main .music-player-panel svg {
+  color: #eee
+}
+.react-jinke-music-player-main .music-player-panel button:disabled svg {
+  opacity: 0.3
+}
 .react-jinke-music-player-main svg:active, .react-jinke-music-player-main svg:hover {
   color: #D60017
 }
@@ -27,7 +33,6 @@ const stylesheet = `
 .react-jinke-music-player-main .audio-item.playing .player-singer {
   color: #ff4e6b !important
 }
-.react-jinke-music-player-main .lyric-btn,
 .react-jinke-music-player-main .lyric-btn-active svg{
   color: #ff4e6b !important
 }

@@ -194,7 +194,12 @@ export default {
     },
     RaDeleteWithConfirmButton: {
       deleteButton: {
-        color: 'unset',
+        color: '#fff',
+      },
+    },
+    RaBulkDeleteWithUndoButton: {
+      deleteButton: {
+        color: '#fff',
       },
     },
     RaPaginationActions: {