Compare commits

...

56 Commits

Author SHA1 Message Date
cafecitopuro
e4dc26b53e
Merge 192a706ea321ad30a32435d823d01be0c62da779 into 0c3012bbbdf232e3aeffd461ee05422e6f83829d 2025-11-20 02:42:08 -05:00
Deluan
0c3012bbbd chore(deps): update Go dependencies to latest versions
Signed-off-by: Deluan <deluan@navidrome.org>
2025-11-19 22:05:46 -05:00
Deluan
353aff2c88 fix(lastfm): ignore artist placeholder image.
Fix #4702

Signed-off-by: Deluan <deluan@navidrome.org>
2025-11-19 20:49:29 -05:00
Deluan
c873466e5b fix(scanner): reset watcher trigger timer for debounce on notification receipt
Signed-off-by: Deluan <deluan@navidrome.org>
2025-11-19 20:24:13 -05:00
Deluan Quintão
3d1946e31c
fix(plugins): avoid Chi RouteContext pollution by using http.NewRequest (#4713)
Signed-off-by: Deluan <deluan@navidrome.org>
2025-11-19 20:17:01 -05:00
Dongeun
6fb228bc10
fix(ui): fix translation display for library list terms (#4712) 2025-11-19 13:42:33 -05:00
Kendall Garner
32e1313fc6
ci: bump plugin compilation timeout for regressions (#4690) 2025-11-16 13:46:32 -05:00
Deluan
489d5c7760 test: update make test-race target to use PKG variable for improved flexibility
Signed-off-by: Deluan <deluan@navidrome.org>
2025-11-16 13:41:22 -05:00
Kendall Garner
0f1ede2581
fix(scanner): specify exact table to use for missing mediafile filter (#4689)
In `getAffectedAlbumIDs`, when one or more IDs is added, it adds a filter `"id": ids`.
This filter is ambiguous though, because the `getAll` query joins with library table, which _also_ has an `id` field.
Clarify this by adding the table name to the filter.

Note that this was not caught in testing, as it only uses mock db.
2025-11-16 12:54:28 -05:00
Deluan Quintão
395a36e10f
fix(ui): fix library selection state for single-library users (#4686)
* fix: validate library selection state for single-library users

Fixes issues where users with a single library see no content when
selectedLibraries in localStorage contains library IDs they no longer
have access to (e.g., after removing libraries or switching accounts).

Changes:
- libraryReducer: Validate selectedLibraries when SET_USER_LIBRARIES
  is dispatched, filtering out invalid IDs and resetting to empty for
  single-library users (empty means 'all accessible libraries')
- wrapperDataProvider: Add defensive validation in getSelectedLibraries
  to check against current user libraries before applying filters
- Add comprehensive test coverage for reducer validation logic

Fixes #4553, #4508, #4569

* style: format code with prettier
2025-11-15 17:42:28 -05:00
Deluan
0161a0958c fix(ui): add CreateButton back to LibraryListActions
Signed-off-by: Deluan <deluan@navidrome.org>
2025-11-15 17:31:37 -05:00
Deluan Quintão
28d5299ffc
feat(scanner): implement selective folder scanning and file system watcher improvements (#4674)
* feat: Add selective folder scanning capability

Implement targeted scanning of specific library/folder pairs without
full recursion. This enables efficient rescanning of individual folders
when changes are detected, significantly reducing scan time for large
libraries.

Key changes:
- Add ScanTarget struct and ScanFolders API to Scanner interface
- Implement CLI flag --targets for specifying libraryID:folderPath pairs
- Add FolderRepository.GetByPaths() for batch folder info retrieval
- Create loadSpecificFolders() for non-recursive directory loading
- Scope GC operations to affected libraries only (with TODO for full impl)
- Add comprehensive tests for selective scanning behavior

The selective scan:
- Only processes specified folders (no subdirectory recursion)
- Maintains library isolation
- Runs full maintenance pipeline scoped to affected libraries
- Supports both full and quick scan modes

Examples:
  navidrome scan --targets "1:Music/Rock,1:Music/Jazz"
  navidrome scan --full --targets "2:Classical"

* feat(folder): replace GetByPaths with GetFolderUpdateInfo for improved folder updates retrieval

Signed-off-by: Deluan <deluan@navidrome.org>

* test: update parseTargets test to handle folder names with spaces

Signed-off-by: Deluan <deluan@navidrome.org>

* refactor(folder): remove unused LibraryPath struct and update GC logging message

Signed-off-by: Deluan <deluan@navidrome.org>

* refactor(folder): enhance external scanner to support target-specific scanning

Signed-off-by: Deluan <deluan@navidrome.org>

* refactor(scanner): simplify scanner methods

Signed-off-by: Deluan <deluan@navidrome.org>

* feat(watcher): implement folder scanning notifications with deduplication

Signed-off-by: Deluan <deluan@navidrome.org>

* refactor(watcher): add resolveFolderPath function for testability

Signed-off-by: Deluan <deluan@navidrome.org>

* feat(watcher): implement path ignoring based on .ndignore patterns

Signed-off-by: Deluan <deluan@navidrome.org>

* refactor(scanner): implement IgnoreChecker for managing .ndignore patterns

Signed-off-by: Deluan <deluan@navidrome.org>

* refactor(ignore_checker): rename scanner to lineScanner for clarity

Signed-off-by: Deluan <deluan@navidrome.org>

* refactor(scanner): enhance ScanTarget struct with String method for better target representation

Signed-off-by: Deluan <deluan@navidrome.org>

* fix(scanner): validate library ID to prevent negative values

Signed-off-by: Deluan <deluan@navidrome.org>

* refactor(scanner): simplify GC method by removing library ID parameter

Signed-off-by: Deluan <deluan@navidrome.org>

* feat(scanner): update folder scanning to include all descendants of specified folders

Signed-off-by: Deluan <deluan@navidrome.org>

* feat(subsonic): allow selective scan in the /startScan endpoint

Signed-off-by: Deluan <deluan@navidrome.org>

* refactor(scanner): update CallScan to handle specific library/folder pairs

Signed-off-by: Deluan <deluan@navidrome.org>

* refactor(scanner): streamline scanning logic by removing scanAll method

Signed-off-by: Deluan <deluan@navidrome.org>

* test: enhance mockScanner for thread safety and improve test reliability

Signed-off-by: Deluan <deluan@navidrome.org>

* refactor(scanner): move scanner.ScanTarget to model.ScanTarget

Signed-off-by: Deluan <deluan@navidrome.org>

* refactor: move scanner types to model, implement MockScanner

Signed-off-by: Deluan <deluan@navidrome.org>

* refactor(scanner): update scanner interface and implementations to use model.Scanner

Signed-off-by: Deluan <deluan@navidrome.org>

* refactor(folder_repository): normalize target path handling by using filepath.Clean

Signed-off-by: Deluan <deluan@navidrome.org>

* test(folder_repository): add comprehensive tests for folder retrieval and child exclusion

Signed-off-by: Deluan <deluan@navidrome.org>

* refactor(scanner): simplify selective scan logic using slice.Filter

Signed-off-by: Deluan <deluan@navidrome.org>

* refactor(scanner): streamline phase folder and album creation by removing unnecessary library parameter

Signed-off-by: Deluan <deluan@navidrome.org>

* refactor(scanner): move initialization logic from phase_1 to the scanner itself

Signed-off-by: Deluan <deluan@navidrome.org>

* refactor(tests): rename selective scan test file to scanner_selective_test.go

Signed-off-by: Deluan <deluan@navidrome.org>

* feat(configuration): add DevSelectiveWatcher configuration option

Signed-off-by: Deluan <deluan@navidrome.org>

* feat(watcher): enhance .ndignore handling for folder deletions and file changes

Signed-off-by: Deluan <deluan@navidrome.org>

* docs(scanner): comments

Signed-off-by: Deluan <deluan@navidrome.org>

* refactor(scanner): enhance walkDirTree to support target folder scanning

Signed-off-by: Deluan <deluan@navidrome.org>

* fix(scanner, watcher): handle errors when pushing ignore patterns for folders

Signed-off-by: Deluan <deluan@navidrome.org>

* Update scanner/phase_1_folders.go

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

* refactor(scanner): replace parseTargets function with direct call to scanner.ParseTargets

Signed-off-by: Deluan <deluan@navidrome.org>

* test(scanner): add tests for ScanBegin and ScanEnd functionality

Signed-off-by: Deluan <deluan@navidrome.org>

* fix(library): update PRAGMA optimize to check table sizes without ANALYZE

Signed-off-by: Deluan <deluan@navidrome.org>

* test(scanner): refactor tests

Signed-off-by: Deluan <deluan@navidrome.org>

* feat(ui): add selective scan options and update translations

Signed-off-by: Deluan <deluan@navidrome.org>

* feat(ui): add quick and full scan options for individual libraries

Signed-off-by: Deluan <deluan@navidrome.org>

* feat(ui): add Scan buttons to the LibraryList

Signed-off-by: Deluan <deluan@navidrome.org>

* feat(scan): update scanning parameters from 'path' to 'target' for selective scans.

* refactor(scan): move ParseTargets function to model package

* test(scan): suppress unused return value from SetUserLibraries in tests

* feat(gc): enhance garbage collection to support selective library purging

Signed-off-by: Deluan <deluan@navidrome.org>

* fix(scanner): prevent race condition when scanning deleted folders

When the watcher detects changes in a folder that gets deleted before
the scanner runs (due to the 10-second delay), the scanner was
prematurely removing these folders from the tracking map, preventing
them from being marked as missing.

The issue occurred because `newFolderEntry` was calling `popLastUpdate`
before verifying the folder actually exists on the filesystem.

Changes:
- Move fs.Stat check before newFolderEntry creation in loadDir to
  ensure deleted folders remain in lastUpdates for finalize() to handle
- Add early existence check in walkDirTree to skip non-existent target
  folders with a warning log
- Add unit test verifying non-existent folders aren't removed from
  lastUpdates prematurely
- Add integration test for deleted folder scenario with ScanFolders

Fixes the issue where deleting entire folders (e.g., /music/AC_DC)
wouldn't mark tracks as missing when using selective folder scanning.

* refactor(scan): streamline folder entry creation and update handling

Signed-off-by: Deluan <deluan@navidrome.org>

* feat(scan): add '@Recycle' (QNAP) to ignored directories list

Signed-off-by: Deluan <deluan@navidrome.org>

* fix(log): improve thread safety in logging level management

* test(scan): move unit tests for ParseTargets function

Signed-off-by: Deluan <deluan@navidrome.org>

* review

Signed-off-by: Deluan <deluan@navidrome.org>

---------

Signed-off-by: Deluan <deluan@navidrome.org>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: deluan <deluan.quintao@mechanical-orchard.com>
2025-11-14 22:15:43 -05:00
Deluan
bca76069c3 fix(server): prioritize artist base image filenames over numeric suffixes and add tests for sorting
Signed-off-by: Deluan <deluan@navidrome.org>
2025-11-14 13:15:50 -05:00
Deluan Quintão
a10f839221
fix(server): prefer cover.jpg over cover.1.jpg (#4684)
* fix(reader): prioritize cover art selection by base filename without numeric suffixes

Signed-off-by: Deluan <deluan@navidrome.org>

* fix(reader): update image file comparison to use natural sorting and prioritize files without numeric suffixes

Signed-off-by: Deluan <deluan@navidrome.org>

* refactor(reader): simplify comparison, add case-sensitivity test case

Signed-off-by: Deluan <deluan@navidrome.org>

---------

Signed-off-by: Deluan <deluan@navidrome.org>
2025-11-14 12:19:10 -05:00
Deluan
2385c8a548 test: mock formatFullDate for consistent test results 2025-11-13 18:46:06 -05:00
Deluan
9b3bdc8a8b fix(ui): adjust margins for bulk actions buttons in Spotify-ish and Ligera
Signed-off-by: Deluan <deluan@navidrome.org>
2025-11-13 18:30:44 -05:00
Deluan
f939ad84f3 fix(ui): increase contrast of button text in the Dark theme
Signed-off-by: Deluan <deluan@navidrome.org>
2025-11-12 16:17:41 -05:00
Deluan
c3e8c67116 feat(ui): update totalSize formatting to display two decimal places
Signed-off-by: Deluan <deluan@navidrome.org>
2025-11-12 13:23:18 -05:00
Deluan
d57a8e6d84 refactor(scanner): refactor legacyReleaseDate logic and add tests for date mapping
Signed-off-by: Deluan <deluan@navidrome.org>
2025-11-12 13:11:33 -05:00
Deluan
73ec89e1af feat(ui): add SizeField to display total size in LibraryList
Signed-off-by: Deluan <deluan@navidrome.org>
2025-11-12 13:01:17 -05:00
Rob Emery
131c0c565c
feat(insights): detecting packaging method (#3841)
* Adding environment variable so that navidrome can detect
if it's running as an MSI install for insights

* Renaming to be ND_PACKAGE_TYPE so we can reuse this for the
.deb/.rpm stats as well

* Packaged implies a bool, this is a description so it should
be packaging or just package imo

* wixl currently doesn't support <Environment> so I'm swapping out
to a file next-door to the configuration file, we should be
able to reuse this for deb/rpm as well

* Using a file we should be able to add support for linux like this
also

* MSI should copy the package into place for us, it's not a KeyPath
as older versions won't have it, so its presence doesn't indicate
the installed status of the package

* OK this doesn't exist, need to find another way to do it

* package to .package and moving to the datadir

* fix(scanner): better log message when AutoImportPlaylists is disabled

Fix #3861

Signed-off-by: Deluan <deluan@navidrome.org>

* fix(scanner): support ID3v2 embedded images in WAV files

Fix #3867

Signed-off-by: Deluan <deluan@navidrome.org>

* feat(ui): show bitDepth in song info dialog

Signed-off-by: Deluan <deluan@navidrome.org>

* fix(server): don't break if the ND_CONFIGFILE does not exist

Signed-off-by: Deluan <deluan@navidrome.org>

* feat(docker): automatically loads a navidrome.toml file from /data, if available

Signed-off-by: Deluan <deluan@navidrome.org>

* feat(server): custom ArtistJoiner config (#3873)

* feat(server): custom ArtistJoiner config

Signed-off-by: Deluan <deluan@navidrome.org>

* refactor(ui): organize ArtistLinkField, add tests

Signed-off-by: Deluan <deluan@navidrome.org>

* feat(ui): use display artist

* feat(ui): use display artist

Signed-off-by: Deluan <deluan@navidrome.org>

---------

Signed-off-by: Deluan <deluan@navidrome.org>

* chore: remove some BFR-related TODOs that are not valid anymore

Signed-off-by: Deluan <deluan@navidrome.org>

* chore: remove more outdated TODOs

Signed-off-by: Deluan <deluan@navidrome.org>

* fix(scanner): elapsed time for folder processing is wrong in the logs

Signed-off-by: Deluan <deluan@navidrome.org>

* Should be able to reuse this mechanism with deb and rpm, I think
it would be nice to know which specific one it is without guessing
based on /etc/debian_version or something; but it doesn't look like
that is exposed by goreleaser into an env or anything :/

* Need to reference the installed file and I think Id's don't require []

* Need to add into the root directory for this to work

* That was not deliberately removed

* feat: add RPM and DEB package configuration files for Navidrome

Signed-off-by: Deluan <deluan@navidrome.org>

* Don't need this as goreleaser will sort it out

---------

Signed-off-by: Deluan <deluan@navidrome.org>
Co-authored-by: Deluan Quintão <deluan@navidrome.org>
2025-11-09 12:57:55 -05:00
Kendall Garner
53ff33866d
feat(subsonic): implement indexBasedQueue extension (#4244)
* redo this whole PR, but cleaner now that better errata is in

* update play queue types
2025-11-09 12:52:05 -05:00
Deluan
508670ecfb Revert "feat(ui): add Vietnamese localization for the application"
This reverts commit 9621a40f29a507b1e450da31a32134cdc7a9cf2a.
2025-11-09 12:41:25 -05:00
Deluan
c369224597 test: fix flaky CacheWarmer deduplication test
Fixed race condition in the 'deduplicates items in buffer' test where the
background worker goroutine could process and clear the buffer before the
test could verify its contents. Added fc.SetReady(false) to keep the cache
unavailable during the test, ensuring buffered items remain in memory for
verification. This matches the pattern already used in the 'adds multiple
items to buffer' test.

Signed-off-by: Deluan <deluan@navidrome.org>
2025-11-09 12:19:28 -05:00
Deluan
ff583970f0 chore(deps): update golang.org/x/sync to v0.18.0 and golang.org/x/sys to v0.38.0
Signed-off-by: Deluan <deluan@navidrome.org>
2025-11-08 21:05:29 -05:00
Deluan
38ca65726a chore(deps): update wazero to version 1.10.0 and clean up go.mod
Signed-off-by: Deluan <deluan@navidrome.org>
2025-11-08 21:04:20 -05:00
Deluan Quintão
5ce6e16d96
fix: album statistics not updating after deleting missing files (#4668)
* feat: add album refresh functionality after deleting missing files

Implemented RefreshAlbums method in AlbumRepository to recalculate album attributes (size, duration, song count) from their constituent media files. This method processes albums in batches to maintain efficiency with large datasets.

Added integration in deleteMissingFiles to automatically refresh affected albums in the background after deleting missing media files, ensuring album statistics remain accurate. Includes comprehensive test coverage for various scenarios including single/multiple albums, empty batches, and large batch processing.

Signed-off-by: Deluan <deluan@navidrome.org>

* refactor: extract missing files deletion into reusable service layer

Extracted inline deletion logic from server/nativeapi/missing.go into a new core.MissingFiles service interface and implementation. This provides better separation of concerns and testability.

The MissingFiles service handles:
- Deletion of specific or all missing files via transaction
- Garbage collection after deletion
- Extraction of affected album IDs from missing files
- Background refresh of artist and album statistics

The deleteMissingFiles HTTP handler now simply delegates to the service, removing 70+ lines of inline logic. All deletion, transaction, and stat refresh logic is now centralized in core/missing_files.go.

Updated dependency injection to provide MissingFiles service to the native API router. Renamed receiver variable from 'n' to 'api' throughout native_api.go for consistency.

* refactor: consolidate maintenance operations into unified service

Consolidate MissingFiles and RefreshAlbums functionality into a new Maintenance service. This refactoring:
- Creates core.Maintenance interface combining DeleteMissingFiles, DeleteAllMissingFiles, and RefreshAlbums methods
- Moves RefreshAlbums logic from AlbumRepository persistence layer to core Maintenance service
- Removes MissingFiles interface and moves its implementation to maintenanceService
- Updates all references in wire providers, native API router, and handlers
- Removes RefreshAlbums interface method from AlbumRepository model
- Improves separation of concerns by centralizing maintenance operations in the core domain

This change provides a cleaner API and better organization of maintenance-related database operations.

* refactor: remove MissingFiles interface and update references

Remove obsolete MissingFiles interface and its references:
- Delete core/missing_files.go and core/missing_files_test.go
- Remove RefreshAlbums method from AlbumRepository interface and implementation
- Remove RefreshAlbums tests from AlbumRepository test suite
- Update wire providers to use NewMaintenance instead of NewMissingFiles
- Update native API router to use Maintenance service
- Update missing.go handler to use Maintenance interface

All functionality is now consolidated in the core.Maintenance service.

Signed-off-by: Deluan <deluan@navidrome.org>

* refactor: rename RefreshAlbums to refreshAlbums and update related calls

Signed-off-by: Deluan <deluan@navidrome.org>

* refactor: optimize album refresh logic and improve test coverage

Signed-off-by: Deluan <deluan@navidrome.org>

* refactor: simplify logging setup in tests with reusable LogHook function

Signed-off-by: Deluan <deluan@navidrome.org>

* refactor: add synchronization to logger and maintenance service for thread safety

Signed-off-by: Deluan <deluan@navidrome.org>

---------

Signed-off-by: Deluan <deluan@navidrome.org>
2025-11-08 20:11:00 -05:00
Deluan Quintão
69527085db
fix(ui): resolve transparent dropdown background in Ligera theme (#4665)
Fixed the multi-library selector dropdown background in the Ligera theme by changing the palette.background.paper value from 'inherit' to bLight['500'] ('#ffffff'). This ensures the dropdown has a solid white background that properly overlays content, making the library selection options clearly readable.

Closes #4502
2025-11-08 12:47:02 -05:00
Nagi
9bb933c0d6
fix(ui): fix Playlist Italian translation (#4642)
In Italian, we usually use "Playlist" rather than "Scalette/a". "Scalette/a" refers to other functions or objects.
2025-11-07 18:41:23 -05:00
Deluan Quintão
6f4fa76772
fix(ui): update Galician, Dutch, Thai translations from POEditor (#4416)
Co-authored-by: navidrome-bot <navidrome-bot@navidrome.org>
2025-11-07 18:20:39 -05:00
Deluan
9621a40f29 feat(ui): add Vietnamese localization for the application 2025-11-07 18:13:46 -05:00
DDinghoya
df95dffa74
fix(ui): update ko.json (#4443)
* Update ko.json

* Update ko.json

Removed one of the entries as below

"shuffleAll": "모두 셔플"

* Update ko.json

* Update ko.json

* Update ko.json

* Update ko.json

* Update ko.json
2025-11-07 18:10:38 -05:00
York
a59b59192a
fix(ui): update zh-Hant.json (#4454)
* Update zh-Hant.json

Updated and optimized Traditional Chinese translation.

* Update zh-Hant.json

Updated and optimized Traditional Chinese translation.

* Update zh-Hant.json

Updated and optimized Traditional Chinese translation.
2025-11-07 18:06:41 -05:00
Deluan Quintão
4f7dc105b0
fix(ui): correct track ordering when sorting playlists by album (#4657)
* fix(deps): update wazero dependencies to resolve issues

Signed-off-by: Deluan <deluan@navidrome.org>

* fix(deps): update wazero dependency to latest version

Signed-off-by: Deluan <deluan@navidrome.org>

* fix: correct track ordering when sorting playlists by album

Fixed issue #3177 where tracks within multi-disc albums were displayed out of order when sorting playlists by album. The playlist track repository was using an incomplete sort mapping that only sorted by album name and artist, missing the critical disc_number and track_number fields.

Changed the album sort mapping in playlist_track_repository from:
  order_album_name, order_album_artist_name
to:
  order_album_name, order_album_artist_name, disc_number, track_number, order_artist_name, title

This now matches the sorting used in the media file repository, ensuring tracks are sorted by:
1. Album name (groups by album)
2. Album artist (handles compilations)
3. Disc number (multi-disc album discs in order)
4. Track number (tracks within disc in order)
5. Artist name and title (edge cases with missing metadata)

Added comprehensive tests with a multi-disc test album to verify correct sorting behavior.

* chore: sync go.mod and go.sum with master

* chore: align playlist album sort order with mediafile_repository (use album_id)

* fix: clean up test playlist to prevent state leakage in randomized test runs

---------

Signed-off-by: Deluan <deluan@navidrome.org>
2025-11-06 16:50:54 -05:00
Deluan Quintão
e918e049e2
fix: update wazero dependency to resolve ARM64 SIGILL crash (#4655)
* fix(deps): update wazero dependencies to resolve issues

Signed-off-by: Deluan <deluan@navidrome.org>

* fix(deps): update wazero dependency to latest version

Signed-off-by: Deluan <deluan@navidrome.org>

* fix(deps): update wazero dependency to latest version for issue resolution

Signed-off-by: Deluan <deluan@navidrome.org>

---------

Signed-off-by: Deluan <deluan@navidrome.org>
2025-11-06 15:07:09 -05:00
Deluan Quintão
1e8d28ff46
fix: qualify user id filter to avoid ambiguous column (#4511) 2025-11-06 14:54:01 -05:00
Kendall Garner
a128b3cf98
fix(db): make playqueue position field an integer (#4481) 2025-11-06 14:41:09 -05:00
Deluan Quintão
290a9fdeaa
test: fix locale-dependent tests by making formatNumber locale-aware (#4619)
- Add optional locale parameter to formatNumber function
- Update tests to explicitly pass 'en-US' locale for deterministic results
- Maintains backward compatibility: defaults to system locale when no locale specified
- No need for cross-env or environment variable manipulation
- Tests now pass consistently regardless of system locale

Related to #4417
2025-11-06 14:34:00 -05:00
Deluan
58b5ed86df refactor: extract TruncateRunes function for safe string truncation with suffix
Signed-off-by: Deluan <deluan@navidrome.org>

# Conflicts:
#	core/share.go
#	core/share_test.go
2025-11-06 14:27:38 -05:00
beerpsi
fe1cee0159
fix(share): slice content label by utf-8 runes (#4634)
* fix(share): slice content label by utf-8 runes

* Apply suggestions about avoiding allocations

Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>

* lint: remove unused import

* test: add test cases for CJK truncation

* test: add tests for ASCII labels too

---------

Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2025-11-06 14:24:07 -05:00
Deluan
3dfaa8cca1 ci: go mod tidy
Signed-off-by: Deluan <deluan@navidrome.org>
2025-11-06 12:53:41 -05:00
Deluan
0a5abfc1b1 chore: update actions/upload-artifact and actions/download-artifact to latest versions
Signed-off-by: Deluan <deluan@navidrome.org>
2025-11-06 12:43:35 -05:00
Deluan
c501bc6996 chore(deps): update ginkgo to version 2.27.2
Signed-off-by: Deluan <deluan@navidrome.org>
2025-11-06 12:41:16 -05:00
Deluan
0c71842b12 chore: update Go version to 1.25.4
Signed-off-by: Deluan <deluan@navidrome.org>
2025-11-06 12:40:44 -05:00
pca006132
e86dc03619
fix(ui): allow scrolling in play queue by adding delay (#4562) 2025-11-01 20:47:03 -04:00
Deluan Quintão
775626e037
refactor(scanner): optimize update artist's statistics using normalized media_file_artists table (#4641)
Optimized to use the normalized media_file_artists table instead of parsing JSONB

Signed-off-by: Deluan <deluan@navidrome.org>
2025-11-01 20:25:33 -04:00
Deluan Quintão
91fab68578
fix: handle UTF BOM in lyrics and playlist files (#4637)
* fix: handle UTF-8 BOM in lyrics and playlist files

Added UTF-8 BOM (Byte Order Mark) detection and stripping for external lyrics files and playlist files. This ensures that files with BOM markers are correctly parsed and recognized as synced lyrics or valid playlists.

The fix introduces a new ioutils package with UTF8Reader and UTF8ReadFile functions that automatically detect and remove UTF-8, UTF-16 LE, and UTF-16 BE BOMs. These utilities are now used when reading external lyrics and playlist files to ensure consistent parsing regardless of BOM presence.

Added comprehensive tests for BOM handling in both lyrics and playlists, including test fixtures with actual BOM markers to verify correct behavior.

* test: add test for UTF-16 LE encoded LRC files

Signed-off-by: Deluan <deluan@navidrome.org>

---------

Signed-off-by: Deluan <deluan@navidrome.org>
2025-10-31 09:07:23 -04:00
deluan
0bdd3e6f8b fix(ui): fix Ligera theme's RaPaginationActions contrast 2025-10-30 16:34:31 -04:00
Konstantin Morenko
465846c1bc
fix(ui): fix color of MuiIconButton in Gruvbox Dark theme (#4585)
* Fixed color of MuiIconButton in gruvboxDark.js

* Update ui/src/themes/gruvboxDark.js

Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>

---------

Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2025-10-29 09:14:40 -04:00
Deluan Quintão
cce11c5416
fix(scanner): restore basic tag extraction fallback mechanism for improved metadata parsing (#4401)
* feat: add basic tag extraction fallback mechanism

Added basic tag extraction from TagLib's generic Tag interface as a fallback
when PropertyMap doesn't contain standard metadata fields. This ensures that
essential tags like title, artist, album, comment, genre, year, and track
are always available even when they're not present in format-specific
property maps.

Changes include:
- Extract basic tags (__title, __artist, etc.) in C++ wrapper
- Add parseBasicTag function to process basic tags in Go extractor
- Refactor parseProp function to be reusable across property parsing
- Ensure basic tags are preferred over PropertyMap when available

* feat(taglib): update tag parsing to use double underscores for properties

Signed-off-by: Deluan <deluan@navidrome.org>

---------

Signed-off-by: Deluan <deluan@navidrome.org>
2025-10-26 19:38:34 -04:00
Deluan Quintão
d021289279
fix: enable multi-valued releasetype in smart playlists (#4621)
* fix: prevent infinite loop in Type filter autocomplete

Fixed an infinite loop issue in the album Type filter caused by an inline
arrow function in the optionText prop. The inline function created a new
reference on every render, causing React-Admin's AutocompleteInput to
continuously re-fetch data from the /api/tag endpoint.

The solution extracts the formatting function outside the component scope
as formatReleaseType, ensuring a stable function reference across renders.
This prevents unnecessary re-renders and API calls while maintaining the
humanized display format for release type values.

* fix: enable multi-valued releasetype in smart playlists

Smart playlists can now match all values in multi-valued releasetype tags.
Previously, the albumtype field was mapped to the single-valued mbz_album_type
database field, which only stored the first value from tags like album; soundtrack.
This prevented smart playlists from matching albums with secondary release types
like soundtrack, live, or compilation when tagged by MusicBrainz Picard.

The fix removes the direct database field mapping and allows both albumtype and
releasetype to use the multi-valued tag system. The albumtype field is now an
alias that points to the releasetype tag field, ensuring both query the same
JSON path in the tags column. This maintains backward compatibility with the
documented albumtype field while enabling proper multi-value tag matching.

Added tests to verify both releasetype and albumtype correctly generate
multi-valued tag queries.

Fixes #4616

* fix: resolve albumtype alias for all operators and sorting

Codex correctly identified that the initial fix only worked for Contains/StartsWith/EndsWith operators. The alias resolution was happening too late in the code path.

Fixed by resolving the alias in two places:
1. tagCond.ToSql() - now uses the actual field name (releasetype) in the JSON path
2. Criteria.OrderBy() - now uses the actual field name when building sort expressions

Added tests for Is/IsNot operators and sorting to ensure complete coverage.
2025-10-26 19:36:44 -04:00
Daniele Ricci
aa7f55646d
build(docker): use standalone wget instead of the busybox one, fix #4473
wget in busybox doesn't support redirects (required for downloading
artifacts from GitHub)
2025-10-25 17:47:09 -04:00
Deluan
925bfafc1f build: enhance golangci-lint installation process to check version and reinstall if necessary 2025-10-25 17:42:33 -04:00
Deluan
e24f7984cc chore(deps-dev): update happy-dom to version 20.0.8
Signed-off-by: Deluan <deluan@navidrome.org>
2025-10-25 17:25:52 -04:00
dependabot[bot]
ac3e6ae6a5
chore(deps-dev): bump brace-expansion from 1.1.11 to 1.1.12 in /ui (#4217)
Bumps [brace-expansion](https://github.com/juliangruber/brace-expansion) from 1.1.11 to 1.1.12.
- [Release notes](https://github.com/juliangruber/brace-expansion/releases)
- [Commits](https://github.com/juliangruber/brace-expansion/compare/1.1.11...v1.1.12)

---
updated-dependencies:
- dependency-name: brace-expansion
  dependency-version: 1.1.12
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Deluan Quintão <deluan@navidrome.org>
2025-10-25 17:24:31 -04:00
Deluan Quintão
b2019da999
chore(deps): update all dependencies (#4618)
* chore: update to Go 1.25.3

Signed-off-by: Deluan <deluan@navidrome.org>

* chore: update to golangci-lint

Signed-off-by: Deluan <deluan@navidrome.org>

* chore: update go dependencies

Signed-off-by: Deluan <deluan@navidrome.org>

* chore: update vite dependencies in package.json and improve EventSource mock in tests

- Upgraded @vitejs/plugin-react to version 5.1.0 and @vitest/coverage-v8 to version 4.0.3.
- Updated vite to version 7.1.12 and vite-plugin-pwa to version 1.1.0.
- Enhanced the EventSource mock implementation in eventStream.test.js for better test isolation.

* ci: remove coverage flag from Go test command in pipeline

* chore: update Node.js version to v24 in devcontainer, pipeline, and .nvmrc

* chore: prettier

Signed-off-by: Deluan <deluan@navidrome.org>

* chore: update actions/checkout from v4 to v5 in pipeline and update-translations workflows

* chore: update JS dependencies remove unused jest-dom import in Linkify.test.jsx

* chore: update actions/download-artifact from v4 to v5 in pipeline

---------

Signed-off-by: Deluan <deluan@navidrome.org>
2025-10-25 17:05:16 -04:00
154 changed files with 7165 additions and 2140 deletions

View File

@ -4,10 +4,10 @@
"dockerfile": "Dockerfile",
"args": {
// Update the VARIANT arg to pick a version of Go: 1, 1.15, 1.14
"VARIANT": "1.24",
"VARIANT": "1.25",
// Options
"INSTALL_NODE": "true",
"NODE_VERSION": "v20"
"NODE_VERSION": "v24"
}
},
"workspaceMount": "",

View File

@ -25,7 +25,7 @@ jobs:
git_tag: ${{ steps.git-version.outputs.GIT_TAG }}
git_sha: ${{ steps.git-version.outputs.GIT_SHA }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
with:
fetch-depth: 0
fetch-tags: true
@ -63,7 +63,7 @@ jobs:
name: Lint Go code
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- name: Download TagLib
uses: ./.github/actions/download-taglib
@ -93,7 +93,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out code into the Go module directory
uses: actions/checkout@v4
uses: actions/checkout@v5
- name: Download TagLib
uses: ./.github/actions/download-taglib
@ -106,7 +106,7 @@ jobs:
- name: Test
run: |
pkg-config --define-prefix --cflags --libs taglib # for debugging
go test -shuffle=on -tags netgo -race -cover ./... -v
go test -shuffle=on -tags netgo -race ./... -v
js:
name: Test JS code
@ -114,10 +114,10 @@ jobs:
env:
NODE_OPTIONS: "--max_old_space_size=4096"
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
- uses: actions/checkout@v5
- uses: actions/setup-node@v6
with:
node-version: 20
node-version: 24
cache: "npm"
cache-dependency-path: "**/package-lock.json"
@ -145,7 +145,7 @@ jobs:
name: Lint i18n files
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- run: |
set -e
for file in resources/i18n/*.json; do
@ -191,7 +191,7 @@ jobs:
PLATFORM=$(echo ${{ matrix.platform }} | tr '/' '_')
echo "PLATFORM=$PLATFORM" >> $GITHUB_ENV
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- name: Prepare Docker Buildx
uses: ./.github/actions/prepare-docker
@ -217,7 +217,7 @@ jobs:
CROSS_TAGLIB_VERSION=${{ env.CROSS_TAGLIB_VERSION }}
- name: Upload Binaries
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v5
with:
name: navidrome-${{ env.PLATFORM }}
path: ./output
@ -248,7 +248,7 @@ jobs:
touch "/tmp/digests/${digest#sha256:}"
- name: Upload digest
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v5
if: env.IS_LINUX == 'true' && env.IS_DOCKER_PUSH_CONFIGURED == 'true' && env.IS_ARMV5 == 'false'
with:
name: digests-${{ env.PLATFORM }}
@ -264,10 +264,10 @@ jobs:
env:
REGISTRY_IMAGE: ghcr.io/${{ github.repository }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- name: Download digests
uses: actions/download-artifact@v4
uses: actions/download-artifact@v6
with:
path: /tmp/digests
pattern: digests-*
@ -318,9 +318,9 @@ jobs:
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: actions/download-artifact@v4
- uses: actions/download-artifact@v6
with:
path: ./binaries
pattern: navidrome-windows*
@ -339,7 +339,7 @@ jobs:
du -h binaries/msi/*.msi
- name: Upload MSI files
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v5
with:
name: navidrome-windows-installers
path: binaries/msi/*.msi
@ -352,12 +352,12 @@ jobs:
outputs:
package_list: ${{ steps.set-package-list.outputs.package_list }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
with:
fetch-depth: 0
fetch-tags: true
- uses: actions/download-artifact@v4
- uses: actions/download-artifact@v6
with:
path: ./binaries
pattern: navidrome-*
@ -383,7 +383,7 @@ jobs:
rm ./dist/*.tar.gz ./dist/*.zip
- name: Upload all-packages artifact
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v5
with:
name: packages
path: dist/navidrome_0*
@ -406,13 +406,13 @@ jobs:
item: ${{ fromJson(needs.release.outputs.package_list) }}
steps:
- name: Download all-packages artifact
uses: actions/download-artifact@v4
uses: actions/download-artifact@v6
with:
name: packages
path: ./dist
- name: Upload all-packages artifact
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v5
with:
name: navidrome_linux_${{ matrix.item }}
path: dist/navidrome_0*_linux_${{ matrix.item }}

View File

@ -8,7 +8,7 @@ jobs:
runs-on: ubuntu-latest
if: ${{ github.repository_owner == 'navidrome' }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- name: Get updated translations
id: poeditor
env:

2
.nvmrc
View File

@ -1 +1 @@
v20
v24

View File

@ -31,7 +31,9 @@ ARG TARGETPLATFORM
ARG CROSS_TAGLIB_VERSION=2.1.1-1
ENV CROSS_TAGLIB_RELEASES_URL=https://github.com/navidrome/cross-taglib/releases/download/v${CROSS_TAGLIB_VERSION}/
# wget in busybox can't follow redirects
RUN <<EOT
apk add --no-cache wget
PLATFORM=$(echo ${TARGETPLATFORM} | tr '/' '-')
FILE=taglib-${PLATFORM}.tar.gz
@ -61,7 +63,7 @@ COPY --from=ui /build /build
########################################################################################################################
### Build Navidrome binary
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/golang:1.24-bookworm AS base
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/golang:1.25-bookworm AS base
RUN apt-get update && apt-get install -y clang lld
COPY --from=xx / /
WORKDIR /workspace

View File

@ -16,6 +16,7 @@ DOCKER_TAG ?= deluan/navidrome:develop
# Taglib version to use in cross-compilation, from https://github.com/navidrome/cross-taglib
CROSS_TAGLIB_VERSION ?= 2.1.1-1
GOLANGCI_LINT_VERSION ?= v2.5.0
UI_SRC_FILES := $(shell find ui -type f -not -path "ui/build/*" -not -path "ui/node_modules/*")
@ -53,7 +54,7 @@ testall: test-race test-i18n test-js ##@Development Run Go and JS tests
.PHONY: testall
test-race: ##@Development Run Go tests with race detector
go test -tags netgo -race -shuffle=on ./...
go test -tags netgo -race -shuffle=on $(PKG)
.PHONY: test-race
test-js: ##@Development Run JS tests
@ -65,7 +66,22 @@ test-i18n: ##@Development Validate all translations files
.PHONY: test-i18n
install-golangci-lint: ##@Development Install golangci-lint if not present
@PATH=$$PATH:./bin which golangci-lint > /dev/null || (echo "Installing golangci-lint..." && curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/HEAD/install.sh | sh -s v2.1.6)
@INSTALL=false; \
if PATH=$$PATH:./bin which golangci-lint > /dev/null 2>&1; then \
CURRENT_VERSION=$$(PATH=$$PATH:./bin golangci-lint version 2>/dev/null | grep -oE '[0-9]+\.[0-9]+\.[0-9]+' | head -n1); \
REQUIRED_VERSION=$$(echo "$(GOLANGCI_LINT_VERSION)" | sed 's/^v//'); \
if [ "$$CURRENT_VERSION" != "$$REQUIRED_VERSION" ]; then \
echo "Found golangci-lint $$CURRENT_VERSION, but $$REQUIRED_VERSION is required. Reinstalling..."; \
rm -f ./bin/golangci-lint; \
INSTALL=true; \
fi; \
else \
INSTALL=true; \
fi; \
if [ "$$INSTALL" = "true" ]; then \
echo "Installing golangci-lint $(GOLANGCI_LINT_VERSION)..."; \
curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/HEAD/install.sh | sh -s $(GOLANGCI_LINT_VERSION); \
fi
.PHONY: install-golangci-lint
lint: install-golangci-lint ##@Development Lint Go code

View File

@ -43,23 +43,21 @@ func (e extractor) extractMetadata(filePath string) (*metadata.Info, error) {
// Parse audio properties
ap := metadata.AudioProperties{}
if length, ok := tags["_lengthinmilliseconds"]; ok && len(length) > 0 {
millis, _ := strconv.Atoi(length[0])
if millis > 0 {
ap.Duration = (time.Millisecond * time.Duration(millis)).Round(time.Millisecond * 10)
}
delete(tags, "_lengthinmilliseconds")
}
parseProp := func(prop string, target *int) {
if value, ok := tags[prop]; ok && len(value) > 0 {
*target, _ = strconv.Atoi(value[0])
delete(tags, prop)
}
}
parseProp("_bitrate", &ap.BitRate)
parseProp("_channels", &ap.Channels)
parseProp("_samplerate", &ap.SampleRate)
parseProp("_bitspersample", &ap.BitDepth)
ap.BitRate = parseProp(tags, "__bitrate")
ap.Channels = parseProp(tags, "__channels")
ap.SampleRate = parseProp(tags, "__samplerate")
ap.BitDepth = parseProp(tags, "__bitspersample")
length := parseProp(tags, "__lengthinmilliseconds")
ap.Duration = (time.Millisecond * time.Duration(length)).Round(time.Millisecond * 10)
// Extract basic tags
parseBasicTag(tags, "__title", "title")
parseBasicTag(tags, "__artist", "artist")
parseBasicTag(tags, "__album", "album")
parseBasicTag(tags, "__comment", "comment")
parseBasicTag(tags, "__genre", "genre")
parseBasicTag(tags, "__year", "year")
parseBasicTag(tags, "__track", "tracknumber")
// Parse track/disc totals
parseTuple := func(prop string) {
@ -107,6 +105,31 @@ var tiplMapping = map[string]string{
"DJ-mix": "djmixer",
}
// parseProp reads an integer-valued audio property from the tags map and
// returns it. When the property is present (and non-empty) its entry is
// removed from the map so it is not processed again as a regular tag.
// A missing or empty property, or a value that fails to parse as an
// integer, yields 0.
func parseProp(tags map[string][]string, prop string) int {
	if value, ok := tags[prop]; ok && len(value) > 0 {
		// Parse errors are deliberately ignored: a malformed value is
		// treated the same as an absent one (zero).
		v, _ := strconv.Atoi(value[0])
		delete(tags, prop)
		return v
	}
	return 0
}
// parseBasicTag promotes a TagLib "basic" tag (e.g. __title, __artist) to
// its canonical tag name (e.g. title, artist). The basic entry is always
// removed from the map once seen; its value is only copied to the canonical
// name when that tag is absent or empty, so existing values take precedence.
func parseBasicTag(tags map[string][]string, basicName string, tagName string) {
	values := tags[basicName]
	if len(values) == 0 {
		return
	}
	delete(tags, basicName)
	if existing := tags[tagName]; len(existing) == 0 {
		tags[tagName] = values
	}
}
// parseTIPL parses the ID3v2.4 TIPL frame string, which is received from TagLib in the format:
//
// "arranger Andrew Powell engineer Chris Blair engineer Pat Stapley producer Eric Woolfson".

View File

@ -45,31 +45,63 @@ int taglib_read(const FILENAME_CHAR_T *filename, unsigned long id) {
// Add audio properties to the tags
const TagLib::AudioProperties *props(f.audioProperties());
goPutInt(id, (char *)"_lengthinmilliseconds", props->lengthInMilliseconds());
goPutInt(id, (char *)"_bitrate", props->bitrate());
goPutInt(id, (char *)"_channels", props->channels());
goPutInt(id, (char *)"_samplerate", props->sampleRate());
goPutInt(id, (char *)"__lengthinmilliseconds", props->lengthInMilliseconds());
goPutInt(id, (char *)"__bitrate", props->bitrate());
goPutInt(id, (char *)"__channels", props->channels());
goPutInt(id, (char *)"__samplerate", props->sampleRate());
// Extract bits per sample for supported formats
int bitsPerSample = 0;
if (const auto* apeProperties{ dynamic_cast<const TagLib::APE::Properties*>(props) })
goPutInt(id, (char *)"_bitspersample", apeProperties->bitsPerSample());
if (const auto* asfProperties{ dynamic_cast<const TagLib::ASF::Properties*>(props) })
goPutInt(id, (char *)"_bitspersample", asfProperties->bitsPerSample());
bitsPerSample = apeProperties->bitsPerSample();
else if (const auto* asfProperties{ dynamic_cast<const TagLib::ASF::Properties*>(props) })
bitsPerSample = asfProperties->bitsPerSample();
else if (const auto* flacProperties{ dynamic_cast<const TagLib::FLAC::Properties*>(props) })
goPutInt(id, (char *)"_bitspersample", flacProperties->bitsPerSample());
bitsPerSample = flacProperties->bitsPerSample();
else if (const auto* mp4Properties{ dynamic_cast<const TagLib::MP4::Properties*>(props) })
goPutInt(id, (char *)"_bitspersample", mp4Properties->bitsPerSample());
bitsPerSample = mp4Properties->bitsPerSample();
else if (const auto* wavePackProperties{ dynamic_cast<const TagLib::WavPack::Properties*>(props) })
goPutInt(id, (char *)"_bitspersample", wavePackProperties->bitsPerSample());
bitsPerSample = wavePackProperties->bitsPerSample();
else if (const auto* aiffProperties{ dynamic_cast<const TagLib::RIFF::AIFF::Properties*>(props) })
goPutInt(id, (char *)"_bitspersample", aiffProperties->bitsPerSample());
bitsPerSample = aiffProperties->bitsPerSample();
else if (const auto* wavProperties{ dynamic_cast<const TagLib::RIFF::WAV::Properties*>(props) })
goPutInt(id, (char *)"_bitspersample", wavProperties->bitsPerSample());
bitsPerSample = wavProperties->bitsPerSample();
else if (const auto* dsfProperties{ dynamic_cast<const TagLib::DSF::Properties*>(props) })
goPutInt(id, (char *)"_bitspersample", dsfProperties->bitsPerSample());
bitsPerSample = dsfProperties->bitsPerSample();
if (bitsPerSample > 0) {
goPutInt(id, (char *)"__bitspersample", bitsPerSample);
}
// Send all properties to the Go map
TagLib::PropertyMap tags = f.file()->properties();
// Make sure at least the basic properties are extracted
TagLib::Tag *basic = f.file()->tag();
if (!basic->isEmpty()) {
if (!basic->title().isEmpty()) {
tags.insert("__title", basic->title());
}
if (!basic->artist().isEmpty()) {
tags.insert("__artist", basic->artist());
}
if (!basic->album().isEmpty()) {
tags.insert("__album", basic->album());
}
if (!basic->comment().isEmpty()) {
tags.insert("__comment", basic->comment());
}
if (!basic->genre().isEmpty()) {
tags.insert("__genre", basic->genre());
}
if (basic->year() > 0) {
tags.insert("__year", TagLib::String::number(basic->year()));
}
if (basic->track() > 0) {
tags.insert("__track", TagLib::String::number(basic->track()));
}
}
TagLib::ID3v2::Tag *id3Tags = NULL;
// Get some extended/non-standard ID3-only tags (ex: iTunes extended frames)

View File

@ -4,10 +4,12 @@ import (
"context"
"encoding/gob"
"os"
"strings"
"github.com/navidrome/navidrome/core"
"github.com/navidrome/navidrome/db"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/persistence"
"github.com/navidrome/navidrome/scanner"
"github.com/navidrome/navidrome/utils/pl"
@ -17,11 +19,13 @@ import (
var (
fullScan bool
subprocess bool
targets string
)
func init() {
scanCmd.Flags().BoolVarP(&fullScan, "full", "f", false, "check all subfolders, ignoring timestamps")
scanCmd.Flags().BoolVarP(&subprocess, "subprocess", "", false, "run as subprocess (internal use)")
scanCmd.Flags().StringVarP(&targets, "targets", "t", "", "comma-separated list of libraryID:folderPath pairs (e.g., \"1:Music/Rock,1:Music/Jazz,2:Classical\")")
rootCmd.AddCommand(scanCmd)
}
@ -68,7 +72,18 @@ func runScanner(ctx context.Context) {
ds := persistence.New(sqlDB)
pls := core.NewPlaylists(ds)
progress, err := scanner.CallScan(ctx, ds, pls, fullScan)
// Parse targets if provided
var scanTargets []model.ScanTarget
if targets != "" {
var err error
scanTargets, err = model.ParseTargets(strings.Split(targets, ","))
if err != nil {
log.Fatal(ctx, "Failed to parse targets", err)
}
log.Info(ctx, "Scanning specific folders", "numTargets", len(scanTargets))
}
progress, err := scanner.CallScan(ctx, ds, pls, fullScan, scanTargets)
if err != nil {
log.Fatal(ctx, "Failed to scan", err)
}

View File

@ -69,10 +69,11 @@ func CreateNativeAPIRouter(ctx context.Context) *nativeapi.Router {
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
cacheWarmer := artwork.NewCacheWarmer(artworkArtwork, fileCache)
broker := events.GetBroker()
scannerScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
watcher := scanner.GetWatcher(dataStore, scannerScanner)
library := core.NewLibrary(dataStore, scannerScanner, watcher, broker)
router := nativeapi.New(dataStore, share, playlists, insights, library)
modelScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
watcher := scanner.GetWatcher(dataStore, modelScanner)
library := core.NewLibrary(dataStore, modelScanner, watcher, broker)
maintenance := core.NewMaintenance(dataStore)
router := nativeapi.New(dataStore, share, playlists, insights, library, maintenance)
return router
}
@ -94,10 +95,10 @@ func CreateSubsonicAPIRouter(ctx context.Context) *subsonic.Router {
cacheWarmer := artwork.NewCacheWarmer(artworkArtwork, fileCache)
broker := events.GetBroker()
playlists := core.NewPlaylists(dataStore)
scannerScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
modelScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
playTracker := scrobbler.GetPlayTracker(dataStore, broker, manager)
playbackServer := playback.GetInstance(dataStore)
router := subsonic.New(dataStore, artworkArtwork, mediaStreamer, archiver, players, provider, scannerScanner, broker, playlists, playTracker, share, playbackServer, metricsMetrics)
router := subsonic.New(dataStore, artworkArtwork, mediaStreamer, archiver, players, provider, modelScanner, broker, playlists, playTracker, share, playbackServer, metricsMetrics)
return router
}
@ -149,7 +150,7 @@ func CreatePrometheus() metrics.Metrics {
return metricsMetrics
}
func CreateScanner(ctx context.Context) scanner.Scanner {
func CreateScanner(ctx context.Context) model.Scanner {
sqlDB := db.Db()
dataStore := persistence.New(sqlDB)
fileCache := artwork.GetImageCache()
@ -162,8 +163,8 @@ func CreateScanner(ctx context.Context) scanner.Scanner {
cacheWarmer := artwork.NewCacheWarmer(artworkArtwork, fileCache)
broker := events.GetBroker()
playlists := core.NewPlaylists(dataStore)
scannerScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
return scannerScanner
modelScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
return modelScanner
}
func CreateScanWatcher(ctx context.Context) scanner.Watcher {
@ -179,8 +180,8 @@ func CreateScanWatcher(ctx context.Context) scanner.Watcher {
cacheWarmer := artwork.NewCacheWarmer(artworkArtwork, fileCache)
broker := events.GetBroker()
playlists := core.NewPlaylists(dataStore)
scannerScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
watcher := scanner.GetWatcher(dataStore, scannerScanner)
modelScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
watcher := scanner.GetWatcher(dataStore, modelScanner)
return watcher
}
@ -201,7 +202,7 @@ func getPluginManager() plugins.Manager {
// wire_injectors.go:
var allProviders = wire.NewSet(core.Set, artwork.Set, server.New, subsonic.New, nativeapi.New, public.New, persistence.New, lastfm.NewRouter, listenbrainz.NewRouter, events.GetBroker, scanner.New, scanner.GetWatcher, plugins.GetManager, metrics.GetPrometheusInstance, db.Db, wire.Bind(new(agents.PluginLoader), new(plugins.Manager)), wire.Bind(new(scrobbler.PluginLoader), new(plugins.Manager)), wire.Bind(new(metrics.PluginLoader), new(plugins.Manager)), wire.Bind(new(core.Scanner), new(scanner.Scanner)), wire.Bind(new(core.Watcher), new(scanner.Watcher)))
var allProviders = wire.NewSet(core.Set, artwork.Set, server.New, subsonic.New, nativeapi.New, public.New, persistence.New, lastfm.NewRouter, listenbrainz.NewRouter, events.GetBroker, scanner.New, scanner.GetWatcher, plugins.GetManager, metrics.GetPrometheusInstance, db.Db, wire.Bind(new(agents.PluginLoader), new(plugins.Manager)), wire.Bind(new(scrobbler.PluginLoader), new(plugins.Manager)), wire.Bind(new(metrics.PluginLoader), new(plugins.Manager)), wire.Bind(new(core.Watcher), new(scanner.Watcher)))
func GetPluginManager(ctx context.Context) plugins.Manager {
manager := getPluginManager()

View File

@ -45,7 +45,6 @@ var allProviders = wire.NewSet(
wire.Bind(new(agents.PluginLoader), new(plugins.Manager)),
wire.Bind(new(scrobbler.PluginLoader), new(plugins.Manager)),
wire.Bind(new(metrics.PluginLoader), new(plugins.Manager)),
wire.Bind(new(core.Scanner), new(scanner.Scanner)),
wire.Bind(new(core.Watcher), new(scanner.Watcher)),
)
@ -103,7 +102,7 @@ func CreatePrometheus() metrics.Metrics {
))
}
func CreateScanner(ctx context.Context) scanner.Scanner {
func CreateScanner(ctx context.Context) model.Scanner {
panic(wire.Build(
allProviders,
))

View File

@ -125,6 +125,7 @@ type configOptions struct {
DevAlbumInfoTimeToLive time.Duration
DevExternalScanner bool
DevScannerThreads uint
DevSelectiveWatcher bool
DevInsightsInitialDelay time.Duration
DevEnablePlayerInsights bool
DevEnablePluginsInsights bool
@ -600,6 +601,7 @@ func setViperDefaults() {
viper.SetDefault("devalbuminfotimetolive", consts.AlbumInfoTimeToLive)
viper.SetDefault("devexternalscanner", true)
viper.SetDefault("devscannerthreads", 5)
viper.SetDefault("devselectivewatcher", true)
viper.SetDefault("devinsightsinitialdelay", consts.InsightsInitialDelay)
viper.SetDefault("devenableplayerinsights", true)
viper.SetDefault("devenablepluginsinsights", true)

View File

@ -38,6 +38,7 @@ type lastfmAgent struct {
secret string
lang string
client *client
httpClient httpDoer
getInfoMutex sync.Mutex
}
@ -56,6 +57,7 @@ func lastFMConstructor(ds model.DataStore) *lastfmAgent {
Timeout: consts.DefaultHttpClientTimeOut,
}
chc := cache.NewHTTPClient(hc, consts.DefaultHttpClientTimeOut)
l.httpClient = chc
l.client = newClient(l.apiKey, l.secret, l.lang, chc)
return l
}
@ -190,13 +192,13 @@ func (l *lastfmAgent) GetArtistTopSongs(ctx context.Context, id, artistName, mbi
return res, nil
}
var artistOpenGraphQuery = cascadia.MustCompile(`html > head > meta[property="og:image"]`)
var (
artistOpenGraphQuery = cascadia.MustCompile(`html > head > meta[property="og:image"]`)
artistIgnoredImage = "2a96cbd8b46e442fc41c2b86b821562f" // Last.fm artist placeholder image name
)
func (l *lastfmAgent) GetArtistImages(ctx context.Context, _, name, mbid string) ([]agents.ExternalImage, error) {
log.Debug(ctx, "Getting artist images from Last.fm", "name", name)
hc := http.Client{
Timeout: consts.DefaultHttpClientTimeOut,
}
a, err := l.callArtistGetInfo(ctx, name)
if err != nil {
return nil, fmt.Errorf("get artist info: %w", err)
@ -205,7 +207,7 @@ func (l *lastfmAgent) GetArtistImages(ctx context.Context, _, name, mbid string)
if err != nil {
return nil, fmt.Errorf("create artist image request: %w", err)
}
resp, err := hc.Do(req)
resp, err := l.httpClient.Do(req)
if err != nil {
return nil, fmt.Errorf("get artist url: %w", err)
}
@ -222,11 +224,16 @@ func (l *lastfmAgent) GetArtistImages(ctx context.Context, _, name, mbid string)
return res, nil
}
for _, attr := range n.Attr {
if attr.Key == "content" {
res = []agents.ExternalImage{
{URL: attr.Val},
}
break
if attr.Key != "content" {
continue
}
if strings.Contains(attr.Val, artistIgnoredImage) {
log.Debug(ctx, "Artist image is ignored default image", "name", name, "url", attr.Val)
return res, nil
}
res = []agents.ExternalImage{
{URL: attr.Val},
}
}
return res, nil

View File

@ -393,4 +393,73 @@ var _ = Describe("lastfmAgent", func() {
})
})
})
Describe("GetArtistImages", func() {
var agent *lastfmAgent
var apiClient *tests.FakeHttpClient
var httpClient *tests.FakeHttpClient
BeforeEach(func() {
apiClient = &tests.FakeHttpClient{}
httpClient = &tests.FakeHttpClient{}
client := newClient("API_KEY", "SECRET", "pt", apiClient)
agent = lastFMConstructor(ds)
agent.client = client
agent.httpClient = httpClient
})
It("returns the artist image from the page", func() {
fApi, _ := os.Open("tests/fixtures/lastfm.artist.getinfo.json")
apiClient.Res = http.Response{Body: fApi, StatusCode: 200}
fScraper, _ := os.Open("tests/fixtures/lastfm.artist.page.html")
httpClient.Res = http.Response{Body: fScraper, StatusCode: 200}
images, err := agent.GetArtistImages(ctx, "123", "U2", "")
Expect(err).ToNot(HaveOccurred())
Expect(images).To(HaveLen(1))
Expect(images[0].URL).To(Equal("https://lastfm.freetls.fastly.net/i/u/ar0/818148bf682d429dc21b59a73ef6f68e.png"))
})
It("returns empty list if image is the ignored default image", func() {
fApi, _ := os.Open("tests/fixtures/lastfm.artist.getinfo.json")
apiClient.Res = http.Response{Body: fApi, StatusCode: 200}
fScraper, _ := os.Open("tests/fixtures/lastfm.artist.page.ignored.html")
httpClient.Res = http.Response{Body: fScraper, StatusCode: 200}
images, err := agent.GetArtistImages(ctx, "123", "U2", "")
Expect(err).ToNot(HaveOccurred())
Expect(images).To(BeEmpty())
})
It("returns empty list if page has no meta tags", func() {
fApi, _ := os.Open("tests/fixtures/lastfm.artist.getinfo.json")
apiClient.Res = http.Response{Body: fApi, StatusCode: 200}
fScraper, _ := os.Open("tests/fixtures/lastfm.artist.page.no_meta.html")
httpClient.Res = http.Response{Body: fScraper, StatusCode: 200}
images, err := agent.GetArtistImages(ctx, "123", "U2", "")
Expect(err).ToNot(HaveOccurred())
Expect(images).To(BeEmpty())
})
It("returns error if API call fails", func() {
apiClient.Err = errors.New("api error")
_, err := agent.GetArtistImages(ctx, "123", "U2", "")
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(ContainSubstring("get artist info"))
})
It("returns error if scraper call fails", func() {
fApi, _ := os.Open("tests/fixtures/lastfm.artist.getinfo.json")
apiClient.Res = http.Response{Body: fApi, StatusCode: 200}
httpClient.Err = errors.New("scraper error")
_, err := agent.GetArtistImages(ctx, "123", "U2", "")
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(ContainSubstring("get artist url"))
})
})
})

View File

@ -90,6 +90,7 @@ var _ = Describe("CacheWarmer", func() {
})
It("deduplicates items in buffer", func() {
fc.SetReady(false) // Make cache unavailable so items stay in buffer
cw := NewCacheWarmer(aw, fc).(*cacheWarmer)
cw.PreCache(model.MustParseArtworkID("al-1"))
cw.PreCache(model.MustParseArtworkID("al-1"))

View File

@ -1,6 +1,7 @@
package artwork
import (
"cmp"
"context"
"crypto/md5"
"fmt"
@ -11,6 +12,7 @@ import (
"time"
"github.com/Masterminds/squirrel"
"github.com/maruel/natural"
"github.com/navidrome/navidrome/conf"
"github.com/navidrome/navidrome/core"
"github.com/navidrome/navidrome/core/external"
@ -116,8 +118,30 @@ func loadAlbumFoldersPaths(ctx context.Context, ds model.DataStore, albums ...mo
}
// Sort image files to ensure consistent selection of cover art
// This prioritizes files from lower-numbered disc folders by sorting the paths
slices.Sort(imgFiles)
// This prioritizes files without numeric suffixes (e.g., cover.jpg over cover.1.jpg)
// by comparing base filenames without extensions
slices.SortFunc(imgFiles, compareImageFiles)
return paths, imgFiles, &updatedAt, nil
}
// compareImageFiles compares two image file paths for sorting.
// It extracts the base filename (without extension) and compares case-insensitively.
// This ensures that "cover.jpg" sorts before "cover.1.jpg" since "cover" < "cover.1".
// Note: This function is called O(n log n) times during sorting, but in practice albums
// typically have only 1-20 image files, making the repeated string operations negligible.
func compareImageFiles(a, b string) int {
// Case-insensitive comparison
a = strings.ToLower(a)
b = strings.ToLower(b)
// Extract base filenames without extensions
baseA := strings.TrimSuffix(filepath.Base(a), filepath.Ext(a))
baseB := strings.TrimSuffix(filepath.Base(b), filepath.Ext(b))
// Compare base names first, then full paths if equal
return cmp.Or(
natural.Compare(baseA, baseB),
natural.Compare(a, b),
)
}

View File

@ -27,26 +27,7 @@ var _ = Describe("Album Artwork Reader", func() {
expectedAt = now.Add(5 * time.Minute)
// Set up the test folders with image files
repo = &fakeFolderRepo{
result: []model.Folder{
{
Path: "Artist/Album/Disc1",
ImagesUpdatedAt: expectedAt,
ImageFiles: []string{"cover.jpg", "back.jpg"},
},
{
Path: "Artist/Album/Disc2",
ImagesUpdatedAt: now,
ImageFiles: []string{"cover.jpg"},
},
{
Path: "Artist/Album/Disc10",
ImagesUpdatedAt: now,
ImageFiles: []string{"cover.jpg"},
},
},
err: nil,
}
repo = &fakeFolderRepo{}
ds = &fakeDataStore{
folderRepo: repo,
}
@ -58,19 +39,82 @@ var _ = Describe("Album Artwork Reader", func() {
})
It("returns sorted image files", func() {
repo.result = []model.Folder{
{
Path: "Artist/Album/Disc1",
ImagesUpdatedAt: expectedAt,
ImageFiles: []string{"cover.jpg", "back.jpg", "cover.1.jpg"},
},
{
Path: "Artist/Album/Disc2",
ImagesUpdatedAt: now,
ImageFiles: []string{"cover.jpg"},
},
{
Path: "Artist/Album/Disc10",
ImagesUpdatedAt: now,
ImageFiles: []string{"cover.jpg"},
},
}
_, imgFiles, imagesUpdatedAt, err := loadAlbumFoldersPaths(ctx, ds, album)
Expect(err).ToNot(HaveOccurred())
Expect(*imagesUpdatedAt).To(Equal(expectedAt))
// Check that image files are sorted alphabetically
Expect(imgFiles).To(HaveLen(4))
// Check that image files are sorted by base name (without extension)
Expect(imgFiles).To(HaveLen(5))
// The files should be sorted by full path
// Files should be sorted by base filename without extension, then by full path
// "back" < "cover", so back.jpg comes first
// Then all cover.jpg files, sorted by path
Expect(imgFiles[0]).To(Equal(filepath.FromSlash("Artist/Album/Disc1/back.jpg")))
Expect(imgFiles[1]).To(Equal(filepath.FromSlash("Artist/Album/Disc1/cover.jpg")))
Expect(imgFiles[2]).To(Equal(filepath.FromSlash("Artist/Album/Disc10/cover.jpg")))
Expect(imgFiles[3]).To(Equal(filepath.FromSlash("Artist/Album/Disc2/cover.jpg")))
Expect(imgFiles[2]).To(Equal(filepath.FromSlash("Artist/Album/Disc2/cover.jpg")))
Expect(imgFiles[3]).To(Equal(filepath.FromSlash("Artist/Album/Disc10/cover.jpg")))
Expect(imgFiles[4]).To(Equal(filepath.FromSlash("Artist/Album/Disc1/cover.1.jpg")))
})
It("prioritizes files without numeric suffixes", func() {
// Test case for issue #4683: cover.jpg should come before cover.1.jpg
repo.result = []model.Folder{
{
Path: "Artist/Album",
ImagesUpdatedAt: now,
ImageFiles: []string{"cover.1.jpg", "cover.jpg", "cover.2.jpg"},
},
}
_, imgFiles, _, err := loadAlbumFoldersPaths(ctx, ds, album)
Expect(err).ToNot(HaveOccurred())
Expect(imgFiles).To(HaveLen(3))
// cover.jpg should come first because "cover" < "cover.1" < "cover.2"
Expect(imgFiles[0]).To(Equal(filepath.FromSlash("Artist/Album/cover.jpg")))
Expect(imgFiles[1]).To(Equal(filepath.FromSlash("Artist/Album/cover.1.jpg")))
Expect(imgFiles[2]).To(Equal(filepath.FromSlash("Artist/Album/cover.2.jpg")))
})
It("handles case-insensitive sorting", func() {
// Test that Cover.jpg and cover.jpg are treated as equivalent
repo.result = []model.Folder{
{
Path: "Artist/Album",
ImagesUpdatedAt: now,
ImageFiles: []string{"Folder.jpg", "cover.jpg", "BACK.jpg"},
},
}
_, imgFiles, _, err := loadAlbumFoldersPaths(ctx, ds, album)
Expect(err).ToNot(HaveOccurred())
Expect(imgFiles).To(HaveLen(3))
// Files should be sorted case-insensitively: BACK, cover, Folder
Expect(imgFiles[0]).To(Equal(filepath.FromSlash("Artist/Album/BACK.jpg")))
Expect(imgFiles[1]).To(Equal(filepath.FromSlash("Artist/Album/cover.jpg")))
Expect(imgFiles[2]).To(Equal(filepath.FromSlash("Artist/Album/Folder.jpg")))
})
})
})

View File

@ -8,6 +8,7 @@ import (
"io/fs"
"os"
"path/filepath"
"slices"
"strings"
"time"
@ -139,11 +140,22 @@ func findImageInFolder(ctx context.Context, folder, pattern string) (io.ReadClos
return nil, "", err
}
// Filter to valid image files
var imagePaths []string
for _, m := range matches {
if !model.IsImageFile(m) {
continue
}
filePath := filepath.Join(folder, m)
imagePaths = append(imagePaths, m)
}
// Sort image files by prioritizing base filenames without numeric
// suffixes (e.g., artist.jpg before artist.1.jpg)
slices.SortFunc(imagePaths, compareImageFiles)
// Try to open files in sorted order
for _, p := range imagePaths {
filePath := filepath.Join(folder, p)
f, err := os.Open(filePath)
if err != nil {
log.Warn(ctx, "Could not open cover art file", "file", filePath, err)

View File

@ -240,24 +240,79 @@ var _ = Describe("artistArtworkReader", func() {
Expect(os.MkdirAll(artistDir, 0755)).To(Succeed())
// Create multiple matching files
Expect(os.WriteFile(filepath.Join(artistDir, "artist.jpg"), []byte("jpg image"), 0600)).To(Succeed())
Expect(os.WriteFile(filepath.Join(artistDir, "artist.abc"), []byte("text file"), 0600)).To(Succeed())
Expect(os.WriteFile(filepath.Join(artistDir, "artist.png"), []byte("png image"), 0600)).To(Succeed())
Expect(os.WriteFile(filepath.Join(artistDir, "artist.txt"), []byte("text file"), 0600)).To(Succeed())
Expect(os.WriteFile(filepath.Join(artistDir, "artist.jpg"), []byte("jpg image"), 0600)).To(Succeed())
testFunc = fromArtistFolder(ctx, artistDir, "artist.*")
})
It("returns the first valid image file", func() {
It("returns the first valid image file in sorted order", func() {
reader, path, err := testFunc()
Expect(err).ToNot(HaveOccurred())
Expect(reader).ToNot(BeNil())
// Should return an image file, not the text file
Expect(path).To(SatisfyAny(
ContainSubstring("artist.jpg"),
ContainSubstring("artist.png"),
))
Expect(path).ToNot(ContainSubstring("artist.txt"))
// Should return an image file,
// Files are sorted: jpg comes before png alphabetically.
// .abc comes first, but it's not an image.
Expect(path).To(ContainSubstring("artist.jpg"))
reader.Close()
})
})
When("prioritizing files without numeric suffixes", func() {
BeforeEach(func() {
// Test case for issue #4683: artist.jpg should come before artist.1.jpg
artistDir := filepath.Join(tempDir, "artist")
Expect(os.MkdirAll(artistDir, 0755)).To(Succeed())
// Create multiple matches with and without numeric suffixes
Expect(os.WriteFile(filepath.Join(artistDir, "artist.1.jpg"), []byte("artist 1"), 0600)).To(Succeed())
Expect(os.WriteFile(filepath.Join(artistDir, "artist.jpg"), []byte("artist main"), 0600)).To(Succeed())
Expect(os.WriteFile(filepath.Join(artistDir, "artist.2.jpg"), []byte("artist 2"), 0600)).To(Succeed())
testFunc = fromArtistFolder(ctx, artistDir, "artist.*")
})
It("returns artist.jpg before artist.1.jpg and artist.2.jpg", func() {
reader, path, err := testFunc()
Expect(err).ToNot(HaveOccurred())
Expect(reader).ToNot(BeNil())
Expect(path).To(ContainSubstring("artist.jpg"))
// Verify it's the main file, not a numbered variant
data, err := io.ReadAll(reader)
Expect(err).ToNot(HaveOccurred())
Expect(string(data)).To(Equal("artist main"))
reader.Close()
})
})
When("handling case-insensitive sorting", func() {
BeforeEach(func() {
// Test case to ensure case-insensitive natural sorting
artistDir := filepath.Join(tempDir, "artist")
Expect(os.MkdirAll(artistDir, 0755)).To(Succeed())
// Create files with mixed case names
Expect(os.WriteFile(filepath.Join(artistDir, "Folder.jpg"), []byte("folder"), 0600)).To(Succeed())
Expect(os.WriteFile(filepath.Join(artistDir, "artist.jpg"), []byte("artist"), 0600)).To(Succeed())
Expect(os.WriteFile(filepath.Join(artistDir, "BACK.jpg"), []byte("back"), 0600)).To(Succeed())
testFunc = fromArtistFolder(ctx, artistDir, "*.*")
})
It("sorts case-insensitively", func() {
reader, path, err := testFunc()
Expect(err).ToNot(HaveOccurred())
Expect(reader).ToNot(BeNil())
// Should return artist.jpg first (case-insensitive: "artist" < "back" < "folder")
Expect(path).To(ContainSubstring("artist.jpg"))
data, err := io.ReadAll(reader)
Expect(err).ToNot(HaveOccurred())
Expect(string(data)).To(Equal("artist"))
reader.Close()
})
})

View File

@ -21,11 +21,6 @@ import (
"github.com/navidrome/navidrome/utils/slice"
)
// Scanner interface for triggering scans
type Scanner interface {
ScanAll(ctx context.Context, fullScan bool) (warnings []string, err error)
}
// Watcher interface for managing file system watchers
type Watcher interface {
Watch(ctx context.Context, lib *model.Library) error
@ -43,13 +38,13 @@ type Library interface {
type libraryService struct {
ds model.DataStore
scanner Scanner
scanner model.Scanner
watcher Watcher
broker events.Broker
}
// NewLibrary creates a new Library service
func NewLibrary(ds model.DataStore, scanner Scanner, watcher Watcher, broker events.Broker) Library {
func NewLibrary(ds model.DataStore, scanner model.Scanner, watcher Watcher, broker events.Broker) Library {
return &libraryService{
ds: ds,
scanner: scanner,
@ -155,7 +150,7 @@ type libraryRepositoryWrapper struct {
model.LibraryRepository
ctx context.Context
ds model.DataStore
scanner Scanner
scanner model.Scanner
watcher Watcher
broker events.Broker
}
@ -192,7 +187,7 @@ func (r *libraryRepositoryWrapper) Save(entity interface{}) (string, error) {
return strconv.Itoa(lib.ID), nil
}
func (r *libraryRepositoryWrapper) Update(id string, entity interface{}, cols ...string) error {
func (r *libraryRepositoryWrapper) Update(id string, entity interface{}, _ ...string) error {
lib := entity.(*model.Library)
libID, err := strconv.Atoi(id)
if err != nil {

View File

@ -29,7 +29,7 @@ var _ = Describe("Library Service", func() {
var userRepo *tests.MockedUserRepo
var ctx context.Context
var tempDir string
var scanner *mockScanner
var scanner *tests.MockScanner
var watcherManager *mockWatcherManager
var broker *mockEventBroker
@ -43,7 +43,7 @@ var _ = Describe("Library Service", func() {
ds.MockedUser = userRepo
// Create a mock scanner that tracks calls
scanner = &mockScanner{}
scanner = tests.NewMockScanner()
// Create a mock watcher manager
watcherManager = &mockWatcherManager{
libraryStates: make(map[int]model.Library),
@ -616,11 +616,12 @@ var _ = Describe("Library Service", func() {
// Wait briefly for the goroutine to complete
Eventually(func() int {
return scanner.len()
return scanner.GetScanAllCallCount()
}, "1s", "10ms").Should(Equal(1))
// Verify scan was called with correct parameters
Expect(scanner.ScanCalls[0].FullScan).To(BeFalse()) // Should be quick scan
calls := scanner.GetScanAllCalls()
Expect(calls[0].FullScan).To(BeFalse()) // Should be quick scan
})
It("triggers scan when updating library path", func() {
@ -641,11 +642,12 @@ var _ = Describe("Library Service", func() {
// Wait briefly for the goroutine to complete
Eventually(func() int {
return scanner.len()
return scanner.GetScanAllCallCount()
}, "1s", "10ms").Should(Equal(1))
// Verify scan was called with correct parameters
Expect(scanner.ScanCalls[0].FullScan).To(BeFalse()) // Should be quick scan
calls := scanner.GetScanAllCalls()
Expect(calls[0].FullScan).To(BeFalse()) // Should be quick scan
})
It("does not trigger scan when updating library without path change", func() {
@ -661,7 +663,7 @@ var _ = Describe("Library Service", func() {
// Wait a bit to ensure no scan was triggered
Consistently(func() int {
return scanner.len()
return scanner.GetScanAllCallCount()
}, "100ms", "10ms").Should(Equal(0))
})
@ -674,7 +676,7 @@ var _ = Describe("Library Service", func() {
// Ensure no scan was triggered since creation failed
Consistently(func() int {
return scanner.len()
return scanner.GetScanAllCallCount()
}, "100ms", "10ms").Should(Equal(0))
})
@ -691,7 +693,7 @@ var _ = Describe("Library Service", func() {
// Ensure no scan was triggered since update failed
Consistently(func() int {
return scanner.len()
return scanner.GetScanAllCallCount()
}, "100ms", "10ms").Should(Equal(0))
})
@ -707,11 +709,12 @@ var _ = Describe("Library Service", func() {
// Wait briefly for the goroutine to complete
Eventually(func() int {
return scanner.len()
return scanner.GetScanAllCallCount()
}, "1s", "10ms").Should(Equal(1))
// Verify scan was called with correct parameters
Expect(scanner.ScanCalls[0].FullScan).To(BeFalse()) // Should be quick scan
calls := scanner.GetScanAllCalls()
Expect(calls[0].FullScan).To(BeFalse()) // Should be quick scan
})
It("does not trigger scan when library deletion fails", func() {
@ -721,7 +724,7 @@ var _ = Describe("Library Service", func() {
// Ensure no scan was triggered since deletion failed
Consistently(func() int {
return scanner.len()
return scanner.GetScanAllCallCount()
}, "100ms", "10ms").Should(Equal(0))
})
@ -868,31 +871,6 @@ var _ = Describe("Library Service", func() {
})
})
// mockScanner provides a simple mock implementation of core.Scanner for testing
type mockScanner struct {
ScanCalls []ScanCall
mu sync.RWMutex
}
type ScanCall struct {
FullScan bool
}
func (m *mockScanner) ScanAll(ctx context.Context, fullScan bool) (warnings []string, err error) {
m.mu.Lock()
defer m.mu.Unlock()
m.ScanCalls = append(m.ScanCalls, ScanCall{
FullScan: fullScan,
})
return []string{}, nil
}
func (m *mockScanner) len() int {
m.mu.RLock()
defer m.mu.RUnlock()
return len(m.ScanCalls)
}
// mockWatcherManager provides a simple mock implementation of core.Watcher for testing
type mockWatcherManager struct {
StartedWatchers []model.Library

View File

@ -8,6 +8,7 @@ import (
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/utils/ioutils"
)
func fromEmbedded(ctx context.Context, mf *model.MediaFile) (model.LyricList, error) {
@ -27,8 +28,7 @@ func fromExternalFile(ctx context.Context, mf *model.MediaFile, suffix string) (
externalLyric := basePath[0:len(basePath)-len(ext)] + suffix
contents, err := os.ReadFile(externalLyric)
contents, err := ioutils.UTF8ReadFile(externalLyric)
if errors.Is(err, os.ErrNotExist) {
log.Trace(ctx, "no lyrics found at path", "path", externalLyric)
return nil, nil

View File

@ -108,5 +108,39 @@ var _ = Describe("sources", func() {
},
}))
})
It("should handle LRC files with UTF-8 BOM marker (issue #4631)", func() {
// The function looks for <basePath-without-ext><suffix>, so we need to pass
// a MediaFile with .mp3 path and look for .lrc suffix
mf := model.MediaFile{Path: "tests/fixtures/bom-test.mp3"}
lyrics, err := fromExternalFile(ctx, &mf, ".lrc")
Expect(err).To(BeNil())
Expect(lyrics).ToNot(BeNil())
Expect(lyrics).To(HaveLen(1))
// The critical assertion: even with BOM, synced should be true
Expect(lyrics[0].Synced).To(BeTrue(), "Lyrics with BOM marker should be recognized as synced")
Expect(lyrics[0].Line).To(HaveLen(1))
Expect(lyrics[0].Line[0].Start).To(Equal(gg.P(int64(0))))
Expect(lyrics[0].Line[0].Value).To(ContainSubstring("作曲"))
})
It("should handle UTF-16 LE encoded LRC files", func() {
mf := model.MediaFile{Path: "tests/fixtures/bom-utf16-test.mp3"}
lyrics, err := fromExternalFile(ctx, &mf, ".lrc")
Expect(err).To(BeNil())
Expect(lyrics).ToNot(BeNil())
Expect(lyrics).To(HaveLen(1))
// UTF-16 should be properly converted to UTF-8
Expect(lyrics[0].Synced).To(BeTrue(), "UTF-16 encoded lyrics should be recognized as synced")
Expect(lyrics[0].Line).To(HaveLen(2))
Expect(lyrics[0].Line[0].Start).To(Equal(gg.P(int64(18800))))
Expect(lyrics[0].Line[0].Value).To(Equal("We're no strangers to love"))
Expect(lyrics[0].Line[1].Start).To(Equal(gg.P(int64(22801))))
Expect(lyrics[0].Line[1].Value).To(Equal("You know the rules and so do I"))
})
})
})

226
core/maintenance.go Normal file
View File

@ -0,0 +1,226 @@
package core
import (
"context"
"fmt"
"slices"
"sync"
"time"
"github.com/Masterminds/squirrel"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/model/request"
"github.com/navidrome/navidrome/utils/slice"
)
// Maintenance exposes database housekeeping operations, currently focused on
// removing media files that the scanner has marked as missing.
type Maintenance interface {
	// DeleteMissingFiles deletes specific missing files by their IDs
	DeleteMissingFiles(ctx context.Context, ids []string) error
	// DeleteAllMissingFiles deletes all files marked as missing
	DeleteAllMissingFiles(ctx context.Context) error
}
// maintenanceService is the default Maintenance implementation, backed by the
// application's DataStore.
type maintenanceService struct {
	ds model.DataStore
	// wg tracks background stat-refresh goroutines; see wait(), test-only.
	wg sync.WaitGroup
}
// NewMaintenance creates a new Maintenance service backed by the given DataStore.
func NewMaintenance(ds model.DataStore) Maintenance {
	return &maintenanceService{
		ds: ds,
	}
}
// DeleteMissingFiles deletes the missing media files identified by ids, then
// runs GC and schedules a background refresh of affected stats.
func (s *maintenanceService) DeleteMissingFiles(ctx context.Context, ids []string) error {
	return s.deleteMissing(ctx, ids)
}
// DeleteAllMissingFiles deletes every media file marked as missing, then runs
// GC and schedules a background refresh of affected stats.
func (s *maintenanceService) DeleteAllMissingFiles(ctx context.Context) error {
	return s.deleteMissing(ctx, nil)
}
// deleteMissing removes missing media files (all of them when ids is empty),
// runs garbage collection, and kicks off a background stats refresh.
func (s *maintenanceService) deleteMissing(ctx context.Context, ids []string) error {
	// Capture which albums will lose tracks before deleting, so they can be
	// refreshed afterwards. Failures here are non-fatal.
	albumIDs, err := s.getAffectedAlbumIDs(ctx, ids)
	if err != nil {
		log.Warn(ctx, "Error tracking affected albums for refresh", err)
	}

	// Perform the deletion inside a single transaction.
	txErr := s.ds.WithTx(func(tx model.DataStore) error {
		repo := tx.MediaFile(ctx)
		if len(ids) > 0 {
			return repo.DeleteMissing(ids)
		}
		_, e := repo.DeleteAllMissing()
		return e
	})
	if txErr != nil {
		log.Error(ctx, "Error deleting missing tracks from DB", "ids", ids, txErr)
		return txErr
	}

	// Clean up any records orphaned by the deletion.
	if gcErr := s.ds.GC(ctx); gcErr != nil {
		log.Error(ctx, "Error running GC after deleting missing tracks", gcErr)
		return gcErr
	}

	// Update artist/album statistics without blocking the caller.
	s.refreshStatsAsync(ctx, albumIDs)
	return nil
}
// refreshAlbums recalculates album attributes (size, duration, song count,
// etc.) from media files, processing IDs in fixed-size batches to keep
// individual queries within reasonable size limits.
func (s *maintenanceService) refreshAlbums(ctx context.Context, albumIDs []string) error {
	if len(albumIDs) == 0 {
		return nil
	}
	log.Debug(ctx, "Refreshing albums", "count", len(albumIDs))

	const chunkSize = 100
	for start := 0; start < len(albumIDs); start += chunkSize {
		end := min(start+chunkSize, len(albumIDs))
		if err := s.refreshAlbumChunk(ctx, albumIDs[start:end]); err != nil {
			return fmt.Errorf("refreshing album chunk: %w", err)
		}
	}

	log.Debug(ctx, "Successfully refreshed albums", "count", len(albumIDs))
	return nil
}
// refreshAlbumChunk recalculates and persists a single batch of albums from
// their remaining media files. Albums left without tracks are skipped (GC
// removes them), and a failed Put only skips that one album.
func (s *maintenanceService) refreshAlbumChunk(ctx context.Context, albumIDs []string) error {
	albumRepo := s.ds.Album(ctx)
	mfRepo := s.ds.MediaFile(ctx)
	// Batch load existing albums
	albums, err := albumRepo.GetAll(model.QueryOptions{
		Filters: squirrel.Eq{"album.id": albumIDs},
	})
	if err != nil {
		return fmt.Errorf("loading albums: %w", err)
	}
	// Create a map for quick lookup
	albumMap := make(map[string]*model.Album, len(albums))
	for i := range albums {
		albumMap[albums[i].ID] = &albums[i]
	}
	// Batch load all media files for these albums in one query
	mediaFiles, err := mfRepo.GetAll(model.QueryOptions{
		Filters: squirrel.Eq{"album_id": albumIDs},
		Sort: "album_id, path",
	})
	if err != nil {
		return fmt.Errorf("loading media files: %w", err)
	}
	// Group media files by album ID
	filesByAlbum := make(map[string]model.MediaFiles)
	for i := range mediaFiles {
		albumID := mediaFiles[i].AlbumID
		filesByAlbum[albumID] = append(filesByAlbum[albumID], mediaFiles[i])
	}
	// Recalculate each album from its media files
	for albumID, oldAlbum := range albumMap {
		mfs, hasTracks := filesByAlbum[albumID]
		if !hasTracks {
			// Album has no tracks anymore, skip (will be cleaned up by GC)
			log.Debug(ctx, "Skipping album with no tracks", "albumID", albumID)
			continue
		}
		// Recalculate album from media files
		newAlbum := mfs.ToAlbum()
		// Only update if something changed (avoid unnecessary writes)
		if !oldAlbum.Equals(newAlbum) {
			// Bump UpdatedAt, but keep the original creation timestamp
			newAlbum.UpdatedAt = time.Now()
			newAlbum.CreatedAt = oldAlbum.CreatedAt
			if err := albumRepo.Put(&newAlbum); err != nil {
				log.Error(ctx, "Error updating album during refresh", "albumID", albumID, err)
				// Continue with other albums instead of failing entirely
				continue
			}
			log.Trace(ctx, "Refreshed album", "albumID", albumID, "name", newAlbum.Name)
		}
	}
	return nil
}
// getAffectedAlbumIDs returns the distinct album IDs of missing media files.
// When ids is non-empty, only those specific media files are considered.
func (s *maintenanceService) getAffectedAlbumIDs(ctx context.Context, ids []string) ([]string, error) {
	missing := squirrel.Eq{"missing": true}
	var filters squirrel.Sqlizer = missing
	if len(ids) > 0 {
		// Qualify the column name to avoid ambiguity with joined tables.
		filters = squirrel.And{missing, squirrel.Eq{"media_file.id": ids}}
	}

	mfs, err := s.ds.MediaFile(ctx).GetAll(model.QueryOptions{Filters: filters})
	if err != nil {
		return nil, err
	}

	// Deduplicate album IDs in a single pass, skipping files without one.
	seen := make(map[string]struct{}, len(mfs))
	result := make([]string, 0, len(mfs))
	for _, mf := range mfs {
		if mf.AlbumID == "" {
			continue
		}
		if _, dup := seen[mf.AlbumID]; dup {
			continue
		}
		seen[mf.AlbumID] = struct{}{}
		result = append(result, mf.AlbumID)
	}
	return result, nil
}
// refreshStatsAsync spawns a goroutine that refreshes artist stats and, when
// albums were affected, their aggregated attributes as well. The goroutine is
// tracked by s.wg so tests can wait for it to finish.
func (s *maintenanceService) refreshStatsAsync(ctx context.Context, affectedAlbumIDs []string) {
	s.wg.Add(1)
	go func() {
		defer s.wg.Done()
		// Detach from the request lifetime while keeping request-scoped values.
		bgCtx := request.AddValues(context.Background(), ctx)

		if _, err := s.ds.Artist(bgCtx).RefreshStats(true); err != nil {
			log.Error(bgCtx, "Error refreshing artist stats after deleting missing files", err)
		} else {
			log.Debug(bgCtx, "Successfully refreshed artist stats after deleting missing files")
		}

		if len(affectedAlbumIDs) == 0 {
			return
		}
		if err := s.refreshAlbums(bgCtx, affectedAlbumIDs); err != nil {
			log.Error(bgCtx, "Error refreshing album stats after deleting missing files", err)
		} else {
			log.Debug(bgCtx, "Successfully refreshed album stats after deleting missing files", "count", len(affectedAlbumIDs))
		}
	}()
}
// wait blocks until all background goroutines have completed.
// WARNING: This method is ONLY for testing. Never call this in production code.
// Calling wait() in production will block until ALL background operations complete
// and may cause race conditions with new operations starting.
func (s *maintenanceService) wait() {
	s.wg.Wait()
}

364
core/maintenance_test.go Normal file
View File

@ -0,0 +1,364 @@
package core
import (
"context"
"errors"
"sync"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/model/request"
"github.com/navidrome/navidrome/tests"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
"github.com/sirupsen/logrus"
)
// Test suite for the Maintenance service. It exercises deletion of missing
// files, the follow-up GC, and the background artist/album stats refresh,
// using extended mocks (see below) that track repository calls.
var _ = Describe("Maintenance", func() {
	var ds *tests.MockDataStore
	var mfRepo *extendedMediaFileRepo
	var service Maintenance
	var ctx context.Context

	BeforeEach(func() {
		ctx = context.Background()
		ctx = request.WithUser(ctx, model.User{ID: "user1", IsAdmin: true})
		ds = createTestDataStore()
		mfRepo = ds.MockedMediaFile.(*extendedMediaFileRepo)
		service = NewMaintenance(ds)
	})

	Describe("DeleteMissingFiles", func() {
		Context("with specific IDs", func() {
			It("deletes specific missing files and runs GC", func() {
				// Setup: mock missing files with album IDs
				mfRepo.SetData(model.MediaFiles{
					{ID: "mf1", AlbumID: "album1", Missing: true},
					{ID: "mf2", AlbumID: "album2", Missing: true},
				})

				err := service.DeleteMissingFiles(ctx, []string{"mf1", "mf2"})

				Expect(err).ToNot(HaveOccurred())
				Expect(mfRepo.deleteMissingCalled).To(BeTrue())
				Expect(mfRepo.deletedIDs).To(Equal([]string{"mf1", "mf2"}))
				Expect(ds.GCCalled).To(BeTrue(), "GC should be called after deletion")
			})

			It("triggers artist stats refresh and album refresh after deletion", func() {
				artistRepo := ds.MockedArtist.(*extendedArtistRepo)

				// Setup: mock missing files with albums
				albumRepo := ds.MockedAlbum.(*extendedAlbumRepo)
				albumRepo.SetData(model.Albums{
					{ID: "album1", Name: "Test Album", SongCount: 5},
				})
				mfRepo.SetData(model.MediaFiles{
					{ID: "mf1", AlbumID: "album1", Missing: true},
					{ID: "mf2", AlbumID: "album1", Missing: false, Size: 1000, Duration: 180},
					{ID: "mf3", AlbumID: "album1", Missing: false, Size: 2000, Duration: 200},
				})

				err := service.DeleteMissingFiles(ctx, []string{"mf1"})
				Expect(err).ToNot(HaveOccurred())

				// Wait for background goroutines to complete
				service.(*maintenanceService).wait()

				// RefreshStats should be called
				Expect(artistRepo.IsRefreshStatsCalled()).To(BeTrue(), "Artist stats should be refreshed")
				// Album should be updated with new calculated values
				Expect(albumRepo.GetPutCallCount()).To(BeNumerically(">", 0), "Album.Put() should be called to refresh album data")
			})

			It("returns error if deletion fails", func() {
				mfRepo.deleteMissingError = errors.New("delete failed")

				err := service.DeleteMissingFiles(ctx, []string{"mf1"})

				Expect(err).To(HaveOccurred())
				Expect(err.Error()).To(ContainSubstring("delete failed"))
			})

			It("continues even if album tracking fails", func() {
				mfRepo.SetError(true)

				err := service.DeleteMissingFiles(ctx, []string{"mf1"})

				// Should not fail, just log warning
				Expect(err).ToNot(HaveOccurred())
				Expect(mfRepo.deleteMissingCalled).To(BeTrue())
			})

			It("returns error if GC fails", func() {
				mfRepo.SetData(model.MediaFiles{
					{ID: "mf1", AlbumID: "album1", Missing: true},
				})
				// Set GC to return error
				ds.GCError = errors.New("gc failed")

				err := service.DeleteMissingFiles(ctx, []string{"mf1"})

				Expect(err).To(HaveOccurred())
				Expect(err.Error()).To(ContainSubstring("gc failed"))
			})
		})

		Context("album ID extraction", func() {
			It("extracts unique album IDs from missing files", func() {
				mfRepo.SetData(model.MediaFiles{
					{ID: "mf1", AlbumID: "album1", Missing: true},
					{ID: "mf2", AlbumID: "album1", Missing: true},
					{ID: "mf3", AlbumID: "album2", Missing: true},
				})

				err := service.DeleteMissingFiles(ctx, []string{"mf1", "mf2", "mf3"})
				Expect(err).ToNot(HaveOccurred())
			})

			It("skips files without album IDs", func() {
				mfRepo.SetData(model.MediaFiles{
					{ID: "mf1", AlbumID: "", Missing: true},
					{ID: "mf2", AlbumID: "album1", Missing: true},
				})

				err := service.DeleteMissingFiles(ctx, []string{"mf1", "mf2"})
				Expect(err).ToNot(HaveOccurred())
			})
		})
	})

	Describe("DeleteAllMissingFiles", func() {
		It("deletes all missing files and runs GC", func() {
			mfRepo.SetData(model.MediaFiles{
				{ID: "mf1", AlbumID: "album1", Missing: true},
				{ID: "mf2", AlbumID: "album2", Missing: true},
				{ID: "mf3", AlbumID: "album3", Missing: true},
			})

			err := service.DeleteAllMissingFiles(ctx)

			Expect(err).ToNot(HaveOccurred())
			Expect(ds.GCCalled).To(BeTrue(), "GC should be called after deletion")
		})

		It("returns error if deletion fails", func() {
			mfRepo.SetError(true)

			err := service.DeleteAllMissingFiles(ctx)

			Expect(err).To(HaveOccurred())
		})

		It("handles empty result gracefully", func() {
			mfRepo.SetData(model.MediaFiles{})

			err := service.DeleteAllMissingFiles(ctx)

			Expect(err).ToNot(HaveOccurred())
		})
	})

	Describe("Album refresh logic", func() {
		var albumRepo *extendedAlbumRepo

		BeforeEach(func() {
			albumRepo = ds.MockedAlbum.(*extendedAlbumRepo)
		})

		Context("when album has no tracks after deletion", func() {
			It("skips the album without updating it", func() {
				// Setup album with no remaining tracks
				albumRepo.SetData(model.Albums{
					{ID: "album1", Name: "Empty Album", SongCount: 1},
				})
				mfRepo.SetData(model.MediaFiles{
					{ID: "mf1", AlbumID: "album1", Missing: true},
				})

				err := service.DeleteMissingFiles(ctx, []string{"mf1"})
				Expect(err).ToNot(HaveOccurred())

				// Wait for background goroutines to complete
				service.(*maintenanceService).wait()

				// Album should NOT be updated because it has no tracks left
				Expect(albumRepo.GetPutCallCount()).To(Equal(0), "Album with no tracks should not be updated")
			})
		})

		Context("when Put fails for one album", func() {
			It("continues processing other albums", func() {
				albumRepo.SetData(model.Albums{
					{ID: "album1", Name: "Album 1"},
					{ID: "album2", Name: "Album 2"},
				})
				mfRepo.SetData(model.MediaFiles{
					{ID: "mf1", AlbumID: "album1", Missing: true},
					{ID: "mf2", AlbumID: "album1", Missing: false, Size: 1000, Duration: 180},
					{ID: "mf3", AlbumID: "album2", Missing: true},
					{ID: "mf4", AlbumID: "album2", Missing: false, Size: 2000, Duration: 200},
				})

				// Make Put fail on first call but succeed on subsequent calls
				albumRepo.putError = errors.New("put failed")
				albumRepo.failOnce = true

				err := service.DeleteMissingFiles(ctx, []string{"mf1", "mf3"})

				// Should not fail even if one album's Put fails
				Expect(err).ToNot(HaveOccurred())

				// Wait for background goroutines to complete
				service.(*maintenanceService).wait()

				// Put should have been called multiple times
				Expect(albumRepo.GetPutCallCount()).To(BeNumerically(">", 0), "Put should be attempted")
			})
		})

		Context("when media file loading fails", func() {
			It("logs warning but continues when tracking affected albums fails", func() {
				// Set up log capturing
				hook, cleanup := tests.LogHook()
				defer cleanup()

				albumRepo.SetData(model.Albums{
					{ID: "album1", Name: "Album 1"},
				})
				mfRepo.SetData(model.MediaFiles{
					{ID: "mf1", AlbumID: "album1", Missing: true},
				})

				// Make GetAll fail when loading media files
				mfRepo.SetError(true)

				err := service.DeleteMissingFiles(ctx, []string{"mf1"})

				// Deletion should succeed despite the tracking error
				Expect(err).ToNot(HaveOccurred())
				Expect(mfRepo.deleteMissingCalled).To(BeTrue())

				// Verify the warning was logged
				Expect(hook.LastEntry()).ToNot(BeNil())
				Expect(hook.LastEntry().Level).To(Equal(logrus.WarnLevel))
				Expect(hook.LastEntry().Message).To(Equal("Error tracking affected albums for refresh"))
			})
		})
	})
})
// createTestDataStore builds a MockDataStore whose album, artist, and media
// file repositories are wrapped with the call-tracking extensions below.
func createTestDataStore() *tests.MockDataStore {
	ds := &tests.MockDataStore{
		MockedAlbum:     &extendedAlbumRepo{MockAlbumRepo: tests.CreateMockAlbumRepo()},
		MockedArtist:    &extendedArtistRepo{MockArtistRepo: tests.CreateMockArtistRepo()},
		MockedMediaFile: &extendedMediaFileRepo{MockMediaFileRepo: tests.CreateMockMediaFileRepo()},
	}
	return ds
}
// Extension of MockMediaFileRepo to add DeleteMissing method
type extendedMediaFileRepo struct {
	*tests.MockMediaFileRepo
	deleteMissingCalled bool     // set once DeleteMissing has been invoked
	deletedIDs          []string // IDs passed to the last DeleteMissing call
	deleteMissingError  error    // when set, DeleteMissing returns this error
}
// DeleteMissing records the call, honors an injected error, and otherwise
// removes the given IDs from the mock's backing data.
func (m *extendedMediaFileRepo) DeleteMissing(ids []string) error {
	m.deleteMissingCalled, m.deletedIDs = true, ids
	if m.deleteMissingError != nil {
		return m.deleteMissingError
	}
	for _, id := range ids {
		// Mimic a real deletion against the mock's backing map.
		delete(m.Data, id)
	}
	return nil
}
// Extension of MockAlbumRepo to track Put calls
type extendedAlbumRepo struct {
	*tests.MockAlbumRepo
	mu           sync.RWMutex
	putCallCount int          // number of Put invocations (guarded by mu)
	lastPutData  *model.Album // argument of the most recent Put call
	putError     error        // when set, Put returns this error
	failOnce     bool         // when true, putError is cleared after first use
}
// Put records the call and supports fault injection: an injected putError is
// returned (and cleared when failOnce is set) instead of delegating to the
// embedded mock.
func (m *extendedAlbumRepo) Put(album *model.Album) error {
	m.mu.Lock()
	m.putCallCount++
	m.lastPutData = album
	injected := m.putError
	if injected != nil && m.failOnce {
		// Clear the error after the first failure
		m.putError = nil
	}
	m.mu.Unlock()

	if injected != nil {
		return injected
	}
	return m.MockAlbumRepo.Put(album)
}
// GetPutCallCount reports how many times Put has been invoked.
func (m *extendedAlbumRepo) GetPutCallCount() int {
	m.mu.RLock()
	count := m.putCallCount
	m.mu.RUnlock()
	return count
}
// Extension of MockArtistRepo to track RefreshStats calls
type extendedArtistRepo struct {
	*tests.MockArtistRepo
	mu                 sync.RWMutex
	refreshStatsCalled bool  // set once RefreshStats has been invoked
	refreshStatsError  error // when set, RefreshStats returns this error
}
// RefreshStats marks the call as seen and either returns the injected error
// or delegates to the embedded mock implementation.
func (m *extendedArtistRepo) RefreshStats(allArtists bool) (int64, error) {
	m.mu.Lock()
	m.refreshStatsCalled = true
	injected := m.refreshStatsError
	m.mu.Unlock()

	if injected != nil {
		return 0, injected
	}
	return m.MockArtistRepo.RefreshStats(allArtists)
}
// IsRefreshStatsCalled reports whether RefreshStats has been invoked.
func (m *extendedArtistRepo) IsRefreshStatsCalled() bool {
	m.mu.RLock()
	called := m.refreshStatsCalled
	m.mu.RUnlock()
	return called
}

View File

@ -6,6 +6,7 @@ import (
"encoding/json"
"math"
"net/http"
"os"
"path/filepath"
"runtime"
"runtime/debug"
@ -160,6 +161,13 @@ var staticData = sync.OnceValue(func() insights.Data {
data.Build.Settings, data.Build.GoVersion = buildInfo()
data.OS.Containerized = consts.InContainer
// Install info
packageFilename := filepath.Join(conf.Server.DataFolder, ".package")
packageFileData, err := os.ReadFile(packageFilename)
if err == nil {
data.OS.Package = string(packageFileData)
}
// OS info
data.OS.Type = runtime.GOOS
data.OS.Arch = runtime.GOARCH

View File

@ -16,6 +16,7 @@ type Data struct {
Containerized bool `json:"containerized"`
Arch string `json:"arch"`
NumCPU int `json:"numCPU"`
Package string `json:"package,omitempty"`
} `json:"os"`
Mem struct {
Alloc uint64 `json:"alloc"`

View File

@ -20,6 +20,7 @@ import (
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/model/criteria"
"github.com/navidrome/navidrome/model/request"
"github.com/navidrome/navidrome/utils/ioutils"
"github.com/navidrome/navidrome/utils/slice"
"golang.org/x/text/unicode/norm"
)
@ -97,12 +98,13 @@ func (s *playlists) parsePlaylist(ctx context.Context, playlistFile string, fold
}
defer file.Close()
reader := ioutils.UTF8Reader(file)
extension := strings.ToLower(filepath.Ext(playlistFile))
switch extension {
case ".nsp":
err = s.parseNSP(ctx, pls, file)
err = s.parseNSP(ctx, pls, reader)
default:
err = s.parseM3U(ctx, pls, folder, file)
err = s.parseM3U(ctx, pls, folder, reader)
}
return pls, err
}

View File

@ -74,6 +74,24 @@ var _ = Describe("Playlists", func() {
Expect(err).ToNot(HaveOccurred())
Expect(pls.Tracks).To(HaveLen(2))
})
It("parses playlists with UTF-8 BOM marker", func() {
pls, err := ps.ImportFile(ctx, folder, "bom-test.m3u")
Expect(err).ToNot(HaveOccurred())
Expect(pls.OwnerID).To(Equal("123"))
Expect(pls.Name).To(Equal("Test Playlist"))
Expect(pls.Tracks).To(HaveLen(1))
Expect(pls.Tracks[0].Path).To(Equal("tests/fixtures/playlists/test.mp3"))
})
It("parses UTF-16 LE encoded playlists with BOM and converts to UTF-8", func() {
pls, err := ps.ImportFile(ctx, folder, "bom-test-utf16.m3u")
Expect(err).ToNot(HaveOccurred())
Expect(pls.OwnerID).To(Equal("123"))
Expect(pls.Name).To(Equal("UTF-16 Test Playlist"))
Expect(pls.Tracks).To(HaveLen(1))
Expect(pls.Tracks[0].Path).To(Equal("tests/fixtures/playlists/test.mp3"))
})
})
Describe("NSP", func() {

View File

@ -13,6 +13,7 @@ import (
"github.com/navidrome/navidrome/model"
. "github.com/navidrome/navidrome/utils/gg"
"github.com/navidrome/navidrome/utils/slice"
"github.com/navidrome/navidrome/utils/str"
)
type Share interface {
@ -119,9 +120,8 @@ func (r *shareRepositoryWrapper) Save(entity interface{}) (string, error) {
log.Error(r.ctx, "Invalid Resource ID", "id", firstId)
return "", model.ErrNotFound
}
if len(s.Contents) > 30 {
s.Contents = s.Contents[:26] + "..."
}
s.Contents = str.TruncateRunes(s.Contents, 30, "...")
id, err = r.Persistable.Save(s)
return id, err

View File

@ -38,6 +38,38 @@ var _ = Describe("Share", func() {
Expect(id).ToNot(BeEmpty())
Expect(entity.ID).To(Equal(id))
})
It("does not truncate ASCII labels shorter than 30 characters", func() {
_ = ds.MediaFile(ctx).Put(&model.MediaFile{ID: "456", Title: "Example Media File"})
entity := &model.Share{Description: "test", ResourceIDs: "456"}
_, err := repo.Save(entity)
Expect(err).ToNot(HaveOccurred())
Expect(entity.Contents).To(Equal("Example Media File"))
})
It("truncates ASCII labels longer than 30 characters", func() {
_ = ds.MediaFile(ctx).Put(&model.MediaFile{ID: "789", Title: "Example Media File But The Title Is Really Long For Testing Purposes"})
entity := &model.Share{Description: "test", ResourceIDs: "789"}
_, err := repo.Save(entity)
Expect(err).ToNot(HaveOccurred())
Expect(entity.Contents).To(Equal("Example Media File But The ..."))
})
It("does not truncate CJK labels shorter than 30 runes", func() {
_ = ds.MediaFile(ctx).Put(&model.MediaFile{ID: "456", Title: "青春コンプレックス"})
entity := &model.Share{Description: "test", ResourceIDs: "456"}
_, err := repo.Save(entity)
Expect(err).ToNot(HaveOccurred())
Expect(entity.Contents).To(Equal("青春コンプレックス"))
})
It("truncates CJK labels longer than 30 runes", func() {
_ = ds.MediaFile(ctx).Put(&model.MediaFile{ID: "789", Title: "私の中の幻想的世界観及びその顕現を想起させたある現実での出来事に関する一考察"})
entity := &model.Share{Description: "test", ResourceIDs: "789"}
_, err := repo.Save(entity)
Expect(err).ToNot(HaveOccurred())
Expect(entity.Contents).To(Equal("私の中の幻想的世界観及びその顕現を想起させたある現実で..."))
})
})
Describe("Update", func() {

View File

@ -18,6 +18,7 @@ var Set = wire.NewSet(
NewShare,
NewPlaylists,
NewLibrary,
NewMaintenance,
agents.GetAgents,
external.NewProvider,
wire.Bind(new(external.Agents), new(*agents.Agents)),

View File

@ -0,0 +1,9 @@
-- +goose Up
-- +goose StatementBegin
-- Change playqueue.position to an integer column. SQLite cannot alter a
-- column's type in place, so: add a new integer column, copy the cast
-- values over, drop the old column, and rename the new one into its place.
ALTER TABLE playqueue ADD COLUMN position_int integer;
UPDATE playqueue SET position_int = CAST(position as INTEGER) ;
ALTER TABLE playqueue DROP COLUMN position;
ALTER TABLE playqueue RENAME COLUMN position_int TO position;
-- +goose StatementEnd
-- +goose Down
-- No down migration: the original column type is intentionally not restored.

77
go.mod
View File

@ -1,15 +1,15 @@
module github.com/navidrome/navidrome
go 1.24.5
go 1.25.4
// Fork to fix https://github.com/navidrome/navidrome/pull/3254
// Fork to fix https://github.com/navidrome/navidrome/issues/3254
replace github.com/dhowden/tag v0.0.0-20240417053706-3d75831295e8 => github.com/deluan/tag v0.0.0-20241002021117-dfe5e6ea396d
require (
github.com/Masterminds/squirrel v1.5.4
github.com/RaveNoX/go-jsoncommentstrip v1.0.0
github.com/andybalholm/cascadia v1.3.3
github.com/bmatcuk/doublestar/v4 v4.9.0
github.com/bmatcuk/doublestar/v4 v4.9.1
github.com/bradleyjkemp/cupaloy/v2 v2.8.0
github.com/deluan/rest v0.0.0-20211102003136-6260bc399cbf
github.com/deluan/sanitize v0.0.0-20241120162836-fdfd8fdfaa55
@ -22,15 +22,15 @@ require (
github.com/djherbis/times v1.6.0
github.com/dustin/go-humanize v1.0.1
github.com/fatih/structs v1.1.0
github.com/go-chi/chi/v5 v5.2.2
github.com/go-chi/chi/v5 v5.2.3
github.com/go-chi/cors v1.2.2
github.com/go-chi/httprate v0.15.0
github.com/go-chi/jwtauth/v5 v5.3.3
github.com/go-viper/encoding/ini v0.1.1
github.com/gohugoio/hashstructure v0.5.0
github.com/gohugoio/hashstructure v0.6.0
github.com/google/go-pipeline v0.0.0-20230411140531-6cbedfc1d3fc
github.com/google/uuid v1.6.0
github.com/google/wire v0.6.0
github.com/google/wire v0.7.0
github.com/gorilla/websocket v1.5.3
github.com/hashicorp/go-multierror v1.1.1
github.com/jellydator/ttlcache/v3 v3.4.0
@ -39,40 +39,42 @@ require (
github.com/knqyf263/go-plugin v0.9.0
github.com/kr/pretty v0.3.1
github.com/lestrrat-go/jwx/v2 v2.1.6
github.com/maruel/natural v1.2.1
github.com/matoous/go-nanoid/v2 v2.1.0
github.com/mattn/go-sqlite3 v1.14.29
github.com/mattn/go-sqlite3 v1.14.32
github.com/microcosm-cc/bluemonday v1.0.27
github.com/mileusna/useragent v1.3.5
github.com/onsi/ginkgo/v2 v2.23.4
github.com/onsi/gomega v1.38.0
github.com/onsi/ginkgo/v2 v2.27.2
github.com/onsi/gomega v1.38.2
github.com/pelletier/go-toml/v2 v2.2.4
github.com/pocketbase/dbx v1.11.0
github.com/pressly/goose/v3 v3.24.3
github.com/prometheus/client_golang v1.22.0
github.com/pressly/goose/v3 v3.26.0
github.com/prometheus/client_golang v1.23.2
github.com/rjeczalik/notify v0.9.3
github.com/robfig/cron/v3 v3.0.1
github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06
github.com/sirupsen/logrus v1.9.3
github.com/spf13/cobra v1.9.1
github.com/spf13/viper v1.20.1
github.com/stretchr/testify v1.10.0
github.com/tetratelabs/wazero v1.9.0
github.com/spf13/cobra v1.10.1
github.com/spf13/viper v1.21.0
github.com/stretchr/testify v1.11.1
github.com/tetratelabs/wazero v1.10.1
github.com/unrolled/secure v1.17.0
github.com/xrash/smetrics v0.0.0-20250705151800-55b8f293f342
go.uber.org/goleak v1.3.0
golang.org/x/exp v0.0.0-20250718183923-645b1fa84792
golang.org/x/image v0.29.0
golang.org/x/net v0.42.0
golang.org/x/sync v0.16.0
golang.org/x/sys v0.34.0
golang.org/x/text v0.27.0
golang.org/x/time v0.12.0
google.golang.org/protobuf v1.36.6
golang.org/x/exp v0.0.0-20251113190631-e25ba8c21ef6
golang.org/x/image v0.33.0
golang.org/x/net v0.47.0
golang.org/x/sync v0.18.0
golang.org/x/sys v0.38.0
golang.org/x/text v0.31.0
golang.org/x/time v0.14.0
google.golang.org/protobuf v1.36.10
gopkg.in/yaml.v3 v3.0.1
)
require (
dario.cat/mergo v1.0.2 // indirect
github.com/Masterminds/semver/v3 v3.4.0 // indirect
github.com/atombender/go-jsonschema v0.20.0 // indirect
github.com/aymerick/douceur v0.2.0 // indirect
github.com/beorn7/perks v1.0.1 // indirect
@ -86,9 +88,9 @@ require (
github.com/go-task/slim-sprig/v3 v3.0.0 // indirect
github.com/go-viper/mapstructure/v2 v2.4.0 // indirect
github.com/goccy/go-json v0.10.5 // indirect
github.com/goccy/go-yaml v1.17.1 // indirect
github.com/goccy/go-yaml v1.18.0 // indirect
github.com/google/go-cmp v0.7.0 // indirect
github.com/google/pprof v0.0.0-20250630185457-6e76a2b096b5 // indirect
github.com/google/pprof v0.0.0-20251114195745-4902fdda35c8 // indirect
github.com/google/subcommands v1.2.0 // indirect
github.com/gorilla/css v1.0.1 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
@ -108,27 +110,28 @@ require (
github.com/ogier/pflag v0.0.1 // indirect
github.com/pkg/errors v0.9.1 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/prometheus/client_model v0.6.1 // indirect
github.com/prometheus/common v0.62.0 // indirect
github.com/prometheus/client_model v0.6.2 // indirect
github.com/prometheus/common v0.66.1 // indirect
github.com/prometheus/procfs v0.16.1 // indirect
github.com/rogpeppe/go-internal v1.14.1 // indirect
github.com/sagikazarmark/locafero v0.9.0 // indirect
github.com/sagikazarmark/locafero v0.12.0 // indirect
github.com/sanity-io/litter v1.5.8 // indirect
github.com/segmentio/asm v1.2.0 // indirect
github.com/segmentio/asm v1.2.1 // indirect
github.com/sethvargo/go-retry v0.3.0 // indirect
github.com/sosodev/duration v1.3.1 // indirect
github.com/sourcegraph/conc v0.3.0 // indirect
github.com/spf13/afero v1.14.0 // indirect
github.com/spf13/cast v1.9.2 // indirect
github.com/spf13/pflag v1.0.7 // indirect
github.com/spf13/afero v1.15.0 // indirect
github.com/spf13/cast v1.10.0 // indirect
github.com/spf13/pflag v1.0.10 // indirect
github.com/stretchr/objx v0.5.2 // indirect
github.com/subosito/gotenv v1.6.0 // indirect
github.com/zeebo/xxh3 v1.0.2 // indirect
go.uber.org/automaxprocs v1.6.0 // indirect
go.uber.org/multierr v1.11.0 // indirect
golang.org/x/crypto v0.40.0 // indirect
golang.org/x/mod v0.26.0 // indirect
golang.org/x/tools v0.35.0 // indirect
go.yaml.in/yaml/v2 v2.4.2 // indirect
go.yaml.in/yaml/v3 v3.0.4 // indirect
golang.org/x/crypto v0.45.0 // indirect
golang.org/x/mod v0.30.0 // indirect
golang.org/x/telemetry v0.0.0-20251111182119-bc8e575c7b54 // indirect
golang.org/x/tools v0.39.0 // indirect
gopkg.in/ini.v1 v1.67.0 // indirect
gopkg.in/natefinch/npipe.v2 v2.0.0-20160621034901-c1b8fa8bdcce // indirect
)

191
go.sum
View File

@ -2,6 +2,8 @@ dario.cat/mergo v1.0.2 h1:85+piFYR1tMbRrLcDwR18y4UKJ3aH1Tbzi24VRW1TK8=
dario.cat/mergo v1.0.2/go.mod h1:E/hbnu0NxMFBjpMIE34DRGLWqDy0g5FuKDhCb31ngxA=
filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA=
filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4=
github.com/Masterminds/semver/v3 v3.4.0 h1:Zog+i5UMtVoCU8oKka5P7i9q9HgrJeGzI9SA1Xbatp0=
github.com/Masterminds/semver/v3 v3.4.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM=
github.com/Masterminds/squirrel v1.5.4 h1:uUcX/aBc8O7Fg9kaISIUsHXdKuqehiXAMQTYX8afzqM=
github.com/Masterminds/squirrel v1.5.4/go.mod h1:NNaOrjSoIDfDA40n7sr2tPNZRfjzjA400rg+riTZj10=
github.com/RaveNoX/go-jsoncommentstrip v1.0.0 h1:t527LHHE3HmiHrq74QMpNPZpGCIJzTx+apLkMKt4HC0=
@ -14,8 +16,8 @@ github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuP
github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4=
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
github.com/bmatcuk/doublestar/v4 v4.9.0 h1:DBvuZxjdKkRP/dr4GVV4w2fnmrk5Hxc90T51LZjv0JA=
github.com/bmatcuk/doublestar/v4 v4.9.0/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc=
github.com/bmatcuk/doublestar/v4 v4.9.1 h1:X8jg9rRZmJd4yRy7ZeNDRnM+T3ZfHv15JiBJ/avrEXE=
github.com/bmatcuk/doublestar/v4 v4.9.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc=
github.com/bradleyjkemp/cupaloy/v2 v2.8.0 h1:any4BmKE+jGIaMpnU8YgH/I2LPiLBufr6oMMlVBbn9M=
github.com/bradleyjkemp/cupaloy/v2 v2.8.0/go.mod h1:bm7JXdkRd4BHJk9HpwqAI8BoAY1lps46Enkdqw6aRX0=
github.com/cespare/reflex v0.3.1 h1:N4Y/UmRrjwOkNT0oQQnYsdr6YBxvHqtSfPB4mqOyAKk=
@ -60,8 +62,14 @@ github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7z
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k=
github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0=
github.com/go-chi/chi/v5 v5.2.2 h1:CMwsvRVTbXVytCk1Wd72Zy1LAsAh9GxMmSNWLHCG618=
github.com/go-chi/chi/v5 v5.2.2/go.mod h1:L2yAIGWB3H+phAw1NxKwWM+7eUH/lU8pOMm5hHcoops=
github.com/gkampitakis/ciinfo v0.3.2 h1:JcuOPk8ZU7nZQjdUhctuhQofk7BGHuIy0c9Ez8BNhXs=
github.com/gkampitakis/ciinfo v0.3.2/go.mod h1:1NIwaOcFChN4fa/B0hEBdAb6npDlFL8Bwx4dfRLRqAo=
github.com/gkampitakis/go-diff v1.3.2 h1:Qyn0J9XJSDTgnsgHRdz9Zp24RaJeKMUHg2+PDZZdC4M=
github.com/gkampitakis/go-diff v1.3.2/go.mod h1:LLgOrpqleQe26cte8s36HTWcTmMEur6OPYerdAAS9tk=
github.com/gkampitakis/go-snaps v0.5.15 h1:amyJrvM1D33cPHwVrjo9jQxX8g/7E2wYdZ+01KS3zGE=
github.com/gkampitakis/go-snaps v0.5.15/go.mod h1:HNpx/9GoKisdhw9AFOBT1N7DBs9DiHo/hGheFGBZ+mc=
github.com/go-chi/chi/v5 v5.2.3 h1:WQIt9uxdsAbgIYgid+BpYc+liqQZGMHRaUwp0JUcvdE=
github.com/go-chi/chi/v5 v5.2.3/go.mod h1:L2yAIGWB3H+phAw1NxKwWM+7eUH/lU8pOMm5hHcoops=
github.com/go-chi/cors v1.2.2 h1:Jmey33TE+b+rB7fT8MUy1u0I4L+NARQlK6LhzKPSyQE=
github.com/go-chi/cors v1.2.2/go.mod h1:sSbTewc+6wYHBBCW7ytsFSn836hqM7JxpglAy2Vzc58=
github.com/go-chi/httprate v0.15.0 h1:j54xcWV9KGmPf/X4H32/aTH+wBlrvxL7P+SdnRqxh5g=
@ -71,8 +79,8 @@ github.com/go-chi/jwtauth/v5 v5.3.3/go.mod h1:O4QvPRuZLZghl9WvfVaON+ARfGzpD2PBX/
github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI=
github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-sql-driver/mysql v1.4.1/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
github.com/go-sql-driver/mysql v1.9.2 h1:4cNKDYQ1I84SXslGddlsrMhc8k4LeDVj6Ad6WRjiHuU=
github.com/go-sql-driver/mysql v1.9.2/go.mod h1:qn46aNg1333BRMNU69Lq93t8du/dwxI64Gl8i5p1WMU=
github.com/go-sql-driver/mysql v1.9.3 h1:U/N249h2WzJ3Ukj8SowVFjdtZKfu9vlLZxjPXV1aweo=
github.com/go-sql-driver/mysql v1.9.3/go.mod h1:qn46aNg1333BRMNU69Lq93t8du/dwxI64Gl8i5p1WMU=
github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI=
github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8=
github.com/go-viper/encoding/ini v0.1.1 h1:MVWY7B2XNw7lnOqHutGRc97bF3rP7omOdgjdMPAJgbs=
@ -81,25 +89,24 @@ github.com/go-viper/mapstructure/v2 v2.4.0 h1:EBsztssimR/CONLSZZ04E8qAkxNYq4Qp9L
github.com/go-viper/mapstructure/v2 v2.4.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4=
github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
github.com/goccy/go-yaml v1.17.1 h1:LI34wktB2xEE3ONG/2Ar54+/HJVBriAGJ55PHls4YuY=
github.com/goccy/go-yaml v1.17.1/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA=
github.com/gohugoio/hashstructure v0.5.0 h1:G2fjSBU36RdwEJBWJ+919ERvOVqAg9tfcYp47K9swqg=
github.com/gohugoio/hashstructure v0.5.0/go.mod h1:Ser0TniXuu/eauYmrwM4o64EBvySxNzITEOLlm4igec=
github.com/goccy/go-yaml v1.18.0 h1:8W7wMFS12Pcas7KU+VVkaiCng+kG8QiFeFwzFb+rwuw=
github.com/goccy/go-yaml v1.18.0/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA=
github.com/gohugoio/hashstructure v0.6.0 h1:7wMB/2CfXoThFYhdWRGv3u3rUM761Cq29CxUW+NltUg=
github.com/gohugoio/hashstructure v0.6.0/go.mod h1:lapVLk9XidheHG1IQ4ZSbyYrXcaILU1ZEP/+vno5rBQ=
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/go-pipeline v0.0.0-20230411140531-6cbedfc1d3fc h1:hd+uUVsB1vdxohPneMrhGH2YfQuH5hRIK9u4/XCeUtw=
github.com/google/go-pipeline v0.0.0-20230411140531-6cbedfc1d3fc/go.mod h1:SL66SJVysrh7YbDCP9tH30b8a9o/N2HeiQNUm85EKhc=
github.com/google/pprof v0.0.0-20250630185457-6e76a2b096b5 h1:xhMrHhTJ6zxu3gA4enFM9MLn9AY7613teCdFnlUVbSQ=
github.com/google/pprof v0.0.0-20250630185457-6e76a2b096b5/go.mod h1:5hDyRhoBCxViHszMt12TnOpEI4VVi+U8Gm9iphldiMA=
github.com/google/pprof v0.0.0-20251114195745-4902fdda35c8 h1:3DsUAV+VNEQa2CUVLxCY3f87278uWfIDhJnbdvDjvmE=
github.com/google/pprof v0.0.0-20251114195745-4902fdda35c8/go.mod h1:I6V7YzU0XDpsHqbsyrghnFZLO1gwK6NPTNvmetQIk9U=
github.com/google/subcommands v1.2.0 h1:vWQspBTo2nEqTUFita5/KeEWlUL8kQObDFbub/EN9oE=
github.com/google/subcommands v1.2.0/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/wire v0.6.0 h1:HBkoIh4BdSxoyo9PveV8giw7ZsaBOvzWKfcg/6MrVwI=
github.com/google/wire v0.6.0/go.mod h1:F4QhpQ9EDIdJ1Mbop/NZBRB+5yrR6qg3BnctaoUk6NA=
github.com/google/wire v0.7.0 h1:JxUKI6+CVBgCO2WToKy/nQk0sS+amI9z9EjVmdaocj4=
github.com/google/wire v0.7.0/go.mod h1:n6YbUQD9cPKTnHXEBN2DXlOp/mVADhVErcMFb0v3J18=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gorilla/css v1.0.1 h1:ntNaBIghp6JmvWnxbZKANoLyuXTPZ4cAMlo6RyhlbO8=
@ -115,6 +122,8 @@ github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/jellydator/ttlcache/v3 v3.4.0 h1:YS4P125qQS0tNhtL6aeYkheEaB/m8HCqdMMP4mnWdTY=
github.com/jellydator/ttlcache/v3 v3.4.0/go.mod h1:Hw9EgjymziQD3yGsQdf1FqFdpp7YjFMd4Srg5EJlgD4=
github.com/joshdk/go-junit v1.0.0 h1:S86cUKIdwBHWwA6xCmFlf3RTLfVXYQfvanM5Uh+K6GE=
github.com/joshdk/go-junit v1.0.0/go.mod h1:TiiV0PqkaNfFXjEiyjWM3XXrhVyCa1K4Zfga6W52ung=
github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
github.com/kardianos/service v1.2.4 h1:XNlGtZOYNx2u91urOdg/Kfmc+gfmuIo1Dd3rEi2OgBk=
@ -153,14 +162,18 @@ github.com/lestrrat-go/jwx/v2 v2.1.6 h1:hxM1gfDILk/l5ylers6BX/Eq1m/pnxe9NBwW6lVf
github.com/lestrrat-go/jwx/v2 v2.1.6/go.mod h1:Y722kU5r/8mV7fYDifjug0r8FK8mZdw0K0GpJw/l8pU=
github.com/lestrrat-go/option v1.0.1 h1:oAzP2fvZGQKWkvHa1/SAcFolBEca1oN+mQ7eooNBEYU=
github.com/lestrrat-go/option v1.0.1/go.mod h1:5ZHFbivi4xwXxhxY9XHDe2FHo6/Z7WWmtT7T5nBBp3I=
github.com/maruel/natural v1.2.1 h1:G/y4pwtTA07lbQsMefvsmEO0VN0NfqpxprxXDM4R/4o=
github.com/maruel/natural v1.2.1/go.mod h1:v+Rfd79xlw1AgVBjbO0BEQmptqb5HvL/k9GRHB7ZKEg=
github.com/matoous/go-nanoid/v2 v2.1.0 h1:P64+dmq21hhWdtvZfEAofnvJULaRR1Yib0+PnU669bE=
github.com/matoous/go-nanoid/v2 v2.1.0/go.mod h1:KlbGNQ+FhrUNIHUxZdL63t7tl4LaPkZNpUULS8H4uVM=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-sqlite3 v1.14.29 h1:1O6nRLJKvsi1H2Sj0Hzdfojwt8GiGKm+LOfLaBFaouQ=
github.com/mattn/go-sqlite3 v1.14.29/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/mattn/go-sqlite3 v1.14.32 h1:JD12Ag3oLy1zQA+BNn74xRgaBbdhbNIDYvQUEuuErjs=
github.com/mattn/go-sqlite3 v1.14.32/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/mfridman/interpolate v0.0.2 h1:pnuTK7MQIxxFz1Gr+rjSIx9u7qVjf5VOoM/u6BbAxPY=
github.com/mfridman/interpolate v0.0.2/go.mod h1:p+7uk6oE07mpE/Ik1b8EckO0O4ZXiGAfshKBWLUM9Xg=
github.com/mfridman/tparse v0.18.0 h1:wh6dzOKaIwkUGyKgOntDW4liXSo37qg5AXbIhkMV3vE=
github.com/mfridman/tparse v0.18.0/go.mod h1:gEvqZTuCgEhPbYk/2lS3Kcxg1GmTxxU7kTC8DvP0i/A=
github.com/microcosm-cc/bluemonday v1.0.27 h1:MpEUotklkwCSLeH+Qdx1VJgNqLlpY2KXwXFM08ygZfk=
github.com/microcosm-cc/bluemonday v1.0.27/go.mod h1:jFi9vgW+H7c3V0lb6nR74Ib/DIB5OBs92Dimizgw2cA=
github.com/mileusna/useragent v1.3.5 h1:SJM5NzBmh/hO+4LGeATKpaEX9+b4vcGg2qXGLiNGDws=
@ -173,10 +186,10 @@ github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdh
github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
github.com/ogier/pflag v0.0.1 h1:RW6JSWSu/RkSatfcLtogGfFgpim5p7ARQ10ECk5O750=
github.com/ogier/pflag v0.0.1/go.mod h1:zkFki7tvTa0tafRvTBIZTvzYyAu6kQhPZFnshFFPE+g=
github.com/onsi/ginkgo/v2 v2.23.4 h1:ktYTpKJAVZnDT4VjxSbiBenUjmlL/5QkBEocaWXiQus=
github.com/onsi/ginkgo/v2 v2.23.4/go.mod h1:Bt66ApGPBFzHyR+JO10Zbt0Gsp4uWxu5mIOTusL46e8=
github.com/onsi/gomega v1.38.0 h1:c/WX+w8SLAinvuKKQFh77WEucCnPk4j2OTUr7lt7BeY=
github.com/onsi/gomega v1.38.0/go.mod h1:OcXcwId0b9QsE7Y49u+BTrL4IdKOBOKnD6VQNTJEB6o=
github.com/onsi/ginkgo/v2 v2.27.2 h1:LzwLj0b89qtIy6SSASkzlNvX6WktqurSHwkk2ipF/Ns=
github.com/onsi/ginkgo/v2 v2.27.2/go.mod h1:ArE1D/XhNXBXCBkKOLkbsb2c81dQHCRcF5zwn/ykDRo=
github.com/onsi/gomega v1.38.2 h1:eZCjf2xjZAqe+LeWvKb5weQ+NcPwX84kqJ0cZNxok2A=
github.com/onsi/gomega v1.38.2/go.mod h1:W2MJcYxRGV63b418Ai34Ud0hEdTVXq9NW9+Sx6uXf3k=
github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4=
github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY=
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
@ -188,16 +201,14 @@ github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRI
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pocketbase/dbx v1.11.0 h1:LpZezioMfT3K4tLrqA55wWFw1EtH1pM4tzSVa7kgszU=
github.com/pocketbase/dbx v1.11.0/go.mod h1:xXRCIAKTHMgUCyCKZm55pUOdvFziJjQfXaWKhu2vhMs=
github.com/prashantv/gostub v1.1.0 h1:BTyx3RfQjRHnUWaGF9oQos79AlQ5k8WNktv7VGvVH4g=
github.com/prashantv/gostub v1.1.0/go.mod h1:A5zLQHz7ieHGG7is6LLXLz7I8+3LZzsrV0P1IAHhP5U=
github.com/pressly/goose/v3 v3.24.3 h1:DSWWNwwggVUsYZ0X2VitiAa9sKuqtBfe+Jr9zFGwWlM=
github.com/pressly/goose/v3 v3.24.3/go.mod h1:v9zYL4xdViLHCUUJh/mhjnm6JrK7Eul8AS93IxiZM4E=
github.com/prometheus/client_golang v1.22.0 h1:rb93p9lokFEsctTys46VnV1kLCDpVZ0a/Y92Vm0Zc6Q=
github.com/prometheus/client_golang v1.22.0/go.mod h1:R7ljNsLXhuQXYZYtw6GAE9AZg8Y7vEW5scdCXrWRXC0=
github.com/prometheus/client_model v0.6.1 h1:ZKSh/rekM+n3CeS952MLRAdFwIKqeY8b62p8ais2e9E=
github.com/prometheus/client_model v0.6.1/go.mod h1:OrxVMOVHjw3lKMa8+x6HeMGkHMQyHDk9E3jmP2AmGiY=
github.com/prometheus/common v0.62.0 h1:xasJaQlnWAeyHdUBeGjXmutelfJHWMRr+Fg4QszZ2Io=
github.com/prometheus/common v0.62.0/go.mod h1:vyBcEuLSvWos9B1+CyL7JZ2up+uFzXhkqml0W5zIY1I=
github.com/pressly/goose/v3 v3.26.0 h1:KJakav68jdH0WDvoAcj8+n61WqOIaPGgH0bJWS6jpmM=
github.com/pressly/goose/v3 v3.26.0/go.mod h1:4hC1KrritdCxtuFsqgs1R4AU5bWtTAf+cnWvfhf2DNY=
github.com/prometheus/client_golang v1.23.2 h1:Je96obch5RDVy3FDMndoUsjAhG5Edi49h0RJWRi/o0o=
github.com/prometheus/client_golang v1.23.2/go.mod h1:Tb1a6LWHB3/SPIzCoaDXI4I8UHKeFTEQ1YCr+0Gyqmg=
github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk=
github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE=
github.com/prometheus/common v0.66.1 h1:h5E0h5/Y8niHc5DlaLlWLArTQI7tMrsfQjHV+d9ZoGs=
github.com/prometheus/common v0.66.1/go.mod h1:gcaUsgf3KfRSwHY4dIMXLPV0K/Wg1oZ8+SbZk/HH/dA=
github.com/prometheus/procfs v0.16.1 h1:hZ15bTNuirocR6u0JZ6BAHHmwS1p8B4P6MRqxtzMyRg=
github.com/prometheus/procfs v0.16.1/go.mod h1:teAbpZRB1iIAJYREa1LsoWUXykVXA1KlTmWl8x/U+Is=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
@ -212,12 +223,12 @@ github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06 h1:OkMGxebDjyw0ULyrTYWeN0UNCCkmCWfjPnIA2W6oviI=
github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06/go.mod h1:+ePHsJ1keEjQtpvf9HHw0f4ZeJ0TLRsxhunSI2hYJSs=
github.com/sagikazarmark/locafero v0.9.0 h1:GbgQGNtTrEmddYDSAH9QLRyfAHY12md+8YFTqyMTC9k=
github.com/sagikazarmark/locafero v0.9.0/go.mod h1:UBUyz37V+EdMS3hDF3QWIiVr/2dPrx49OMO0Bn0hJqk=
github.com/sagikazarmark/locafero v0.12.0 h1:/NQhBAkUb4+fH1jivKHWusDYFjMOOKU88eegjfxfHb4=
github.com/sagikazarmark/locafero v0.12.0/go.mod h1:sZh36u/YSZ918v0Io+U9ogLYQJ9tLLBmM4eneO6WwsI=
github.com/sanity-io/litter v1.5.8 h1:uM/2lKrWdGbRXDrIq08Lh9XtVYoeGtcQxk9rtQ7+rYg=
github.com/sanity-io/litter v1.5.8/go.mod h1:9gzJgR2i4ZpjZHsKvUXIRQVk7P+yM3e+jAF7bU2UI5U=
github.com/segmentio/asm v1.2.0 h1:9BQrFxC+YOHJlTlHGkTrFWf59nbL3XnCoFLTwDCI7ys=
github.com/segmentio/asm v1.2.0/go.mod h1:BqMnlJP91P8d+4ibuonYZw9mfnzI9HfxselHZr5aAcs=
github.com/segmentio/asm v1.2.1 h1:DTNbBqs57ioxAD4PrArqftgypG4/qNpXoJx8TVXxPR0=
github.com/segmentio/asm v1.2.1/go.mod h1:BqMnlJP91P8d+4ibuonYZw9mfnzI9HfxselHZr5aAcs=
github.com/sethvargo/go-retry v0.3.0 h1:EEt31A35QhrcRZtrYFDTBg91cqZVnFL2navjDrah2SE=
github.com/sethvargo/go-retry v0.3.0/go.mod h1:mNX17F0C/HguQMyMyJxcnU471gOZGxCLyYaFyAZraas=
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
@ -229,19 +240,17 @@ github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIK
github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
github.com/sosodev/duration v1.3.1 h1:qtHBDMQ6lvMQsL15g4aopM4HEfOaYuhWBw3NPTtlqq4=
github.com/sosodev/duration v1.3.1/go.mod h1:RQIBBX0+fMLc/D9+Jb/fwvVmo0eZvDDEERAikUR6SDg=
github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo=
github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0=
github.com/spf13/afero v1.14.0 h1:9tH6MapGnn/j0eb0yIXiLjERO8RB6xIVZRDCX7PtqWA=
github.com/spf13/afero v1.14.0/go.mod h1:acJQ8t0ohCGuMN3O+Pv0V0hgMxNYDlvdk+VTfyZmbYo=
github.com/spf13/cast v1.9.2 h1:SsGfm7M8QOFtEzumm7UZrZdLLquNdzFYfIbEXntcFbE=
github.com/spf13/cast v1.9.2/go.mod h1:jNfB8QC9IA6ZuY2ZjDp0KtFO2LZZlg4S/7bzP6qqeHo=
github.com/spf13/cobra v1.9.1 h1:CXSaggrXdbHK9CF+8ywj8Amf7PBRmPCOJugH954Nnlo=
github.com/spf13/cobra v1.9.1/go.mod h1:nDyEzZ8ogv936Cinf6g1RU9MRY64Ir93oCnqb9wxYW0=
github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/pflag v1.0.7 h1:vN6T9TfwStFPFM5XzjsvmzZkLuaLX+HS+0SeFLRgU6M=
github.com/spf13/pflag v1.0.7/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/viper v1.20.1 h1:ZMi+z/lvLyPSCoNtFCpqjy0S4kPbirhpTMwl8BkW9X4=
github.com/spf13/viper v1.20.1/go.mod h1:P9Mdzt1zoHIG8m2eZQinpiBjo6kCmZSKBClNNqjJvu4=
github.com/spf13/afero v1.15.0 h1:b/YBCLWAJdFWJTN9cLhiXXcD7mzKn9Dm86dNnfyQw1I=
github.com/spf13/afero v1.15.0/go.mod h1:NC2ByUVxtQs4b3sIUphxK0NioZnmxgyCrfzeuq8lxMg=
github.com/spf13/cast v1.10.0 h1:h2x0u2shc1QuLHfxi+cTJvs30+ZAHOGRic8uyGTDWxY=
github.com/spf13/cast v1.10.0/go.mod h1:jNfB8QC9IA6ZuY2ZjDp0KtFO2LZZlg4S/7bzP6qqeHo=
github.com/spf13/cobra v1.10.1 h1:lJeBwCfmrnXthfAupyUTzJ/J4Nc1RsHC/mSRU2dll/s=
github.com/spf13/cobra v1.10.1/go.mod h1:7SmJGaTHFVBY0jW4NXGluQoLvhqFQM+6XSKD+P4XaB0=
github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk=
github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/viper v1.21.0 h1:x5S+0EU27Lbphp4UKm1C+1oQO+rKx36vfCoaVebLFSU=
github.com/spf13/viper v1.21.0/go.mod h1:P0lhsswPGWD/1lZJ9ny3fYnVqxiegrlNrEmgLjbTCAY=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
@ -252,12 +261,20 @@ github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81P
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
github.com/tetratelabs/wazero v1.9.0 h1:IcZ56OuxrtaEz8UYNRHBrUa9bYeX9oVY93KspZZBf/I=
github.com/tetratelabs/wazero v1.9.0/go.mod h1:TSbcXCfFP0L2FGkRPxHphadXPjo1T6W+CseNNY7EkjM=
github.com/tetratelabs/wazero v1.10.1 h1:2DugeJf6VVk58KTPszlNfeeN8AhhpwcZqkJj2wwFuH8=
github.com/tetratelabs/wazero v1.10.1/go.mod h1:DRm5twOQ5Gr1AoEdSi0CLjDQF1J9ZAuyqFIjl1KKfQU=
github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY=
github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA=
github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM=
github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4=
github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY=
github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28=
github.com/unrolled/secure v1.17.0 h1:Io7ifFgo99Bnh0J7+Q+qcMzWM6kaDPCA5FroFZEdbWU=
github.com/unrolled/secure v1.17.0/go.mod h1:BmF5hyM6tXczk3MpQkFf1hpKSRqCyhqcbiQtiAF7+40=
github.com/xrash/smetrics v0.0.0-20250705151800-55b8f293f342 h1:FnBeRrxr7OU4VvAzt5X7s6266i6cSVkkFPS0TuXWbIg=
@ -267,34 +284,34 @@ github.com/zeebo/assert v1.3.0 h1:g7C04CbJuIDKNPFHmsk4hwZDO5O+kntRxzaUoNXj+IQ=
github.com/zeebo/assert v1.3.0/go.mod h1:Pq9JiuJQpG8JLJdtkwrJESF0Foym2/D9XMU5ciN/wJ0=
github.com/zeebo/xxh3 v1.0.2 h1:xZmwmqxHZA8AI603jOQ0tMqmBr9lPeFwGg6d+xy9DC0=
github.com/zeebo/xxh3 v1.0.2/go.mod h1:5NWz9Sef7zIDm2JHfFlcQvNekmcEl9ekUZQQKCYaDcA=
go.uber.org/automaxprocs v1.6.0 h1:O3y2/QNTOdbF+e/dpXNNW7Rx2hZ4sTIPyybbxyNqTUs=
go.uber.org/automaxprocs v1.6.0/go.mod h1:ifeIMSnPZuznNm6jmdzmU3/bfk01Fe2fotchwEFJ8r8=
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0=
go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
go.yaml.in/yaml/v2 v2.4.2 h1:DzmwEr2rDGHl7lsFgAHxmNz/1NlQ7xLIrlN2h5d1eGI=
go.yaml.in/yaml/v2 v2.4.2/go.mod h1:081UH+NErpNdqlCXm3TtEran0rJZGxAYx9hb/ELlsPU=
go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc=
go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg=
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
golang.org/x/crypto v0.40.0 h1:r4x+VvoG5Fm+eJcxMaY8CQM7Lb0l1lsmjGBQ6s8BfKM=
golang.org/x/crypto v0.40.0/go.mod h1:Qr1vMER5WyS2dfPHAlsOj01wgLbsyWtFn/aY+5+ZdxY=
golang.org/x/exp v0.0.0-20250718183923-645b1fa84792 h1:R9PFI6EUdfVKgwKjZef7QIwGcBKu86OEFpJ9nUEP2l4=
golang.org/x/exp v0.0.0-20250718183923-645b1fa84792/go.mod h1:A+z0yzpGtvnG90cToK5n2tu8UJVP2XUATh+r+sfOOOc=
golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
golang.org/x/exp v0.0.0-20251113190631-e25ba8c21ef6 h1:zfMcR1Cs4KNuomFFgGefv5N0czO2XZpUbxGUy8i8ug0=
golang.org/x/exp v0.0.0-20251113190631-e25ba8c21ef6/go.mod h1:46edojNIoXTNOhySWIWdix628clX9ODXwPsQuG6hsK0=
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.29.0 h1:HcdsyR4Gsuys/Axh0rDEmlBmB68rW1U9BUdB3UVHsas=
golang.org/x/image v0.29.0/go.mod h1:RVJROnf3SLK8d26OW91j4FrIHGbsJ8QnbEocVTOWQDA=
golang.org/x/image v0.33.0 h1:LXRZRnv1+zGd5XBUVRFmYEphyyKJjQjCRiOuAP3sZfQ=
golang.org/x/image v0.33.0/go.mod h1:DD3OsTYT9chzuzTQt+zMcOlBHgfoKQb1gry8p76Y1sc=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.14.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.26.0 h1:EGMPT//Ezu+ylkCijjPc+f4Aih7sZvaAr+O3EHBxvZg=
golang.org/x/mod v0.26.0/go.mod h1:/j6NAhSk8iQ723BGAUyoAcn7SlD7s15Dp9Nd/SfeaFQ=
golang.org/x/mod v0.30.0 h1:fDEXFVZ/fmCKProc/yAXXUijritrDzahmwwefnjoPFk=
golang.org/x/mod v0.30.0/go.mod h1:lAsf5O2EvJeSFMiBxXDki7sCgAxEUcZHXoXMKT4GJKc=
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
@ -303,12 +320,11 @@ golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
golang.org/x/net v0.42.0 h1:jzkYrhi3YQWD6MLBJcsklgQsoAcw89EcZbJw8Z614hs=
golang.org/x/net v0.42.0/go.mod h1:FF1RA5d3u7nAYA4z2TkclSCKh68eSXtiFwcWQpPXdt8=
golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@ -316,8 +332,8 @@ golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw=
golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=
golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180926160741-c2ed4eda69e7/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@ -331,19 +347,19 @@ golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.34.0 h1:H5Y5sJ2L2JRdyv7ROF1he/lPdvFsd0mJHFw2ThKHxLA=
golang.org/x/sys v0.34.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
golang.org/x/telemetry v0.0.0-20251111182119-bc8e575c7b54 h1:E2/AqCUMZGgd73TQkxUMcMla25GB9i/5HOdLr+uH7Vo=
golang.org/x/telemetry v0.0.0-20251111182119-bc8e575c7b54/go.mod h1:hKdjCMrbv9skySur+Nek8Hd0uJ0GuxJIoIX2payrIdQ=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY=
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
@ -357,24 +373,23 @@ golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
golang.org/x/text v0.27.0 h1:4fGWRpyh641NLlecmyl4LOe6yDdfaYNrGb2zdfo4JV4=
golang.org/x/text v0.27.0/go.mod h1:1D28KMCvyooCX9hBiosv5Tz/+YLxj0j7XhWjpSUF7CU=
golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE=
golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg=
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI=
golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
golang.org/x/tools v0.17.0/go.mod h1:xsh6VxdV005rRVaS6SSAf9oiAqljS7UZUacMZ8Bnsps=
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
golang.org/x/tools v0.35.0 h1:mBffYraMEf7aa0sB+NuKnuCy8qI/9Bughn8dC2Gu5r0=
golang.org/x/tools v0.35.0/go.mod h1:NKdj5HkL/73byiZSJjqJgKn3ep7KjFkBOkR/Hps3VPw=
golang.org/x/tools v0.39.0 h1:ik4ho21kwuQln40uelmciQPp9SipgNDdrafrYA4TmQQ=
golang.org/x/tools v0.39.0/go.mod h1:JnefbkDPyD8UU2kI5fuf8ZX4/yUeh9W877ZeBONxUqQ=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY=
google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY=
google.golang.org/protobuf v1.36.10 h1:AYd7cD/uASjIL6Q9LiTjz8JLcrh/88q5UObnmY3aOOE=
google.golang.org/protobuf v1.36.10/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
@ -386,11 +401,11 @@ gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
modernc.org/libc v1.65.0 h1:e183gLDnAp9VJh6gWKdTy0CThL9Pt7MfcR/0bgb7Y1Y=
modernc.org/libc v1.65.0/go.mod h1:7m9VzGq7APssBTydds2zBcxGREwvIGpuUBaKTXdm2Qs=
modernc.org/libc v1.66.3 h1:cfCbjTUcdsKyyZZfEUKfoHcP3S0Wkvz3jgSzByEWVCQ=
modernc.org/libc v1.66.3/go.mod h1:XD9zO8kt59cANKvHPXpx7yS2ELPheAey0vjIuZOhOU8=
modernc.org/mathutil v1.7.1 h1:GCZVGXdaN8gTqB1Mf/usp1Y/hSqgI2vAGGP4jZMCxOU=
modernc.org/mathutil v1.7.1/go.mod h1:4p5IwJITfppl0G4sUEDtCr4DthTaT47/N3aT6MhfgJg=
modernc.org/memory v1.10.0 h1:fzumd51yQ1DxcOxSO+S6X7+QTuVU+n8/Aj7swYjFfC4=
modernc.org/memory v1.10.0/go.mod h1:/JP4VbVC+K5sU2wZi9bHoq2MAkCnrt2r98UGeSK7Mjw=
modernc.org/sqlite v1.37.0 h1:s1TMe7T3Q3ovQiK2Ouz4Jwh7dw4ZDqbebSDTlSJdfjI=
modernc.org/sqlite v1.37.0/go.mod h1:5YiWv+YviqGMuGw4V+PNplcyaJ5v+vQd7TQOgkACoJM=
modernc.org/memory v1.11.0 h1:o4QC8aMQzmcwCK3t3Ux/ZHmwFPzE6hf2Y5LbkRs+hbI=
modernc.org/memory v1.11.0/go.mod h1:/JP4VbVC+K5sU2wZi9bHoq2MAkCnrt2r98UGeSK7Mjw=
modernc.org/sqlite v1.38.2 h1:Aclu7+tgjgcQVShZqim41Bbw9Cho0y/7WzYptXqkEek=
modernc.org/sqlite v1.38.2/go.mod h1:cPTJYSlgg3Sfg046yBShXENNtPrWrDX8bsbAQBzgQ5E=

View File

@ -11,6 +11,7 @@ import (
"runtime"
"sort"
"strings"
"sync"
"time"
"github.com/sirupsen/logrus"
@ -70,6 +71,7 @@ type levelPath struct {
var (
currentLevel Level
loggerMu sync.RWMutex
defaultLogger = logrus.New()
logSourceLine = false
rootPath string
@ -78,8 +80,10 @@ var (
// SetLevel sets the global log level used by the simple logger.
func SetLevel(l Level) {
	loggerMu.Lock()
	currentLevel = l
	// Keep the underlying logrus logger fully open; filtering is performed by
	// this package (currentLevel / logLevels), not by logrus itself.
	defaultLogger.Level = logrus.TraceLevel
	loggerMu.Unlock()
	// Also propagate the level to the global logrus logger, for any code that
	// logs through logrus directly. Called outside the lock on purpose:
	// loggerMu only guards this package's state.
	logrus.SetLevel(logrus.Level(l))
}
@ -110,6 +114,8 @@ func levelFromString(l string) Level {
// SetLogLevels sets the log levels for specific paths in the codebase.
func SetLogLevels(levels map[string]string) {
loggerMu.Lock()
defer loggerMu.Unlock()
logLevels = nil
for k, v := range levels {
logLevels = append(logLevels, levelPath{path: k, level: levelFromString(v)})
@ -125,6 +131,8 @@ func SetLogSourceLine(enabled bool) {
// SetRedacting installs the redaction hook on the default logger when enabled
// is true. Calling it with false is a no-op: redaction cannot be removed once
// it has been enabled.
func SetRedacting(enabled bool) {
	if !enabled {
		return
	}
	loggerMu.Lock()
	defer loggerMu.Unlock()
	defaultLogger.AddHook(redacted)
}
@ -133,6 +141,8 @@ func SetOutput(w io.Writer) {
if runtime.GOOS == "windows" {
w = CRLFWriter(w)
}
loggerMu.Lock()
defer loggerMu.Unlock()
defaultLogger.SetOutput(w)
}
@ -158,10 +168,14 @@ func NewContext(ctx context.Context, keyValuePairs ...interface{}) context.Conte
}
// SetDefaultLogger replaces the package-level logrus logger used by this
// package. The write lock guards the defaultLogger pointer against concurrent
// readers (Writer, createNewLogger).
func SetDefaultLogger(l *logrus.Logger) {
	loggerMu.Lock()
	defer loggerMu.Unlock()
	defaultLogger = l
}
// CurrentLevel returns the global log level, taking the read lock so the
// value is consistent with concurrent SetLevel calls.
func CurrentLevel() Level {
	loggerMu.RLock()
	defer loggerMu.RUnlock()
	return currentLevel
}
@ -204,14 +218,21 @@ func log(level Level, args ...interface{}) {
}
// Writer returns an io.Writer that forwards everything written to it to the
// default logger. The read lock only protects the read of the defaultLogger
// pointer, not subsequent writes through the returned writer.
func Writer() io.Writer {
	loggerMu.RLock()
	defer loggerMu.RUnlock()
	return defaultLogger.Writer()
}
func shouldLog(requiredLevel Level, skip int) bool {
if currentLevel >= requiredLevel {
loggerMu.RLock()
level := currentLevel
levels := logLevels
loggerMu.RUnlock()
if level >= requiredLevel {
return true
}
if len(logLevels) == 0 {
if len(levels) == 0 {
return false
}
@ -221,7 +242,7 @@ func shouldLog(requiredLevel Level, skip int) bool {
}
file = strings.TrimPrefix(file, rootPath)
for _, lp := range logLevels {
for _, lp := range levels {
if strings.HasPrefix(file, lp.path) {
return lp.level >= requiredLevel
}
@ -314,6 +335,8 @@ func extractLogger(ctx interface{}) (*logrus.Entry, error) {
// createNewLogger returns a fresh logrus Entry bound to the current default
// logger, reading the defaultLogger pointer under the read lock.
func createNewLogger() *logrus.Entry {
	loggerMu.RLock()
	defer loggerMu.RUnlock()
	return logrus.NewEntry(defaultLogger)
}

View File

@ -61,7 +61,12 @@ func (c Criteria) OrderBy() string {
if f.order != "" {
mapped = f.order
} else if f.isTag {
mapped = "COALESCE(json_extract(media_file.tags, '$." + sortField + "[0].value'), '')"
// Use the actual field name (handles aliases like albumtype -> releasetype)
tagName := sortField
if f.field != "" {
tagName = f.field
}
mapped = "COALESCE(json_extract(media_file.tags, '$." + tagName + "[0].value'), '')"
} else if f.isRole {
mapped = "COALESCE(json_extract(media_file.participants, '$." + sortField + "[0].name'), '')"
} else {

View File

@ -118,6 +118,16 @@ var _ = Describe("Criteria", func() {
)
})
It("sorts by albumtype alias (resolves to releasetype)", func() {
AddTagNames([]string{"releasetype"})
goObj.Sort = "albumtype"
gomega.Expect(goObj.OrderBy()).To(
gomega.Equal(
"COALESCE(json_extract(media_file.tags, '$.releasetype[0].value'), '') asc",
),
)
})
It("sorts by random", func() {
newObj := goObj
newObj.Sort = "random"

View File

@ -32,7 +32,6 @@ var fieldMap = map[string]*mappedField{
"sortalbum": {field: "media_file.sort_album_name"},
"sortartist": {field: "media_file.sort_artist_name"},
"sortalbumartist": {field: "media_file.sort_album_artist_name"},
"albumtype": {field: "media_file.mbz_album_type", alias: "releasetype"},
"albumcomment": {field: "media_file.mbz_album_comment"},
"catalognumber": {field: "media_file.catalog_num"},
"filepath": {field: "media_file.path"},
@ -55,6 +54,9 @@ var fieldMap = map[string]*mappedField{
"mbz_release_group_id": {field: "media_file.mbz_release_group_id"},
"library_id": {field: "media_file.library_id", numeric: true},
// Backward compatibility: albumtype is an alias for releasetype tag
"albumtype": {field: "releasetype", isTag: true},
// special fields
"random": {field: "", order: "random()"}, // pseudo-field for random sorting
"value": {field: "value"}, // pseudo-field for tag and roles values
@ -154,13 +156,19 @@ type tagCond struct {
func (e tagCond) ToSql() (string, []any, error) {
cond, args, err := e.cond.ToSql()
// Check if this tag is marked as numeric in the fieldMap
if fm, ok := fieldMap[e.tag]; ok && fm.numeric {
cond = strings.ReplaceAll(cond, "value", "CAST(value AS REAL)")
// Resolve the actual tag name (handles aliases like albumtype -> releasetype)
tagName := e.tag
if fm, ok := fieldMap[e.tag]; ok {
if fm.field != "" {
tagName = fm.field
}
if fm.numeric {
cond = strings.ReplaceAll(cond, "value", "CAST(value AS REAL)")
}
}
cond = fmt.Sprintf("exists (select 1 from json_tree(tags, '$.%s') where key='value' and %s)",
e.tag, cond)
tagName, cond)
if e.not {
cond = "not " + cond
}

View File

@ -105,6 +105,40 @@ var _ = Describe("Operators", func() {
gomega.Expect(sql).To(gomega.BeEmpty())
gomega.Expect(args).To(gomega.BeEmpty())
})
It("supports releasetype as multi-valued tag", func() {
AddTagNames([]string{"releasetype"})
op := Contains{"releasetype": "soundtrack"}
sql, args, err := op.ToSql()
gomega.Expect(err).ToNot(gomega.HaveOccurred())
gomega.Expect(sql).To(gomega.Equal("exists (select 1 from json_tree(tags, '$.releasetype') where key='value' and value LIKE ?)"))
gomega.Expect(args).To(gomega.HaveExactElements("%soundtrack%"))
})
It("supports albumtype as alias for releasetype", func() {
AddTagNames([]string{"releasetype"})
op := Contains{"albumtype": "live"}
sql, args, err := op.ToSql()
gomega.Expect(err).ToNot(gomega.HaveOccurred())
gomega.Expect(sql).To(gomega.Equal("exists (select 1 from json_tree(tags, '$.releasetype') where key='value' and value LIKE ?)"))
gomega.Expect(args).To(gomega.HaveExactElements("%live%"))
})
It("supports albumtype alias with Is operator", func() {
AddTagNames([]string{"releasetype"})
op := Is{"albumtype": "album"}
sql, args, err := op.ToSql()
gomega.Expect(err).ToNot(gomega.HaveOccurred())
// Should query $.releasetype, not $.albumtype
gomega.Expect(sql).To(gomega.Equal("exists (select 1 from json_tree(tags, '$.releasetype') where key='value' and value = ?)"))
gomega.Expect(args).To(gomega.HaveExactElements("album"))
})
It("supports albumtype alias with IsNot operator", func() {
AddTagNames([]string{"releasetype"})
op := IsNot{"albumtype": "compilation"}
sql, args, err := op.ToSql()
gomega.Expect(err).ToNot(gomega.HaveOccurred())
// Should query $.releasetype, not $.albumtype
gomega.Expect(sql).To(gomega.Equal("not exists (select 1 from json_tree(tags, '$.releasetype') where key='value' and value = ?)"))
gomega.Expect(args).To(gomega.HaveExactElements("compilation"))
})
})
Describe("Custom Roles", func() {

View File

@ -43,5 +43,5 @@ type DataStore interface {
WithTx(block func(tx DataStore) error, scope ...string) error
WithTxImmediate(block func(tx DataStore) error, scope ...string) error
GC(ctx context.Context) error
GC(ctx context.Context, libraryIDs ...int) error
}

View File

@ -85,7 +85,7 @@ type FolderRepository interface {
GetByPath(lib Library, path string) (*Folder, error)
GetAll(...QueryOptions) ([]Folder, error)
CountAll(...QueryOptions) (int64, error)
GetLastUpdates(lib Library) (map[string]FolderUpdateInfo, error)
GetFolderUpdateInfo(lib Library, targetPaths ...string) (map[string]FolderUpdateInfo, error)
Put(*Folder) error
MarkMissing(missing bool, ids ...string) error
GetTouchedWithPlaylists() (FolderCursor, error)

View File

@ -23,7 +23,7 @@ func legacyTrackID(mf model.MediaFile, prependLibId bool) string {
}
func legacyAlbumID(mf model.MediaFile, md Metadata, prependLibId bool) string {
releaseDate := legacyReleaseDate(md)
_, _, releaseDate := md.mapDates()
albumPath := strings.ToLower(fmt.Sprintf("%s\\%s", legacyMapAlbumArtistName(md), legacyMapAlbumName(md)))
if !conf.Server.Scanner.GroupAlbumReleases {
if len(releaseDate) != 0 {
@ -55,9 +55,3 @@ func legacyMapAlbumName(md Metadata) string {
consts.UnknownAlbum,
)
}
// Keep the TaggedLikePicard logic for backwards compatibility
func legacyReleaseDate(md Metadata) string {
_, _, releaseDate := md.mapDates()
return string(releaseDate)
}

View File

@ -1,30 +0,0 @@
package metadata
import (
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// Table-driven spec for the legacy release-date fallback: per the entries
// below, when RELEASEDATE is present it wins; when it is empty the recording
// DATE is used instead (the old "TaggedLikePicard" behavior).
var _ = Describe("legacyReleaseDate", func() {
	DescribeTable("legacyReleaseDate",
		func(recordingDate, originalDate, releaseDate, expected string) {
			// Build a Metadata instance from raw date tags only.
			md := New("", Info{
				Tags: map[string][]string{
					"DATE":         {recordingDate},
					"ORIGINALDATE": {originalDate},
					"RELEASEDATE":  {releaseDate},
				},
			})
			result := legacyReleaseDate(md)
			Expect(result).To(Equal(expected))
		},
		Entry("regular mapping", "2020-05-15", "2019-02-10", "2021-01-01", "2021-01-01"),
		Entry("legacy mapping", "2020-05-15", "2019-02-10", "", "2020-05-15"),
		Entry("legacy mapping, originalYear < year", "2018-05-15", "2019-02-10", "2021-01-01", "2021-01-01"),
		Entry("legacy mapping, originalYear empty", "2020-05-15", "", "2021-01-01", "2021-01-01"),
		Entry("legacy mapping, releaseYear", "2020-05-15", "2019-02-10", "2021-01-01", "2021-01-01"),
		Entry("legacy mapping, same dates", "2020-05-15", "2020-05-15", "", "2020-05-15"),
	)
})

View File

@ -75,6 +75,23 @@ var _ = Describe("ToMediaFile", func() {
Expect(mf.OriginalYear).To(Equal(1966))
Expect(mf.ReleaseYear).To(Equal(2014))
})
DescribeTable("legacyReleaseDate (TaggedLikePicard old behavior)",
func(recordingDate, originalDate, releaseDate, expected string) {
mf := toMediaFile(model.RawTags{
"DATE": {recordingDate},
"ORIGINALDATE": {originalDate},
"RELEASEDATE": {releaseDate},
})
Expect(mf.ReleaseDate).To(Equal(expected))
},
Entry("regular mapping", "2020-05-15", "2019-02-10", "2021-01-01", "2021-01-01"),
Entry("legacy mapping", "2020-05-15", "2019-02-10", "", "2020-05-15"),
Entry("legacy mapping, originalYear < year", "2018-05-15", "2019-02-10", "2021-01-01", "2021-01-01"),
Entry("legacy mapping, originalYear empty", "2020-05-15", "", "2021-01-01", "2021-01-01"),
Entry("legacy mapping, releaseYear", "2020-05-15", "2019-02-10", "2021-01-01", "2021-01-01"),
Entry("legacy mapping, same dates", "2020-05-15", "2020-05-15", "", "2020-05-15"),
)
})
Describe("Lyrics", func() {

81
model/scanner.go Normal file
View File

@ -0,0 +1,81 @@
package model
import (
"context"
"fmt"
"strconv"
"strings"
"time"
)
// ScanTarget identifies a specific folder within a library to be scanned.
// NOTE: This struct is used as a map key, so it should only contain comparable types.
type ScanTarget struct {
	LibraryID  int
	FolderPath string // Relative path within the library, or "" for entire library
}

// String renders the target in the same "libraryID:folderPath" form that
// ParseTargets accepts.
func (st ScanTarget) String() string {
	return strconv.Itoa(st.LibraryID) + ":" + st.FolderPath
}
// ScannerStatus holds information about the current scan status
type ScannerStatus struct {
	Scanning    bool          // whether a scan is currently in progress
	LastScan    time.Time     // time of the most recent scan — TODO confirm whether start or completion time
	Count       uint32        // NOTE(review): presumably number of items (files) processed — confirm with Scanner impl
	FolderCount uint32        // NOTE(review): presumably number of folders processed — confirm with Scanner impl
	LastError   string        // last error reported by the scanner, empty if none
	ScanType    string        // kind of scan; exact values are defined by the Scanner implementation
	ElapsedTime time.Duration // elapsed time of the current (or last) scan
}
// Scanner is the contract for scanning libraries into the database.
type Scanner interface {
	// ScanAll starts a scan of all libraries. This is a blocking operation.
	ScanAll(ctx context.Context, fullScan bool) (warnings []string, err error)

	// ScanFolders scans specific library/folder pairs, recursing into subdirectories.
	// If targets is nil, it scans all libraries. This is a blocking operation.
	ScanFolders(ctx context.Context, fullScan bool, targets []ScanTarget) (warnings []string, err error)

	// Status reports the scanner's current state.
	Status(context.Context) (*ScannerStatus, error)
}
// ParseTargets parses scan targets strings into ScanTarget structs.
// Each entry must have the form "libraryID:folderPath", where libraryID is a
// positive integer and folderPath may be empty (meaning the whole library)
// and may itself contain colons (only the first colon separates the two).
// Blank entries are skipped; leading/trailing whitespace is trimmed.
// Example: []string{"1:Music/Rock", "2:Classical"}
// It returns an error if any entry is malformed, or if no valid targets remain.
func ParseTargets(libFolders []string) ([]ScanTarget, error) {
	targets := make([]ScanTarget, 0, len(libFolders))
	for _, part := range libFolders {
		part = strings.TrimSpace(part)
		if part == "" {
			continue
		}
		// Split on the first colon only, so folder paths may contain colons
		// (e.g. "1:C:/Music").
		libIDStr, folderPath, ok := strings.Cut(part, ":")
		if !ok {
			return nil, fmt.Errorf("invalid target format: %q (expected libraryID:folderPath)", part)
		}
		libID, err := strconv.Atoi(libIDStr)
		if err != nil {
			return nil, fmt.Errorf("invalid library ID %q: %w", libIDStr, err)
		}
		if libID <= 0 {
			return nil, fmt.Errorf("invalid library ID %q", libIDStr)
		}
		targets = append(targets, ScanTarget{
			LibraryID:  libID,
			FolderPath: folderPath,
		})
	}
	if len(targets) == 0 {
		return nil, fmt.Errorf("no valid targets found")
	}
	return targets, nil
}

89
model/scanner_test.go Normal file
View File

@ -0,0 +1,89 @@
package model_test
import (
"github.com/navidrome/navidrome/model"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// Unit tests for model.ParseTargets, covering the "libraryID:folderPath"
// parsing rules: whitespace trimming, empty entries, colons inside folder
// paths, and each error condition (bad format, non-numeric/zero/negative IDs,
// and empty input).
var _ = Describe("ParseTargets", func() {
	It("parses multiple entries in slice", func() {
		targets, err := model.ParseTargets([]string{"1:Music/Rock", "1:Music/Jazz", "2:Classical"})
		Expect(err).ToNot(HaveOccurred())
		Expect(targets).To(HaveLen(3))
		Expect(targets[0].LibraryID).To(Equal(1))
		Expect(targets[0].FolderPath).To(Equal("Music/Rock"))
		Expect(targets[1].LibraryID).To(Equal(1))
		Expect(targets[1].FolderPath).To(Equal("Music/Jazz"))
		Expect(targets[2].LibraryID).To(Equal(2))
		Expect(targets[2].FolderPath).To(Equal("Classical"))
	})

	It("handles empty folder paths", func() {
		// "N:" means "scan the whole library N"
		targets, err := model.ParseTargets([]string{"1:", "2:"})
		Expect(err).ToNot(HaveOccurred())
		Expect(targets).To(HaveLen(2))
		Expect(targets[0].FolderPath).To(Equal(""))
		Expect(targets[1].FolderPath).To(Equal(""))
	})

	It("trims whitespace from entries", func() {
		targets, err := model.ParseTargets([]string{" 1:Music/Rock", " 2:Classical "})
		Expect(err).ToNot(HaveOccurred())
		Expect(targets).To(HaveLen(2))
		Expect(targets[0].LibraryID).To(Equal(1))
		Expect(targets[0].FolderPath).To(Equal("Music/Rock"))
		Expect(targets[1].LibraryID).To(Equal(2))
		Expect(targets[1].FolderPath).To(Equal("Classical"))
	})

	It("skips empty strings", func() {
		targets, err := model.ParseTargets([]string{"1:Music/Rock", "", "2:Classical"})
		Expect(err).ToNot(HaveOccurred())
		Expect(targets).To(HaveLen(2))
	})

	It("handles paths with colons", func() {
		// Only the first colon separates library ID from path
		targets, err := model.ParseTargets([]string{"1:C:/Music/Rock", "2:/path:with:colons"})
		Expect(err).ToNot(HaveOccurred())
		Expect(targets).To(HaveLen(2))
		Expect(targets[0].FolderPath).To(Equal("C:/Music/Rock"))
		Expect(targets[1].FolderPath).To(Equal("/path:with:colons"))
	})

	It("returns error for invalid format without colon", func() {
		_, err := model.ParseTargets([]string{"1Music/Rock"})
		Expect(err).To(HaveOccurred())
		Expect(err.Error()).To(ContainSubstring("invalid target format"))
	})

	It("returns error for non-numeric library ID", func() {
		_, err := model.ParseTargets([]string{"abc:Music/Rock"})
		Expect(err).To(HaveOccurred())
		Expect(err.Error()).To(ContainSubstring("invalid library ID"))
	})

	It("returns error for negative library ID", func() {
		_, err := model.ParseTargets([]string{"-1:Music/Rock"})
		Expect(err).To(HaveOccurred())
		Expect(err.Error()).To(ContainSubstring("invalid library ID"))
	})

	It("returns error for zero library ID", func() {
		_, err := model.ParseTargets([]string{"0:Music/Rock"})
		Expect(err).To(HaveOccurred())
		Expect(err.Error()).To(ContainSubstring("invalid library ID"))
	})

	It("returns error for empty input", func() {
		_, err := model.ParseTargets([]string{})
		Expect(err).To(HaveOccurred())
		Expect(err.Error()).To(ContainSubstring("no valid targets found"))
	})

	It("returns error for all empty strings", func() {
		_, err := model.ParseTargets([]string{"", " ", ""})
		Expect(err).To(HaveOccurred())
		Expect(err.Error()).To(ContainSubstring("no valid targets found"))
	})
})

View File

@ -337,8 +337,12 @@ on conflict (user_id, item_id, item_type) do update
return r.executeSQL(query)
}
func (r *albumRepository) purgeEmpty() error {
func (r *albumRepository) purgeEmpty(libraryIDs ...int) error {
del := Delete(r.tableName).Where("id not in (select distinct(album_id) from media_file)")
// If libraryIDs are specified, only purge albums from those libraries
if len(libraryIDs) > 0 {
del = del.Where(Eq{"library_id": libraryIDs})
}
c, err := r.executeSQL(del)
if err != nil {
return fmt.Errorf("purging empty albums: %w", err)

View File

@ -55,6 +55,7 @@ var _ = Describe("AlbumRepository", func() {
It("returns all records sorted", func() {
Expect(GetAll(model.QueryOptions{Sort: "name"})).To(Equal(model.Albums{
albumAbbeyRoad,
albumMultiDisc,
albumRadioactivity,
albumSgtPeppers,
}))
@ -64,6 +65,7 @@ var _ = Describe("AlbumRepository", func() {
Expect(GetAll(model.QueryOptions{Sort: "name", Order: "desc"})).To(Equal(model.Albums{
albumSgtPeppers,
albumRadioactivity,
albumMultiDisc,
albumAbbeyRoad,
}))
})

View File

@ -400,23 +400,16 @@ func (r *artistRepository) RefreshStats(allArtists bool) (int64, error) {
// This now calculates per-library statistics and stores them in library_artist.stats
batchUpdateStatsSQL := `
WITH artist_role_counters AS (
SELECT jt.atom AS artist_id,
SELECT mfa.artist_id,
mf.library_id,
substr(
replace(jt.path, '$.', ''),
1,
CASE WHEN instr(replace(jt.path, '$.', ''), '[') > 0
THEN instr(replace(jt.path, '$.', ''), '[') - 1
ELSE length(replace(jt.path, '$.', ''))
END
) AS role,
mfa.role,
count(DISTINCT mf.album_id) AS album_count,
count(mf.id) AS count,
count(DISTINCT mf.id) AS count,
sum(mf.size) AS size
FROM media_file mf
JOIN json_tree(mf.participants) jt ON jt.key = 'id' AND jt.atom IS NOT NULL
WHERE jt.atom IN (ROLE_IDS_PLACEHOLDER) -- Will replace with actual placeholders
GROUP BY jt.atom, mf.library_id, role
FROM media_file_artists mfa
JOIN media_file mf ON mfa.media_file_id = mf.id
WHERE mfa.artist_id IN (ROLE_IDS_PLACEHOLDER) -- Will replace with actual placeholders
GROUP BY mfa.artist_id, mf.library_id, mfa.role
),
artist_total_counters AS (
SELECT mfa.artist_id,
@ -445,24 +438,24 @@ func (r *artistRepository) RefreshStats(allArtists bool) (int64, error) {
),
combined_counters AS (
SELECT artist_id, library_id, role, album_count, count, size FROM artist_role_counters
UNION
UNION ALL
SELECT artist_id, library_id, role, album_count, count, size FROM artist_total_counters
UNION
UNION ALL
SELECT artist_id, library_id, role, album_count, count, size FROM artist_participant_counter
),
library_artist_counters AS (
SELECT artist_id,
library_id,
json_group_object(
replace(role, '"', ''),
role,
json_object('a', album_count, 'm', count, 's', size)
) AS counters
FROM combined_counters
GROUP BY artist_id, library_id
)
UPDATE library_artist
SET stats = coalesce((SELECT counters FROM library_artist_counters lac
WHERE lac.artist_id = library_artist.artist_id
SET stats = coalesce((SELECT counters FROM library_artist_counters lac
WHERE lac.artist_id = library_artist.artist_id
AND lac.library_id = library_artist.library_id), '{}')
WHERE library_artist.artist_id IN (ROLE_IDS_PLACEHOLDER);` // Will replace with actual placeholders

View File

@ -4,7 +4,10 @@ import (
"context"
"encoding/json"
"fmt"
"os"
"path/filepath"
"slices"
"strings"
"time"
. "github.com/Masterminds/squirrel"
@ -91,8 +94,47 @@ func (r folderRepository) CountAll(opt ...model.QueryOptions) (int64, error) {
return r.count(query)
}
func (r folderRepository) GetLastUpdates(lib model.Library) (map[string]model.FolderUpdateInfo, error) {
sq := r.newSelect().Columns("id", "updated_at", "hash").Where(Eq{"library_id": lib.ID, "missing": false})
func (r folderRepository) GetFolderUpdateInfo(lib model.Library, targetPaths ...string) (map[string]model.FolderUpdateInfo, error) {
where := And{
Eq{"library_id": lib.ID},
Eq{"missing": false},
}
// If specific paths are requested, include those folders and all their descendants
if len(targetPaths) > 0 {
// Collect folder IDs for exact target folders and path conditions for descendants
folderIDs := make([]string, 0, len(targetPaths))
pathConditions := make(Or, 0, len(targetPaths)*2)
for _, targetPath := range targetPaths {
if targetPath == "" || targetPath == "." {
// Root path - include everything in this library
pathConditions = Or{}
folderIDs = nil
break
}
// Clean the path to normalize it. Paths stored in the folder table do not have leading/trailing slashes.
cleanPath := strings.TrimPrefix(targetPath, string(os.PathSeparator))
cleanPath = filepath.Clean(cleanPath)
// Include the target folder itself by ID
folderIDs = append(folderIDs, model.FolderID(lib, cleanPath))
// Include all descendants: folders whose path field equals or starts with the target path
// Note: Folder.Path is the directory path, so children have path = targetPath
pathConditions = append(pathConditions, Eq{"path": cleanPath})
pathConditions = append(pathConditions, Like{"path": cleanPath + "/%"})
}
// Combine conditions: exact folder IDs OR descendant path patterns
if len(folderIDs) > 0 {
where = append(where, Or{Eq{"id": folderIDs}, pathConditions})
} else if len(pathConditions) > 0 {
where = append(where, pathConditions)
}
}
sq := r.newSelect().Columns("id", "updated_at", "hash").Where(where)
var res []struct {
ID string
UpdatedAt time.Time
@ -149,7 +191,7 @@ func (r folderRepository) GetTouchedWithPlaylists() (model.FolderCursor, error)
}, nil
}
func (r folderRepository) purgeEmpty() error {
func (r folderRepository) purgeEmpty(libraryIDs ...int) error {
sq := Delete(r.tableName).Where(And{
Eq{"num_audio_files": 0},
Eq{"num_playlists": 0},
@ -157,6 +199,10 @@ func (r folderRepository) purgeEmpty() error {
ConcatExpr("id not in (select parent_id from folder)"),
ConcatExpr("id not in (select folder_id from media_file)"),
})
// If libraryIDs are specified, only purge folders from those libraries
if len(libraryIDs) > 0 {
sq = sq.Where(Eq{"library_id": libraryIDs})
}
c, err := r.executeSQL(sq)
if err != nil {
return fmt.Errorf("purging empty folders: %w", err)

View File

@ -0,0 +1,213 @@
package persistence
import (
"context"
"fmt"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/model/request"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
"github.com/pocketbase/dbx"
)
var _ = Describe("FolderRepository", func() {
var repo model.FolderRepository
var ctx context.Context
var conn *dbx.DB
var testLib, otherLib model.Library
BeforeEach(func() {
ctx = request.WithUser(log.NewContext(context.TODO()), model.User{ID: "userid"})
conn = GetDBXBuilder()
repo = newFolderRepository(ctx, conn)
// Use existing library ID 1 from test fixtures
libRepo := NewLibraryRepository(ctx, conn)
lib, err := libRepo.Get(1)
Expect(err).ToNot(HaveOccurred())
testLib = *lib
// Create a second library with its own folder to verify isolation
otherLib = model.Library{Name: "Other Library", Path: "/other/path"}
Expect(libRepo.Put(&otherLib)).To(Succeed())
})
AfterEach(func() {
// Clean up only test folders created by our tests (paths starting with "Test")
// This prevents interference with fixture data needed by other tests
_, _ = conn.NewQuery("DELETE FROM folder WHERE library_id = 1 AND path LIKE 'Test%'").Execute()
_, _ = conn.NewQuery(fmt.Sprintf("DELETE FROM library WHERE id = %d", otherLib.ID)).Execute()
})
Describe("GetFolderUpdateInfo", func() {
Context("with no target paths", func() {
It("returns all folders in the library", func() {
// Create test folders with unique names to avoid conflicts
folder1 := model.NewFolder(testLib, "TestGetLastUpdates/Folder1")
folder2 := model.NewFolder(testLib, "TestGetLastUpdates/Folder2")
err := repo.Put(folder1)
Expect(err).ToNot(HaveOccurred())
err = repo.Put(folder2)
Expect(err).ToNot(HaveOccurred())
otherFolder := model.NewFolder(otherLib, "TestOtherLib/Folder")
err = repo.Put(otherFolder)
Expect(err).ToNot(HaveOccurred())
// Query all folders (no target paths) - should only return folders from testLib
results, err := repo.GetFolderUpdateInfo(testLib)
Expect(err).ToNot(HaveOccurred())
// Should include folders from testLib
Expect(results).To(HaveKey(folder1.ID))
Expect(results).To(HaveKey(folder2.ID))
// Should NOT include folders from other library
Expect(results).ToNot(HaveKey(otherFolder.ID))
})
})
Context("with specific target paths", func() {
It("returns folder info for existing folders", func() {
// Create test folders with unique names
folder1 := model.NewFolder(testLib, "TestSpecific/Rock")
folder2 := model.NewFolder(testLib, "TestSpecific/Jazz")
folder3 := model.NewFolder(testLib, "TestSpecific/Classical")
err := repo.Put(folder1)
Expect(err).ToNot(HaveOccurred())
err = repo.Put(folder2)
Expect(err).ToNot(HaveOccurred())
err = repo.Put(folder3)
Expect(err).ToNot(HaveOccurred())
// Query specific paths
results, err := repo.GetFolderUpdateInfo(testLib, "TestSpecific/Rock", "TestSpecific/Classical")
Expect(err).ToNot(HaveOccurred())
Expect(results).To(HaveLen(2))
// Verify folder IDs are in results
Expect(results).To(HaveKey(folder1.ID))
Expect(results).To(HaveKey(folder3.ID))
Expect(results).ToNot(HaveKey(folder2.ID))
// Verify update info is populated
Expect(results[folder1.ID].UpdatedAt).ToNot(BeZero())
Expect(results[folder1.ID].Hash).To(Equal(folder1.Hash))
})
It("includes all child folders when querying parent", func() {
// Create a parent folder with multiple children
parent := model.NewFolder(testLib, "TestParent/Music")
child1 := model.NewFolder(testLib, "TestParent/Music/Rock/Queen")
child2 := model.NewFolder(testLib, "TestParent/Music/Jazz")
otherParent := model.NewFolder(testLib, "TestParent2/Music/Jazz")
Expect(repo.Put(parent)).To(Succeed())
Expect(repo.Put(child1)).To(Succeed())
Expect(repo.Put(child2)).To(Succeed())
// Query the parent folder - should return parent and all children
results, err := repo.GetFolderUpdateInfo(testLib, "TestParent/Music")
Expect(err).ToNot(HaveOccurred())
Expect(results).To(HaveLen(3))
Expect(results).To(HaveKey(parent.ID))
Expect(results).To(HaveKey(child1.ID))
Expect(results).To(HaveKey(child2.ID))
Expect(results).ToNot(HaveKey(otherParent.ID))
})
It("excludes children from other libraries", func() {
// Create parent in testLib
parent := model.NewFolder(testLib, "TestIsolation/Parent")
child := model.NewFolder(testLib, "TestIsolation/Parent/Child")
Expect(repo.Put(parent)).To(Succeed())
Expect(repo.Put(child)).To(Succeed())
// Create similar path in other library
otherParent := model.NewFolder(otherLib, "TestIsolation/Parent")
otherChild := model.NewFolder(otherLib, "TestIsolation/Parent/Child")
Expect(repo.Put(otherParent)).To(Succeed())
Expect(repo.Put(otherChild)).To(Succeed())
// Query should only return folders from testLib
results, err := repo.GetFolderUpdateInfo(testLib, "TestIsolation/Parent")
Expect(err).ToNot(HaveOccurred())
Expect(results).To(HaveLen(2))
Expect(results).To(HaveKey(parent.ID))
Expect(results).To(HaveKey(child.ID))
Expect(results).ToNot(HaveKey(otherParent.ID))
Expect(results).ToNot(HaveKey(otherChild.ID))
})
It("excludes missing children when querying parent", func() {
// Create parent and children, mark one as missing
parent := model.NewFolder(testLib, "TestMissingChild/Parent")
child1 := model.NewFolder(testLib, "TestMissingChild/Parent/Child1")
child2 := model.NewFolder(testLib, "TestMissingChild/Parent/Child2")
child2.Missing = true
Expect(repo.Put(parent)).To(Succeed())
Expect(repo.Put(child1)).To(Succeed())
Expect(repo.Put(child2)).To(Succeed())
// Query parent - should only return parent and non-missing child
results, err := repo.GetFolderUpdateInfo(testLib, "TestMissingChild/Parent")
Expect(err).ToNot(HaveOccurred())
Expect(results).To(HaveLen(2))
Expect(results).To(HaveKey(parent.ID))
Expect(results).To(HaveKey(child1.ID))
Expect(results).ToNot(HaveKey(child2.ID))
})
// Verifies that querying several paths at once tolerates paths with no
// matching folder rows: non-existing paths simply contribute nothing,
// while existing ones are still returned.
It("handles mix of existing and non-existing target paths", func() {
// Create folders for one path but not the other
existingParent := model.NewFolder(testLib, "TestMixed/Exists")
existingChild := model.NewFolder(testLib, "TestMixed/Exists/Child")
Expect(repo.Put(existingParent)).To(Succeed())
Expect(repo.Put(existingChild)).To(Succeed())
// Query both existing and non-existing paths
results, err := repo.GetFolderUpdateInfo(testLib, "TestMixed/Exists", "TestMixed/DoesNotExist")
Expect(err).ToNot(HaveOccurred())
// Only the two folders under the existing path are returned; the
// non-existing path yields no entries and no error.
Expect(results).To(HaveLen(2))
Expect(results).To(HaveKey(existingParent.ID))
Expect(results).To(HaveKey(existingChild.ID))
})
// Verifies that an empty path argument is treated as the library root
// (folder path "."), resolving to the root folder's ID.
// NOTE(review): the assertion is wrapped in `if len(results) > 0`, so this
// test passes vacuously when the fixtures provide no root folder — it only
// checks the mapping when a root folder happens to exist. Consider asserting
// the fixture precondition explicitly.
It("handles empty folder path as root", func() {
// Test querying for root folder without creating it (fixtures should have one)
rootFolderID := model.FolderID(testLib, ".")
results, err := repo.GetFolderUpdateInfo(testLib, "")
Expect(err).ToNot(HaveOccurred())
// Should return the root folder if it exists
if len(results) > 0 {
Expect(results).To(HaveKey(rootFolderID))
}
})
// Verifies that querying a path with no folder rows returns an empty
// (non-error) result rather than a lookup failure.
It("returns empty map for non-existent folders", func() {
results, err := repo.GetFolderUpdateInfo(testLib, "NonExistent/Path")
Expect(err).ToNot(HaveOccurred())
Expect(results).To(BeEmpty())
})
// Verifies that a folder flagged Missing is excluded even when it is the
// directly queried path (not just a child — complements the earlier
// missing-children test).
It("skips missing folders", func() {
// Create a folder and mark it as missing
folder := model.NewFolder(testLib, "TestMissing/Folder")
folder.Missing = true
err := repo.Put(folder)
Expect(err).ToNot(HaveOccurred())
results, err := repo.GetFolderUpdateInfo(testLib, "TestMissing/Folder")
Expect(err).ToNot(HaveOccurred())
Expect(results).To(BeEmpty())
})
})
})
})

View File

@ -177,7 +177,9 @@ func (r *libraryRepository) ScanEnd(id int) error {
return err
}
// https://www.sqlite.org/pragma.html#pragma_optimize
_, err = r.executeSQL(Expr("PRAGMA optimize=0x10012;"))
// Use mask 0x10000 to check table sizes without running ANALYZE
// Running ANALYZE can cause query planner issues with expression-based collation indexes
_, err = r.executeSQL(Expr("PRAGMA optimize=0x10000;"))
return err
}

View File

@ -142,4 +142,62 @@ var _ = Describe("LibraryRepository", func() {
Expect(libAfter.TotalSize).To(Equal(sizeRes.Sum))
Expect(libAfter.TotalDuration).To(Equal(durationRes.Sum))
})
// Exercises the library scan lifecycle markers: ScanBegin records the scan
// start time and whether a full scan is in progress; ScanEnd records the
// completion time and clears the in-progress state.
Describe("ScanBegin and ScanEnd", func() {
var lib *model.Library
BeforeEach(func() {
// Fresh library per spec; ID 0 lets the repository assign an ID on
// Put (presumably — TODO confirm Put's ID-assignment behavior).
lib = &model.Library{
ID: 0,
Name: "Test Scan Library",
Path: "/music/test-scan",
}
err := repo.Put(lib)
Expect(err).ToNot(HaveOccurred())
})
// Both table entries assert LastScanStartedAt is set; they differ only in
// the expected FullScanInProgress flag.
DescribeTable("ScanBegin",
func(fullScan bool, expectedFullScanInProgress bool) {
err := repo.ScanBegin(lib.ID, fullScan)
Expect(err).ToNot(HaveOccurred())
updatedLib, err := repo.Get(lib.ID)
Expect(err).ToNot(HaveOccurred())
Expect(updatedLib.LastScanStartedAt).ToNot(BeZero())
Expect(updatedLib.FullScanInProgress).To(Equal(expectedFullScanInProgress))
},
Entry("sets FullScanInProgress to true for full scan", true, true),
Entry("sets FullScanInProgress to false for quick scan", false, false),
)
Context("ScanEnd", func() {
BeforeEach(func() {
// A scan must be in progress before ScanEnd is meaningful.
err := repo.ScanBegin(lib.ID, true)
Expect(err).ToNot(HaveOccurred())
})
It("sets LastScanAt and clears FullScanInProgress and LastScanStartedAt", func() {
err := repo.ScanEnd(lib.ID)
Expect(err).ToNot(HaveOccurred())
updatedLib, err := repo.Get(lib.ID)
Expect(err).ToNot(HaveOccurred())
Expect(updatedLib.LastScanAt).ToNot(BeZero())
Expect(updatedLib.FullScanInProgress).To(BeFalse())
Expect(updatedLib.LastScanStartedAt).To(BeZero())
})
It("sets LastScanAt to be after LastScanStartedAt", func() {
// Snapshot the start time before ending the scan, since ScanEnd
// clears LastScanStartedAt.
libBefore, err := repo.Get(lib.ID)
Expect(err).ToNot(HaveOccurred())
err = repo.ScanEnd(lib.ID)
Expect(err).ToNot(HaveOccurred())
libAfter, err := repo.Get(lib.ID)
Expect(err).ToNot(HaveOccurred())
// ">=" rather than ">" tolerates clock resolution where begin and
// end land on the same timestamp.
Expect(libAfter.LastScanAt).To(BeTemporally(">=", libBefore.LastScanStartedAt))
})
})
})
})

View File

@ -38,7 +38,7 @@ var _ = Describe("MediaRepository", func() {
})
It("counts the number of mediafiles in the DB", func() {
Expect(mr.CountAll()).To(Equal(int64(6)))
Expect(mr.CountAll()).To(Equal(int64(10)))
})
It("returns songs ordered by lyrics with a specific title/artist", func() {

View File

@ -157,7 +157,7 @@ func (s *SQLStore) WithTxImmediate(block func(tx model.DataStore) error, scope .
}, scope...)
}
func (s *SQLStore) GC(ctx context.Context) error {
func (s *SQLStore) GC(ctx context.Context, libraryIDs ...int) error {
trace := func(ctx context.Context, msg string, f func() error) func() error {
return func() error {
start := time.Now()
@ -167,11 +167,17 @@ func (s *SQLStore) GC(ctx context.Context) error {
}
}
// If libraryIDs are provided, scope operations to those libraries where possible
scoped := len(libraryIDs) > 0
if scoped {
log.Debug(ctx, "GC: Running selective garbage collection", "libraryIDs", libraryIDs)
}
err := run.Sequentially(
trace(ctx, "purge empty albums", func() error { return s.Album(ctx).(*albumRepository).purgeEmpty() }),
trace(ctx, "purge empty albums", func() error { return s.Album(ctx).(*albumRepository).purgeEmpty(libraryIDs...) }),
trace(ctx, "purge empty artists", func() error { return s.Artist(ctx).(*artistRepository).purgeEmpty() }),
trace(ctx, "mark missing artists", func() error { return s.Artist(ctx).(*artistRepository).markMissing() }),
trace(ctx, "purge empty folders", func() error { return s.Folder(ctx).(*folderRepository).purgeEmpty() }),
trace(ctx, "purge empty folders", func() error { return s.Folder(ctx).(*folderRepository).purgeEmpty(libraryIDs...) }),
trace(ctx, "clean album annotations", func() error { return s.Album(ctx).(*albumRepository).cleanAnnotations() }),
trace(ctx, "clean artist annotations", func() error { return s.Artist(ctx).(*artistRepository).cleanAnnotations() }),
trace(ctx, "clean media file annotations", func() error { return s.MediaFile(ctx).(*mediaFileRepository).cleanAnnotations() }),

View File

@ -69,10 +69,12 @@ var (
albumSgtPeppers = al(model.Album{ID: "101", Name: "Sgt Peppers", AlbumArtist: "The Beatles", OrderAlbumName: "sgt peppers", AlbumArtistID: "3", EmbedArtPath: p("/beatles/1/sgt/a day.mp3"), SongCount: 1, MaxYear: 1967})
albumAbbeyRoad = al(model.Album{ID: "102", Name: "Abbey Road", AlbumArtist: "The Beatles", OrderAlbumName: "abbey road", AlbumArtistID: "3", EmbedArtPath: p("/beatles/1/come together.mp3"), SongCount: 1, MaxYear: 1969})
albumRadioactivity = al(model.Album{ID: "103", Name: "Radioactivity", AlbumArtist: "Kraftwerk", OrderAlbumName: "radioactivity", AlbumArtistID: "2", EmbedArtPath: p("/kraft/radio/radio.mp3"), SongCount: 2})
albumMultiDisc = al(model.Album{ID: "104", Name: "Multi Disc Album", AlbumArtist: "Test Artist", OrderAlbumName: "multi disc album", AlbumArtistID: "1", EmbedArtPath: p("/test/multi/disc1/track1.mp3"), SongCount: 4})
testAlbums = model.Albums{
albumSgtPeppers,
albumAbbeyRoad,
albumRadioactivity,
albumMultiDisc,
}
)
@ -94,13 +96,22 @@ var (
Lyrics: `[{"lang":"xxx","line":[{"value":"This is a set of lyrics"}],"synced":false}]`,
})
songAntenna2 = mf(model.MediaFile{ID: "1006", Title: "Antenna", ArtistID: "2", Artist: "Kraftwerk", AlbumID: "103"})
testSongs = model.MediaFiles{
// Multi-disc album tracks (intentionally out of order to test sorting)
songDisc2Track11 = mf(model.MediaFile{ID: "2001", Title: "Disc 2 Track 11", ArtistID: "1", Artist: "Test Artist", AlbumID: "104", Album: "Multi Disc Album", DiscNumber: 2, TrackNumber: 11, Path: p("/test/multi/disc2/track11.mp3"), OrderAlbumName: "multi disc album", OrderArtistName: "test artist"})
songDisc1Track01 = mf(model.MediaFile{ID: "2002", Title: "Disc 1 Track 1", ArtistID: "1", Artist: "Test Artist", AlbumID: "104", Album: "Multi Disc Album", DiscNumber: 1, TrackNumber: 1, Path: p("/test/multi/disc1/track1.mp3"), OrderAlbumName: "multi disc album", OrderArtistName: "test artist"})
songDisc2Track01 = mf(model.MediaFile{ID: "2003", Title: "Disc 2 Track 1", ArtistID: "1", Artist: "Test Artist", AlbumID: "104", Album: "Multi Disc Album", DiscNumber: 2, TrackNumber: 1, Path: p("/test/multi/disc2/track1.mp3"), OrderAlbumName: "multi disc album", OrderArtistName: "test artist"})
songDisc1Track02 = mf(model.MediaFile{ID: "2004", Title: "Disc 1 Track 2", ArtistID: "1", Artist: "Test Artist", AlbumID: "104", Album: "Multi Disc Album", DiscNumber: 1, TrackNumber: 2, Path: p("/test/multi/disc1/track2.mp3"), OrderAlbumName: "multi disc album", OrderArtistName: "test artist"})
testSongs = model.MediaFiles{
songDayInALife,
songComeTogether,
songRadioactivity,
songAntenna,
songAntennaWithLyrics,
songAntenna2,
songDisc2Track11,
songDisc1Track01,
songDisc2Track01,
songDisc1Track02,
}
)

View File

@ -219,4 +219,37 @@ var _ = Describe("PlaylistRepository", func() {
})
})
})
// Verifies the "album" sort key on playlist tracks orders multi-disc album
// tracks by disc number then track number (fixture IDs 2001-2004 are
// intentionally scrambled across discs).
Describe("Playlist Track Sorting", func() {
var testPlaylistID string
// Clean up the playlist created by the spec so fixtures stay pristine
// for other specs.
AfterEach(func() {
if testPlaylistID != "" {
Expect(repo.Delete(testPlaylistID)).To(BeNil())
testPlaylistID = ""
}
})
It("sorts tracks correctly by album (disc and track number)", func() {
By("creating a playlist with multi-disc album tracks in arbitrary order")
newPls := model.Playlist{Name: "Multi-Disc Test", OwnerID: "userid"}
// Add tracks in intentionally scrambled order
newPls.AddMediaFilesByID([]string{"2001", "2002", "2003", "2004"})
Expect(repo.Put(&newPls)).To(Succeed())
testPlaylistID = newPls.ID
By("retrieving tracks sorted by album")
tracksRepo := repo.Tracks(newPls.ID, false)
tracks, err := tracksRepo.GetAll(model.QueryOptions{Sort: "album", Order: "asc"})
Expect(err).ToNot(HaveOccurred())
By("verifying tracks are sorted by disc number then track number")
Expect(tracks).To(HaveLen(4))
// Expected order: Disc 1 Track 1, Disc 1 Track 2, Disc 2 Track 1, Disc 2 Track 11
Expect(tracks[0].MediaFileID).To(Equal("2002")) // Disc 1, Track 1
Expect(tracks[1].MediaFileID).To(Equal("2004")) // Disc 1, Track 2
Expect(tracks[2].MediaFileID).To(Equal("2003")) // Disc 2, Track 1
Expect(tracks[3].MediaFileID).To(Equal("2001")) // Disc 2, Track 11
})
})
})

View File

@ -55,7 +55,7 @@ func (r *playlistRepository) Tracks(playlistId string, refreshSmartPlaylist bool
"id": "playlist_tracks.id",
"artist": "order_artist_name",
"album_artist": "order_album_artist_name",
"album": "order_album_name, order_album_artist_name",
"album": "order_album_name, album_id, disc_number, track_number, order_artist_name, title",
"title": "order_title",
// To make sure these fields will be whitelisted
"duration": "duration",

View File

@ -57,6 +57,7 @@ func NewUserRepository(ctx context.Context, db dbx.Builder) model.UserRepository
r.db = db
r.tableName = "user"
r.registerModel(&model.User{}, map[string]filterFunc{
"id": idFilter(r.tableName),
"password": invalidFilter(ctx),
"name": r.withTableName(startsWithFilter),
})

View File

@ -559,4 +559,15 @@ var _ = Describe("UserRepository", func() {
Expect(user.Libraries[0].ID).To(Equal(1))
})
})
// Verifies that the user repository's "id" filter is qualified with the
// table name ("user.id"), avoiding ambiguous-column errors when the query
// joins other tables that also have an "id" column.
Describe("filters", func() {
It("qualifies id filter with table name", func() {
// Reach into the concrete repository to build the SQL without
// executing it, then assert on the generated query text.
r := repo.(*userRepository)
qo := r.parseRestOptions(r.ctx, rest.QueryOptions{Filters: map[string]any{"id": "123"}})
sel := r.selectUserWithLibraries(qo)
query, _, err := r.toSQL(sel)
Expect(err).NotTo(HaveOccurred())
// {:p0} is the dbx placeholder for the first bound parameter.
Expect(query).To(ContainSubstring("user.id = {:p0}"))
})
})
})

View File

@ -93,8 +93,12 @@ func (s *subsonicAPIServiceImpl) Call(ctx context.Context, req *subsonicapi.Call
RawQuery: query.Encode(),
}
// Create HTTP request with internal authentication
httpReq, err := http.NewRequestWithContext(ctx, "GET", finalURL.String(), nil)
// Create HTTP request with a fresh context to avoid Chi RouteContext pollution.
// Using http.NewRequest (instead of http.NewRequestWithContext) ensures the internal
// SubsonicAPI call doesn't inherit routing information from the parent handler,
// which would cause Chi to invoke the wrong handler. Authentication context is
// explicitly added in the next step via request.WithInternalAuth.
httpReq, err := http.NewRequest("GET", finalURL.String(), nil)
if err != nil {
return &subsonicapi.CallResponse{
Error: fmt.Sprintf("failed to create HTTP request: %v", err),

View File

@ -4,6 +4,7 @@ import (
"context"
"os"
"path/filepath"
"time"
"github.com/navidrome/navidrome/conf"
"github.com/navidrome/navidrome/core/agents"
@ -22,8 +23,11 @@ var _ = Describe("Plugin Manager", func() {
// but, as this is an integration test, we can't use configtest.SetupConfig() as it causes
// data races.
originalPluginsFolder := conf.Server.Plugins.Folder
originalTimeout := conf.Server.DevPluginCompilationTimeout
conf.Server.DevPluginCompilationTimeout = 2 * time.Minute
DeferCleanup(func() {
conf.Server.Plugins.Folder = originalPluginsFolder
conf.Server.DevPluginCompilationTimeout = originalTimeout
})
conf.Server.Plugins.Enabled = true
conf.Server.Plugins.Folder = testDataDir

View File

@ -83,6 +83,15 @@ nfpms:
owner: navidrome
group: navidrome
- src: release/linux/.package.rpm # contents: "rpm"
dst: /var/lib/navidrome/.package
type: "config|noreplace"
packager: rpm
- src: release/linux/.package.deb # contents: "deb"
dst: /var/lib/navidrome/.package
type: "config|noreplace"
packager: deb
scripts:
preinstall: "release/linux/preinstall.sh"
postinstall: "release/linux/postinstall.sh"

View File

@ -0,0 +1 @@
deb

View File

@ -0,0 +1 @@
rpm

View File

@ -49,6 +49,9 @@ cp "${DOWNLOAD_FOLDER}"/extracted_ffmpeg/${FFMPEG_FILE}/bin/ffmpeg.exe "$MSI_OUT
cp "$WORKSPACE"/LICENSE "$WORKSPACE"/README.md "$MSI_OUTPUT_DIR"
cp "$BINARY" "$MSI_OUTPUT_DIR"
# package type indicator file
echo "msi" > "$MSI_OUTPUT_DIR/.package"
# workaround for wixl WixVariable not working to override bmp locations
cp "$WORKSPACE"/release/wix/bmp/banner.bmp /usr/share/wixl-*/ext/ui/bitmaps/bannrbmp.bmp
cp "$WORKSPACE"/release/wix/bmp/dialogue.bmp /usr/share/wixl-*/ext/ui/bitmaps/dlgbmp.bmp

View File

@ -69,6 +69,12 @@
</Directory>
</Directory>
<Directory Id="ND_DATAFOLDER" name="[ND_DATAFOLDER]">
<Component Id='PackageFile' Guid='9eec0697-803c-4629-858f-20dc376c960b' Win64="$(var.Win64)">
<File Id='package' Name='.package' DiskId='1' Source='.package' KeyPath='no' />
</Component>
</Directory>
</Directory>
<InstallUISequence>
@ -81,6 +87,7 @@
<ComponentRef Id='Configuration'/>
<ComponentRef Id='MainExecutable' />
<ComponentRef Id='FFMpegExecutable' />
<ComponentRef Id='PackageFile' />
</Feature>
</Product>
</Wix>

View File

@ -31,8 +31,12 @@
"mood": "Estado",
"participants": "Participantes adicionais",
"tags": "Etiquetas adicionais",
"mappedTags": "",
"rawTags": "Etiquetas en cru"
"mappedTags": "Etiquetas mapeadas",
"rawTags": "Etiquetas en cru",
"bitDepth": "Calidade de Bit",
"sampleRate": "Taxa de mostra",
"missing": "Falta",
"libraryName": "Biblioteca"
},
"actions": {
"addToQueue": "Ao final da cola",
@ -41,7 +45,8 @@
"shuffleAll": "Remexer todo",
"download": "Descargar",
"playNext": "A continuación",
"info": "Obter info"
"info": "Obter info",
"showInPlaylist": "Mostrar en Lista de reprodución"
}
},
"album": {
@ -70,7 +75,10 @@
"releaseType": "Tipo",
"grouping": "Grupos",
"media": "Multimedia",
"mood": "Estado"
"mood": "Estado",
"date": "Data de gravación",
"missing": "Falta",
"libraryName": "Biblioteca"
},
"actions": {
"playAll": "Reproducir",
@ -102,7 +110,8 @@
"rating": "Valoración",
"genre": "Xénero",
"size": "Tamaño",
"role": "Rol"
"role": "Rol",
"missing": "Falta"
},
"roles": {
"albumartist": "Artista do álbum |||| Artistas do álbum",
@ -117,7 +126,13 @@
"mixer": "Mistura |||| Mistura",
"remixer": "Remezcla |||| Remezcla",
"djmixer": "Mezcla DJs |||| Mezcla DJs",
"performer": "Intérprete |||| Intérpretes"
"performer": "Intérprete |||| Intérpretes",
"maincredit": "Artista do álbum ou Artista |||| Artistas do álbum ou Artistas"
},
"actions": {
"shuffle": "Barallar",
"radio": "Radio",
"topSongs": "Cancións destacadas"
}
},
"user": {
@ -134,10 +149,12 @@
"currentPassword": "Contrasinal actual",
"newPassword": "Novo contrasinal",
"token": "Token",
"lastAccessAt": "Último acceso"
"lastAccessAt": "Último acceso",
"libraries": "Bibliotecas"
},
"helperTexts": {
"name": "Os cambios no nome aplicaranse a próxima vez que accedas"
"name": "Os cambios no nome aplicaranse a próxima vez que accedas",
"libraries": "Selecciona bibliotecas específicas para esta usuaria, ou deixa baleiro para usar as bibliotecas por defecto"
},
"notifications": {
"created": "Creouse a usuaria",
@ -146,7 +163,12 @@
},
"message": {
"listenBrainzToken": "Escribe o token de usuaria de ListenBrainz",
"clickHereForToken": "Preme aquí para obter o token"
"clickHereForToken": "Preme aquí para obter o token",
"selectAllLibraries": "Seleccionar todas as bibliotecas",
"adminAutoLibraries": "As usuarias Admin teñen acceso por defecto a todas as bibliotecas"
},
"validation": {
"librariesRequired": "Debes seleccionar polo menos unha biblioteca para usuarias non admins"
}
},
"player": {
@ -190,11 +212,17 @@
"addNewPlaylist": "Crear \"%{name}\"",
"export": "Exportar",
"makePublic": "Facela Pública",
"makePrivate": "Facela Privada"
"makePrivate": "Facela Privada",
"saveQueue": "Salvar a Cola como Lista de reprodución",
"searchOrCreate": "Buscar listas ou escribe para crear nova…",
"pressEnterToCreate": "Preme Enter para crear nova lista",
"removeFromSelection": "Retirar da selección"
},
"message": {
"duplicate_song": "Engadir cancións duplicadas",
"song_exist": "Hai duplicadas que serán engadidas á lista de reprodución. Desexas engadir as duplicadas ou omitilas?"
"song_exist": "Hai duplicadas que serán engadidas á lista de reprodución. Desexas engadir as duplicadas ou omitilas?",
"noPlaylistsFound": "Sen listas de reprodución",
"noPlaylists": "Sen listas dispoñibles"
}
},
"radio": {
@ -232,13 +260,68 @@
"fields": {
"path": "Ruta",
"size": "Tamaño",
"updatedAt": "Desapareceu o"
"updatedAt": "Desapareceu o",
"libraryName": "Biblioteca"
},
"actions": {
"remove": "Retirar"
"remove": "Retirar",
"remove_all": "Retirar todo"
},
"notifications": {
"removed": "Ficheiro(s) faltantes retirados"
},
"empty": "Sen ficheiros faltantes"
},
"library": {
"name": "Biblioteca |||| Bibliotecas",
"fields": {
"name": "Nome",
"path": "Ruta",
"remotePath": "Ruta remota",
"lastScanAt": "Último escaneado",
"songCount": "Cancións",
"albumCount": "Álbums",
"artistCount": "Artistas",
"totalSongs": "Cancións",
"totalAlbums": "Álbums",
"totalArtists": "Artistas",
"totalFolders": "Cartafoles",
"totalFiles": "Ficheiros",
"totalMissingFiles": "Ficheiros que faltan",
"totalSize": "Tamaño total",
"totalDuration": "Duración",
"defaultNewUsers": "Por defecto para novas usuarias",
"createdAt": "Creada",
"updatedAt": "Actualizada"
},
"sections": {
"basic": "Información básica",
"statistics": "Estatísticas"
},
"actions": {
"scan": "Escanear Biblioteca",
"manageUsers": "Xestionar acceso das usuarias",
"viewDetails": "Ver detalles"
},
"notifications": {
"created": "Biblioteca creada correctamente",
"updated": "Biblioteca actualizada correctamente",
"deleted": "Biblioteca eliminada correctamente",
"scanStarted": "Comezou o escaneo da biblioteca",
"scanCompleted": "Completouse o escaneado da biblioteca"
},
"validation": {
"nameRequired": "Requírese un nome para a biblioteca",
"pathRequired": "Requírese unha ruta para a biblioteca",
"pathNotDirectory": "A ruta á biblioteca ten que ser un directorio",
"pathNotFound": "Non se atopa a ruta á biblioteca",
"pathNotAccessible": "A ruta á biblioteca non é accesible",
"pathInvalid": "Ruta non válida á biblioteca"
},
"messages": {
"deleteConfirm": "Tes certeza de querer eliminar esta biblioteca? Isto eliminará todos os datos asociados e accesos de usuarias.",
"scanInProgress": "Escaneo en progreso…",
"noLibrariesAssigned": "Sen bibliotecas asignadas a esta usuaria"
}
}
},
@ -419,7 +502,11 @@
"downloadDialogTitle": "Descargar %{resource} '%{name}' (%{size})",
"shareCopyToClipboard": "Copiar ao portapapeis: Ctrl+C, Enter",
"remove_missing_title": "Retirar ficheiros que faltan",
"remove_missing_content": "Tes certeza de querer retirar da base de datos os ficheiros que faltan? Isto retirará de xeito permanente todas a referencias a eles, incluíndo a conta de reproducións e valoracións."
"remove_missing_content": "Tes certeza de querer retirar da base de datos os ficheiros que faltan? Isto retirará de xeito permanente todas a referencias a eles, incluíndo a conta de reproducións e valoracións.",
"remove_all_missing_title": "Retirar todos os ficheiros que faltan",
"remove_all_missing_content": "Tes certeza de querer retirar da base de datos todos os ficheiros que faltan? Isto eliminará todas as referencias a eles, incluíndo o número de reproducións e valoracións.",
"noSimilarSongsFound": "Sen cancións parecidas",
"noTopSongsFound": "Sen cancións destacadas"
},
"menu": {
"library": "Biblioteca",
@ -448,7 +535,13 @@
"albumList": "Álbums",
"about": "Acerca de",
"playlists": "Listas de reprodución",
"sharedPlaylists": "Listas compartidas"
"sharedPlaylists": "Listas compartidas",
"librarySelector": {
"allLibraries": "Todas as bibliotecas (%{count})",
"multipleLibraries": "%{selected} de %{total} Bibliotecas",
"selectLibraries": "Seleccionar Bibliotecas",
"none": "Ningunha"
}
},
"player": {
"playListsText": "Reproducir cola",
@ -485,6 +578,21 @@
"disabled": "Desactivado",
"waiting": "Agardando"
}
},
"tabs": {
"about": "Sobre",
"config": "Configuración"
},
"config": {
"configName": "Nome",
"environmentVariable": "Variable de entorno",
"currentValue": "Valor actual",
"configurationFile": "Ficheiro de configuración",
"exportToml": "Exportar configuración (TOML)",
"exportSuccess": "Configuración exportada ao portapapeis no formato TOML",
"exportFailed": "Fallou a copia da configuración",
"devFlagsHeader": "Configuracións de Desenvolvemento (suxeitas a cambio/retirada)",
"devFlagsComment": "Son axustes experimentais e poden retirarse en futuras versións"
}
},
"activity": {
@ -493,7 +601,10 @@
"quickScan": "Escaneo rápido",
"fullScan": "Escaneo completo",
"serverUptime": "Servidor a funcionar",
"serverDown": "SEN CONEXIÓN"
"serverDown": "SEN CONEXIÓN",
"scanType": "Tipo",
"status": "Erro de escaneado",
"elapsedTime": "Tempo transcorrido"
},
"help": {
"title": "Atallos de Navidrome",
@ -508,5 +619,10 @@
"toggle_love": "Engadir canción a favoritas",
"current_song": "Ir á Canción actual "
}
},
"nowPlaying": {
"title": "En reprodución",
"empty": "Sen reprodución",
"minutesAgo": "hai %{smart_count} minuto |||| hai %{smart_count} minutos"
}
}

View File

@ -400,8 +400,8 @@
},
"albumList": "Album",
"about": "Info",
"playlists": "Scalette",
"sharedPlaylists": "Scalette Condivise"
"playlists": "Playlist",
"sharedPlaylists": "Playlist Condivise"
},
"player": {
"playListsText": "Coda",
@ -457,4 +457,4 @@
"current_song": ""
}
}
}
}

View File

@ -12,6 +12,7 @@
"artist": "아티스트",
"album": "앨범",
"path": "파일 경로",
"libraryName": "라이브러리",
"genre": "장르",
"compilation": "컴필레이션",
"year": "년",
@ -34,7 +35,8 @@
"participants": "추가 참가자",
"tags": "추가 태그",
"mappedTags": "매핑된 태그",
"rawTags": "원시 태그"
"rawTags": "원시 태그",
"missing": "누락"
},
"actions": {
"addToQueue": "나중에 재생",
@ -56,6 +58,7 @@
"playCount": "재생 횟수",
"size": "크기",
"name": "이름",
"libraryName": "라이브러리",
"genre": "장르",
"compilation": "컴필레이션",
"year": "년",
@ -73,7 +76,8 @@
"releaseType": "유형",
"grouping": "그룹",
"media": "미디어",
"mood": "분위기"
"mood": "분위기",
"missing": "누락"
},
"actions": {
"playAll": "재생",
@ -105,7 +109,8 @@
"playCount": "재생 횟수",
"rating": "평가",
"genre": "장르",
"role": "역할"
"role": "역할",
"missing": "누락"
},
"roles": {
"albumartist": "앨범 아티스트 |||| 앨범 아티스트들",
@ -120,7 +125,13 @@
"mixer": "믹서 |||| 믹서들",
"remixer": "리믹서 |||| 리믹서들",
"djmixer": "DJ 믹서 |||| DJ 믹서들",
"performer": "공연자 |||| 공연자들"
"performer": "공연자 |||| 공연자들",
"maincredit": "앨범 아티스트 또는 아티스트 |||| 앨범 아티스트들 또는 아티스트들"
},
"actions": {
"topSongs": "인기곡",
"shuffle": "셔플",
"radio": "라디오"
}
},
"user": {
@ -137,19 +148,26 @@
"changePassword": "비밀번호를 변경할까요?",
"currentPassword": "현재 비밀번호",
"newPassword": "새 비밀번호",
"token": "토큰"
"token": "토큰",
"libraries": "라이브러리"
},
"helperTexts": {
"name": "이름 변경 사항은 다음 로그인 시에만 반영됨"
"name": "이름 변경 사항은 다음 로그인 시에만 반영됨",
"libraries": "이 사용자에 대한 특정 라이브러리를 선택하거나 기본 라이브러리를 사용하려면 비움"
},
"notifications": {
"created": "사용자 생성됨",
"updated": "사용자 업데이트됨",
"deleted": "사용자 삭제됨"
},
"validation": {
"librariesRequired": "관리자가 아닌 사용자의 경우 최소한 하나의 라이브러리를 선택해야 함"
},
"message": {
"listenBrainzToken": "ListenBrainz 사용자 토큰을 입력하세요.",
"clickHereForToken": "여기를 클릭하여 토큰을 얻으세요"
"clickHereForToken": "여기를 클릭하여 토큰을 얻으세요",
"selectAllLibraries": "모든 라이브러리 선택",
"adminAutoLibraries": "관리자 사용자는 자동으로 모든 라이브러리에 접속할 수 있음"
}
},
"player": {
@ -192,12 +210,18 @@
"selectPlaylist": "재생목록 선택:",
"addNewPlaylist": "\"%{name}\" 만들기",
"export": "내보내기",
"saveQueue": "재생목록에 대기열 저장",
"makePublic": "공개 만들기",
"makePrivate": "비공개 만들기"
"makePrivate": "비공개 만들기",
"searchOrCreate": "재생목록을 검색하거나 입력하여 새 재생목록을 만드세요...",
"pressEnterToCreate": "새 재생목록을 만드려면 Enter 키를 누름",
"removeFromSelection": "선택에서 제거"
},
"message": {
"duplicate_song": "중복된 노래 추가",
"song_exist": "이미 재생목록에 존재하는 노래입니다. 중복을 추가할까요 아니면 건너뛸까요?"
"song_exist": "이미 재생목록에 존재하는 노래입니다. 중복을 추가할까요 아니면 건너뛸까요?",
"noPlaylistsFound": "재생목록을 찾을 수 없음",
"noPlaylists": "사용 가능한 재생 목록이 없음"
}
},
"radio": {
@ -238,14 +262,68 @@
"fields": {
"path": "경로",
"size": "크기",
"libraryName": "라이브러리",
"updatedAt": "사라짐"
},
"actions": {
"remove": "제거"
"remove": "제거",
"remove_all": "모두 제거"
},
"notifications": {
"removed": "누락된 파일이 제거되었음"
}
},
"library": {
"name": "라이브러리 |||| 라이브러리들",
"fields": {
"name": "이름",
"path": "경로",
"remotePath": "원격 경로",
"lastScanAt": "최근 스캔",
"songCount": "노래",
"albumCount": "앨범",
"artistCount": "아티스트",
"totalSongs": "노래",
"totalAlbums": "앨범",
"totalArtists": "아티스트",
"totalFolders": "폴더",
"totalFiles": "파일",
"totalMissingFiles": "누락된 파일",
"totalSize": "총 크기",
"totalDuration": "기간",
"defaultNewUsers": "신규 사용자 기본값",
"createdAt": "생성됨",
"updatedAt": "업데이트됨"
},
"sections": {
"basic": "기본 정보",
"statistics": "통계"
},
"actions": {
"scan": "라이브러리 스캔",
"manageUsers": "사용자 접속 관리",
"viewDetails": "상세 보기"
},
"notifications": {
"created": "라이브러리가 성공적으로 생성됨",
"updated": "라이브러리가 성공적으로 업데이트됨",
"deleted": "라이브러리가 성공적으로 삭제됨",
"scanStarted": "라이브러리 스캔 시작됨",
"scanCompleted": "라이브러리 스캔 완료됨"
},
"validation": {
"nameRequired": "라이브러리 이름이 필요함",
"pathRequired": "라이브러리 경로가 필요함",
"pathNotDirectory": "라이브러리 경로는 디렉터리여야 함",
"pathNotFound": "라이브러리 경로를 찾을 수 없음",
"pathNotAccessible": "라이브러리 경로에 접근할 수 없음",
"pathInvalid": "잘못된 라이브러리 경로"
},
"messages": {
"deleteConfirm": "이 라이브러리를 삭제할까요? 삭제하면 연결된 모든 데이터와 사용자 접속 권한이 제거됩니다.",
"scanInProgress": "스캔 진행 중...",
"noLibrariesAssigned": "이 사용자에게 할당된 라이브러리가 없음"
}
}
},
"ra": {
@ -398,11 +476,15 @@
"transcodingDisabled": "웹 인터페이스를 통한 트랜스코딩 구성 변경은 보안상의 이유로 비활성화되어 있습니다. 트랜스코딩 옵션을 변경(편집 또는 추가)하려면, %{config} 구성 옵션으로 서버를 다시 시작하세요.",
"transcodingEnabled": "Navidrome은 현재 %{config}로 실행 중이므로 웹 인터페이스를 사용하여 트랜스코딩 설정에서 시스템 명령을 실행할 수 있습니다. 보안상의 이유로 비활성화하고 트랜스코딩 옵션을 구성할 때만 활성화하는 것이 좋습니다.",
"songsAddedToPlaylist": "1 개의 노래를 재생목록에 추가하였음 |||| %{smart_count} 개의 노래를 재생 목록에 추가하였음",
"noSimilarSongsFound": "비슷한 노래를 찾을 수 없음",
"noTopSongsFound": "인기곡을 찾을 수 없음",
"noPlaylistsAvailable": "사용 가능한 재생목록 없음",
"delete_user_title": "사용자 '%{name}' 삭제",
"delete_user_content": "이 사용자와 해당 사용자의 모든 데이터(재생 목록 및 환경 설정 포함)를 삭제할까요?",
"remove_missing_title": "누락된 파일들 제거",
"remove_missing_content": "선택한 누락된 파일을 데이터베이스에서 삭제할까요? 삭제하면 재생 횟수 및 평점을 포함하여 해당 파일에 대한 모든 참조가 영구적으로 삭제됩니다.",
"remove_all_missing_title": "누락된 모든 파일 제거",
"remove_all_missing_content": "데이터베이스에서 누락된 모든 파일을 제거할까요? 이렇게 하면 해당 파일의 재생 횟수와 평점을 포함한 모든 참조 내용이 영구적으로 삭제됩니다.",
"notifications_blocked": "브라우저 설정에서 이 사이트의 알림을 차단하였음",
"notifications_not_available": "이 브라우저는 데스크톱 알림을 지원하지 않거나 https를 통해 Navidrome에 접속하고 있지 않음",
"lastfmLinkSuccess": "Last.fm이 성공적으로 연결되었고 스크로블링이 활성화되었음",
@ -429,6 +511,12 @@
},
"menu": {
"library": "라이브러리",
"librarySelector": {
"allLibraries": "모든 라이브러리 (%{count})",
"multipleLibraries": "%{selected} / %{total} 라이브러리",
"selectLibraries": "라이브러리 선택",
"none": "없음"
},
"settings": "설정",
"version": "버전",
"theme": "테마",
@ -491,6 +579,21 @@
"disabled": "비활성화",
"waiting": "대기중"
}
},
"tabs": {
"about": "정보",
"config": "구성"
},
"config": {
"configName": "구성 이름",
"environmentVariable": "환경 변수",
"currentValue": "현재 값",
"configurationFile": "구성 파일",
"exportToml": "구성 내보내기 (TOML)",
"exportSuccess": "TOML 형식으로 클립보드로 내보낸 구성",
"exportFailed": "구성 복사 실패",
"devFlagsHeader": "개발 플래그 (변경/삭제 가능)",
"devFlagsComment": "이는 실험적 설정이므로 향후 버전에서 제거될 수 있음"
}
},
"activity": {
@ -499,7 +602,15 @@
"quickScan": "빠른 스캔",
"fullScan": "전체 스캔",
"serverUptime": "서버 가동 시간",
"serverDown": "오프라인"
"serverDown": "오프라인",
"scanType": "유형",
"status": "스캔 오류",
"elapsedTime": "경과 시간"
},
"nowPlaying": {
"title": "현재 재생 중",
"empty": "재생 중인 콘텐츠 없음",
"minutesAgo": "%{smart_count} 분 전"
},
"help": {
"title": "Navidrome 단축키",

View File

@ -5,7 +5,7 @@
"name": "Nummer |||| Nummers",
"fields": {
"albumArtist": "Album Artiest",
"duration": "Lengte",
"duration": "Afspeelduur",
"trackNumber": "Nummer #",
"playCount": "Aantal keren afgespeeld",
"title": "Titel",
@ -35,7 +35,8 @@
"rawTags": "Onbewerkte tags",
"bitDepth": "Bit diepte",
"sampleRate": "Sample waarde",
"missing": "Ontbrekend"
"missing": "Ontbrekend",
"libraryName": "Bibliotheek"
},
"actions": {
"addToQueue": "Voeg toe aan wachtrij",
@ -44,7 +45,8 @@
"shuffleAll": "Shuffle alles",
"download": "Downloaden",
"playNext": "Volgende",
"info": "Meer info"
"info": "Meer info",
"showInPlaylist": "Toon in afspeellijst"
}
},
"album": {
@ -55,7 +57,7 @@
"duration": "Afspeelduur",
"songCount": "Nummers",
"playCount": "Aantal keren afgespeeld",
"name": "Naam",
"name": "Titel",
"genre": "Genre",
"compilation": "Compilatie",
"year": "Jaar",
@ -65,9 +67,9 @@
"createdAt": "Datum toegevoegd",
"size": "Grootte",
"originalDate": "Origineel",
"releaseDate": "Uitgegeven",
"releaseDate": "Uitgave",
"releases": "Uitgave |||| Uitgaven",
"released": "Uitgegeven",
"released": "Uitgave",
"recordLabel": "Label",
"catalogNum": "Catalogus nummer",
"releaseType": "Type",
@ -75,7 +77,8 @@
"media": "Media",
"mood": "Sfeer",
"date": "Opnamedatum",
"missing": "Ontbrekend"
"missing": "Ontbrekend",
"libraryName": "Bibliotheek"
},
"actions": {
"playAll": "Afspelen",
@ -123,7 +126,13 @@
"mixer": "Mixer |||| Mixers",
"remixer": "Remixer |||| Remixers",
"djmixer": "DJ Mixer |||| DJ Mixers",
"performer": "Performer |||| Performers"
"performer": "Performer |||| Performers",
"maincredit": "Album Artiest of Artiest |||| Album Artiesten of Artiesten"
},
"actions": {
"shuffle": "Shuffle",
"radio": "Radio",
"topSongs": "Beste nummers"
}
},
"user": {
@ -132,7 +141,7 @@
"userName": "Gebruikersnaam",
"isAdmin": "Is beheerder",
"lastLoginAt": "Laatst ingelogd op",
"updatedAt": "Laatst gewijzigd op",
"updatedAt": "Laatst bijgewerkt op",
"name": "Naam",
"password": "Wachtwoord",
"createdAt": "Aangemaakt op",
@ -140,19 +149,26 @@
"currentPassword": "Huidig wachtwoord",
"newPassword": "Nieuw wachtwoord",
"token": "Token",
"lastAccessAt": "Meest recente toegang"
"lastAccessAt": "Meest recente toegang",
"libraries": "Bibliotheken"
},
"helperTexts": {
"name": "Naamswijziging wordt pas zichtbaar bij de volgende login"
"name": "Naamswijziging wordt pas zichtbaar bij de volgende login",
"libraries": "Selecteer specifieke bibliotheken voor deze gebruiker, of laat leeg om de standaardbibliotheken te gebruiken"
},
"notifications": {
"created": "Aangemaakt door gebruiker",
"updated": "Gewijzigd door gebruiker",
"deleted": "Gewist door gebruiker"
"updated": "Bijgewerkt door gebruiker",
"deleted": "Gebruiker verwijderd"
},
"message": {
"listenBrainzToken": "Vul je ListenBrainz gebruikers-token in.",
"clickHereForToken": "Klik hier voor je token"
"clickHereForToken": "Klik hier voor je token",
"selectAllLibraries": "Selecteer alle bibliotheken",
"adminAutoLibraries": "Admin gebruikers hebben automatisch toegang tot alle bibliotheken"
},
"validation": {
"librariesRequired": "Minstens één bibliotheek moet geselecteerd worden voor niet-admin gebruikers"
}
},
"player": {
@ -181,10 +197,10 @@
"name": "Afspeellijst |||| Afspeellijsten",
"fields": {
"name": "Titel",
"duration": "Lengte",
"duration": "Afspeelduur",
"ownerName": "Eigenaar",
"public": "Publiek",
"updatedAt": "Laatst gewijzigd op",
"updatedAt": "Laatst bijgewerkt op",
"createdAt": "Aangemaakt op",
"songCount": "Nummers",
"comment": "Commentaar",
@ -197,11 +213,16 @@
"export": "Exporteer",
"makePublic": "Openbaar maken",
"makePrivate": "Privé maken",
"saveQueue": "Bewaar wachtrij als playlist"
"saveQueue": "Bewaar wachtrij als playlist",
"searchOrCreate": "Zoek afspeellijsten of typ om een nieuwe te starten...",
"pressEnterToCreate": "Druk Enter om nieuwe afspeellijst te maken",
"removeFromSelection": "Verwijder van selectie"
},
"message": {
"duplicate_song": "Dubbele nummers toevoegen",
"song_exist": "Er komen nummers dubbel in de afspeellijst. Wil je de dubbele nummers toevoegen of overslaan?"
"song_exist": "Er komen nummers dubbel in de afspeellijst. Wil je de dubbele nummers toevoegen of overslaan?",
"noPlaylistsFound": "Geen playlists gevonden",
"noPlaylists": "Geen playlists beschikbaar"
}
},
"radio": {
@ -210,8 +231,8 @@
"name": "Naam",
"streamUrl": "Stream URL",
"homePageUrl": "Hoofdpagina URL",
"updatedAt": "Geüpdate op",
"createdAt": "Gecreëerd op"
"updatedAt": "Bijgewerkt op",
"createdAt": "Aangemaakt op"
},
"actions": {
"playNow": "Speel nu"
@ -229,8 +250,8 @@
"visitCount": "Bezocht",
"format": "Formaat",
"maxBitRate": "Max. bitrate",
"updatedAt": "Geüpdatet op",
"createdAt": "Gecreëerd op",
"updatedAt": "Bijgewerkt op",
"createdAt": "Aangemaakt op",
"downloadable": "Downloads toestaan?"
}
},
@ -239,7 +260,8 @@
"fields": {
"path": "Pad",
"size": "Grootte",
"updatedAt": "Verdwenen op"
"updatedAt": "Verdwenen op",
"libraryName": "Bibliotheek"
},
"actions": {
"remove": "Verwijder",
@ -249,6 +271,58 @@
"removed": "Ontbrekende bestanden verwijderd"
},
"empty": "Geen ontbrekende bestanden"
},
"library": {
"name": "Bibliotheek |||| Bibliotheken",
"fields": {
"name": "Naam",
"path": "Pad",
"remotePath": "Extern pad",
"lastScanAt": "Laatste scan",
"songCount": "Nummers",
"albumCount": "Albums",
"artistCount": "Artiesten",
"totalSongs": "Nummers",
"totalAlbums": "Albums",
"totalArtists": "Artiesten",
"totalFolders": "Mappen",
"totalFiles": "Bestanden",
"totalMissingFiles": "Ontbrekende bestanden",
"totalSize": "Totale bestandsgrootte",
"totalDuration": "Afspeelduur",
"defaultNewUsers": "Standaard voor nieuwe gebruikers",
"createdAt": "Aangemaakt",
"updatedAt": "Bijgewerkt"
},
"sections": {
"basic": "Basisinformatie",
"statistics": "Statistieken"
},
"actions": {
"scan": "Scan bibliotheek",
"manageUsers": "Beheer gebruikerstoegang",
"viewDetails": "Bekijk details"
},
"notifications": {
"created": "Bibliotheek succesvol aangemaakt",
"updated": "Bibliotheek succesvol bijgewerkt",
"deleted": "Bibliotheek succesvol verwijderd",
"scanStarted": "Bibliotheekscan is gestart",
"scanCompleted": "Bibliotheekscan is voltooid"
},
"validation": {
"nameRequired": "Bibliotheek naam is vereist",
"pathRequired": "Pad naar bibliotheek is vereist",
"pathNotDirectory": "Pad naar bibliotheek moet een map zijn",
"pathNotFound": "Pad naar bibliotheek niet gevonden",
"pathNotAccessible": "Pad naar bibliotheek is niet toegankelijk",
"pathInvalid": "Ongeldig pad naar bibliotheek"
},
"messages": {
"deleteConfirm": "Weet je zeker dat je deze bibliotheek wil verwijderen? Dit verwijdert ook alle gerelateerde data en gebruikerstoegang.",
"scanInProgress": "Scan is bezig...",
"noLibrariesAssigned": "Geen bibliotheken aan deze gebruiker toegewezen"
}
}
},
"ra": {
@ -430,7 +504,9 @@
"remove_missing_title": "Verwijder ontbrekende bestanden",
"remove_missing_content": "Weet je zeker dat je alle ontbrekende bestanden van de database wil verwijderen? Dit wist permanent al hun referenties inclusief afspeel tellers en beoordelingen.",
"remove_all_missing_title": "Verwijder alle ontbrekende bestanden",
"remove_all_missing_content": "Weet je zeker dat je alle ontbrekende bestanden van de database wil verwijderen? Dit wist permanent al hun referenties inclusief afspeel tellers en beoordelingen."
"remove_all_missing_content": "Weet je zeker dat je alle ontbrekende bestanden van de database wil verwijderen? Dit wist permanent al hun referenties inclusief afspeel tellers en beoordelingen.",
"noSimilarSongsFound": "Geen vergelijkbare nummers gevonden",
"noTopSongsFound": "Geen beste nummers gevonden"
},
"menu": {
"library": "Bibliotheek",
@ -459,7 +535,13 @@
"albumList": "Albums",
"about": "Over",
"playlists": "Afspeellijsten",
"sharedPlaylists": "Gedeelde afspeellijsten"
"sharedPlaylists": "Gedeelde afspeellijsten",
"librarySelector": {
"allLibraries": "Alle bibliotheken (%{count})",
"multipleLibraries": "%{selected} van %{total} bibliotheken",
"selectLibraries": "Selecteer bibliotheken",
"none": "Geen"
}
},
"player": {
"playListsText": "Wachtrij",
@ -468,7 +550,7 @@
"notContentText": "Geen muziek",
"clickToPlayText": "Klik om af te spelen",
"clickToPauseText": "Klik om te pauzeren",
"nextTrackText": "Volgende",
"nextTrackText": "Volgend nummer",
"previousTrackText": "Vorige",
"reloadText": "Herladen",
"volumeText": "Volume",
@ -496,11 +578,26 @@
"disabled": "Uitgeschakeld",
"waiting": "Wachten"
}
},
"tabs": {
"about": "Over",
"config": "Configuratie"
},
"config": {
"configName": "Config Naam",
"environmentVariable": "Omgevingsvariabele",
"currentValue": "Huidige waarde",
"configurationFile": "Configuratiebestand",
"exportToml": "Exporteer configuratie (TOML)",
"exportSuccess": "Configuratie geëxporteerd naar klembord in TOML formaat",
"exportFailed": "Kopiëren van configuratie mislukt",
"devFlagsHeader": "Ontwikkelaarsinstellingen (onder voorbehoud)",
"devFlagsComment": "Dit zijn experimentele instellingen en worden mogelijk in latere versies verwijderd"
}
},
"activity": {
"title": "Activiteit",
"totalScanned": "Totaal gescande folders",
"totalScanned": "Totaal gescande mappen",
"quickScan": "Snelle scan",
"fullScan": "Volledige scan",
"serverUptime": "Server uptime",
@ -522,5 +619,10 @@
"toggle_love": "Voeg toe aan favorieten",
"current_song": "Ga naar huidig nummer"
}
},
"nowPlaying": {
"title": "Speelt nu",
"empty": "Er wordt niets afgespeed",
"minutesAgo": "%{smart_count} minuut geleden |||| %{smart_count} minuten geleden"
}
}

View File

@ -300,6 +300,8 @@
},
"actions": {
"scan": "Scanear Biblioteca",
"quickScan": "Scan Rápido",
"fullScan": "Scan Completo",
"manageUsers": "Gerenciar Acesso do Usuário",
"viewDetails": "Ver Detalhes"
},
@ -308,6 +310,9 @@
"updated": "Biblioteca atualizada com sucesso",
"deleted": "Biblioteca excluída com sucesso",
"scanStarted": "Scan da biblioteca iniciada",
"quickScanStarted": "Scan rápido iniciado",
"fullScanStarted": "Scan completo iniciado",
"scanError": "Erro ao iniciar o scan. Verifique os logs",
"scanCompleted": "Scan da biblioteca concluída"
},
"validation": {
@ -598,11 +603,12 @@
"activity": {
"title": "Atividade",
"totalScanned": "Total de pastas scaneadas",
"quickScan": "Scan rápido",
"fullScan": "Scan completo",
"quickScan": "Rápido",
"fullScan": "Completo",
"selectiveScan": "Seletivo",
"serverUptime": "Uptime do servidor",
"serverDown": "DESCONECTADO",
"scanType": "Tipo",
"scanType": "Último Scan",
"status": "Erro",
"elapsedTime": "Duração"
},

View File

@ -26,7 +26,17 @@
"bpm": "BPM",
"playDate": "เล่นล่าสุด",
"channels": "ช่อง",
"createdAt": "เพิ่มเมื่อ"
"createdAt": "เพิ่มเมื่อ",
"grouping": "จัดกลุ่ม",
"mood": "อารมณ์",
"participants": "ผู้มีส่วนร่วม",
"tags": "แทกเพิ่มเติม",
"mappedTags": "แมพแทก",
"rawTags": "แทกเริ่มต้น",
"bitDepth": "Bit depth",
"sampleRate": "แซมเปิ้ลเรต",
"missing": "หายไป",
"libraryName": "ห้องสมุด"
},
"actions": {
"addToQueue": "เพิ่มในคิว",
@ -35,7 +45,8 @@
"shuffleAll": "สุ่มทั้งหมด",
"download": "ดาวน์โหลด",
"playNext": "เล่นถัดไป",
"info": "ดูรายละเอียด"
"info": "ดูรายละเอียด",
"showInPlaylist": "แสดงในเพลย์ลิสต์"
}
},
"album": {
@ -58,7 +69,16 @@
"originalDate": "วันที่เริ่ม",
"releaseDate": "เผยแพร่เมื่อ",
"releases": "เผยแพร่ |||| เผยแพร่",
"released": "เผยแพร่เมื่อ"
"released": "เผยแพร่เมื่อ",
"recordLabel": "ป้าย",
"catalogNum": "หมายเลขแคตาล็อก",
"releaseType": "ประเภท",
"grouping": "จัดกลุ่ม",
"media": "มีเดีย",
"mood": "อารมณ์",
"date": "บันทึกเมื่อ",
"missing": "หายไป",
"libraryName": "ห้องสมุด"
},
"actions": {
"playAll": "เล่นทั้งหมด",
@ -89,7 +109,30 @@
"playCount": "เล่นแล้ว",
"rating": "ความนิยม",
"genre": "ประเภท",
"size": "ขนาด"
"size": "ขนาด",
"role": "Role",
"missing": "หายไป"
},
"roles": {
"albumartist": "ศิลปินอัลบั้ม |||| ศิลปินอัลบั้ม",
"artist": "ศิลปิน |||| ศิลปิน",
"composer": "ผู้แต่ง |||| ผู้แต่ง",
"conductor": "คอนดักเตอร์ |||| คอนดักเตอร์",
"lyricist": "เนื้อเพลง |||| เนื้อเพลง",
"arranger": "ผู้ดำเนินการ |||| ผู้ดำเนินการ",
"producer": "ผู้จัด |||| ผู้จัด",
"director": "ไดเรกเตอร์ |||| ไดเรกเตอร์",
"engineer": "วิศวกร |||| วิศวกร",
"mixer": "มิกเซอร์ |||| มิกเซอร์",
"remixer": "รีมิกเซอร์ |||| รีมิกเซอร์",
"djmixer": "ดีเจมิกเซอร์ |||| ดีเจมิกเซอร์",
"performer": "ผู้เล่น |||| ผู้เล่น",
"maincredit": "ศิลปิน |||| ศิลปิน"
},
"actions": {
"shuffle": "เล่นสุ่ม",
"radio": "วิทยุ",
"topSongs": "เพลงยอดนิยม"
}
},
"user": {
@ -106,10 +149,12 @@
"currentPassword": "รหัสผ่านปัจจุบัน",
"newPassword": "รหัสผ่านใหม่",
"token": "โทเคน",
"lastAccessAt": "เข้าใช้ล่าสุด"
"lastAccessAt": "เข้าใช้ล่าสุด",
"libraries": "ห้องสมุด"
},
"helperTexts": {
"name": "การเปลี่ยนชื่อจะมีผลในการล็อกอินครั้งถัดไป"
"name": "การเปลี่ยนชื่อจะมีผลในการล็อกอินครั้งถัดไป",
"libraries": "เลือกห้องสมุดสำหรับผู้ใช้นี้หรือปล่อยว่างเพื่อใช้ห้องสมุดเริ่มต้น"
},
"notifications": {
"created": "สร้างชื่อผู้ใช้",
@ -118,7 +163,12 @@
},
"message": {
"listenBrainzToken": "ใส่โทเคน ListenBrainz ของคุณ",
"clickHereForToken": "กดที่นี่เพื่อรับโทเคนของคุณ"
"clickHereForToken": "กดที่นี่เพื่อรับโทเคนของคุณ",
"selectAllLibraries": "เลือกห้องสมุดทั้งหมด",
"adminAutoLibraries": "ผู้ดูแลเข้าถึงห้องสมุดทั้งหมดโดยอัตโนมัติ"
},
"validation": {
"librariesRequired": "ต้องเลือกห้องสมุด 1 ห้อง สำหรับผู้ใช้ที่ไม่ใช่ผู้ดูแล"
}
},
"player": {
@ -162,11 +212,17 @@
"addNewPlaylist": "สร้าง \"%{name}\"",
"export": "ส่งออก",
"makePublic": "ทำเป็นสาธารณะ",
"makePrivate": "ทำเป็นส่วนตัว"
"makePrivate": "ทำเป็นส่วนตัว",
"saveQueue": "บันทึกคิวลงเพลย์ลิสต์",
"searchOrCreate": "ค้นหาเพลย์ลิสต์หรือพิมพ์เพื่อสร้างใหม่",
"pressEnterToCreate": "กด Enter เพื่อสร้างเพลย์ลิสต์",
"removeFromSelection": "เอาออกจากที่เลือกไว้"
},
"message": {
"duplicate_song": "เพิ่มเพลงซ้ำ",
"song_exist": "เพิ่มเพลงซ้ำกันในเพลย์ลิสต์ คุณจะเพิ่มเพลงต่อหรือข้าม"
"song_exist": "เพิ่มเพลงซ้ำกันในเพลย์ลิสต์ คุณจะเพิ่มเพลงต่อหรือข้าม",
"noPlaylistsFound": "ไม่พบเพลย์ลิสต์",
"noPlaylists": "ไม่มีเพลย์ลิสต์อยู่"
}
},
"radio": {
@ -198,6 +254,75 @@
"createdAt": "สร้างเมื่อ",
"downloadable": "อนุญาตให้ดาวโหลด?"
}
},
"missing": {
"name": "ไฟล์ที่หายไป |||| ไฟล์ที่หายไป",
"fields": {
"path": "พาร์ท",
"size": "ขนาด",
"updatedAt": "หายไปจาก",
"libraryName": "ห้องสมุด"
},
"actions": {
"remove": "เอาออก",
"remove_all": "เอาออกทั้งหมด"
},
"notifications": {
"removed": "เอาไฟล์ที่หายไปออกแล้ว"
},
"empty": "ไม่มีไฟล์หาย"
},
"library": {
"name": "ห้องสมุด |||| ห้องสมุด",
"fields": {
"name": "ชื่อ",
"path": "พาร์ท",
"remotePath": "รีโมทพาร์ท",
"lastScanAt": "สแกนล่าสุด",
"songCount": "เพลง",
"albumCount": "อัลบัม",
"artistCount": "ศิลปิน",
"totalSongs": "เพลง",
"totalAlbums": "อัลบัม",
"totalArtists": "ศิลปิน",
"totalFolders": "แฟ้ม",
"totalFiles": "ไฟล์",
"totalMissingFiles": "ไฟล์ที่หายไป",
"totalSize": "ขนาดทั้งหมด",
"totalDuration": "ความยาว",
"defaultNewUsers": "ค่าเริ่มต้นผู้ใช้ใหม่",
"createdAt": "สร้าง",
"updatedAt": "อัพเดท"
},
"sections": {
"basic": "ข้อมูลเบื้องต้น",
"statistics": "สถิติ"
},
"actions": {
"scan": "สแกนห้องสมุด",
"manageUsers": "ตั้งค่าการเข้าถึง",
"viewDetails": "ดูรายละเอียด"
},
"notifications": {
"created": "สร้างห้องสมุดเรียบร้อย",
"updated": "อัพเดทห้องสมุดเรียบร้อย",
"deleted": "ลบห้องสมุดเพลงเรียบร้อยแล้ว",
"scanStarted": "เริ่มสแกนห้องสมุด",
"scanCompleted": "สแกนห้องสมุดเสร็จแล้ว"
},
"validation": {
"nameRequired": "ต้องใส่ชื่อห้องสมุดเพลง",
"pathRequired": "ต้องใส่พาร์ทของห้องสมุด",
"pathNotDirectory": "พาร์ทของห้องสมุดต้องเป็นแฟ้ม",
"pathNotFound": "ไม่เจอพาร์ทของห้องสมุด",
"pathNotAccessible": "ไม่สามารถเข้าพาร์ทของห้องสมุด",
"pathInvalid": "พาร์ทห้องสมุดไม่ถูก"
},
"messages": {
"deleteConfirm": "คุณแน่ใจว่าจะลบห้องสมุดนี้? นี่จะลบข้อมูลและการเข้าถึงของผู้ใช้ที่เกี่ยวข้องทั้งหมด",
"scanInProgress": "กำลังสแกน...",
"noLibrariesAssigned": "ไม่มีห้องสมุดสำหรับผู้ใช้นี้"
}
}
},
"ra": {
@ -375,7 +500,13 @@
"shareSuccess": "คัดลอก URL ไปคลิปบอร์ด: %{url}",
"shareFailure": "คัดลอก URL %{url} ไปคลิปบอร์ดผิดพลาด",
"downloadDialogTitle": "ดาวโหลด %{resource} '%{name}' (%{size})",
"shareCopyToClipboard": "คัดลอกไปคลิปบอร์ด: Ctrl+C, Enter"
"shareCopyToClipboard": "คัดลอกไปคลิปบอร์ด: Ctrl+C, Enter",
"remove_missing_title": "ลบรายการไฟล์ที่หายไป",
"remove_missing_content": "คุณแน่ใจว่าจะเอารายการไฟล์ที่หายไปออกจากดาต้าเบส นี่จะเป็นการลบข้อมูลอ้างอิงทั้งหมดของไฟล์ออกอย่างถาวร",
"remove_all_missing_title": "เอารายการไฟล์ที่หายไปออกทั้งหมด",
"remove_all_missing_content": "คุณแน่ใจว่าจะเอารายการไฟล์ที่หายไปออกจากดาต้าเบส นี่จะเป็นการลบข้อมูลอ้างอิงทั้งหมดของไฟล์ออกอย่างถาวร",
"noSimilarSongsFound": "ไม่มีเพลงคล้ายกัน",
"noTopSongsFound": "ไม่พบเพลงยอดนิยม"
},
"menu": {
"library": "ห้องสมุดเพลง",
@ -404,7 +535,13 @@
"albumList": "อัลบั้ม",
"about": "เกี่ยวกับ",
"playlists": "เพลย์ลิสต์",
"sharedPlaylists": "เพลย์ลิสต์ที่แบ่งปัน"
"sharedPlaylists": "เพลย์ลิสต์ที่แบ่งปัน",
"librarySelector": {
"allLibraries": "ห้องสมุด (%{count}) ห้อง",
"multipleLibraries": "%{selected} ของ %{total} ห้องสมุด",
"selectLibraries": "เลือกห้องสมุด",
"none": "ไม่มี"
}
},
"player": {
"playListsText": "คิวเล่น",
@ -441,6 +578,21 @@
"disabled": "ปิดการทำงาน",
"waiting": "รอ"
}
},
"tabs": {
"about": "เกี่ยวกับ",
"config": "การตั้งค่า"
},
"config": {
"configName": "ชื่อการตั้งค่า",
"environmentVariable": "ค่าทั่วไป",
"currentValue": "ค่าปัจจุบัน",
"configurationFile": "ไฟล์การตั้งค่า",
"exportToml": "นำออกการตั้งค่า (TOML)",
"exportSuccess": "นำออกการตั้งค่าไปยังคลิปบอร์ดในรูปแบบ TOML แล้ว",
"exportFailed": "คัดลอกการตั้งค่าล้มเหลว",
"devFlagsHeader": "ปักธงการพัฒนา (อาจมีการเปลี่ยน/เอาออก)",
"devFlagsComment": "การตั้งค่านี้อยู่ในช่วงทดลองและอาจจะมีการเอาออกในเวอร์ชั่นหลัง"
}
},
"activity": {
@ -449,7 +601,10 @@
"quickScan": "สแกนแบบเร็ว",
"fullScan": "สแกนทั้งหมด",
"serverUptime": "เซิร์ฟเวอร์ออนไลน์นาน",
"serverDown": "ออฟไลน์"
"serverDown": "ออฟไลน์",
"scanType": "ประเภท",
"status": "สแกนผิดพลาด",
"elapsedTime": "เวลาที่ใช้"
},
"help": {
"title": "คีย์ลัด Navidrome",
@ -464,5 +619,10 @@
"toggle_love": "เพิ่มเพลงนี้ไปยังรายการโปรด",
"current_song": "ไปยังเพลงปัจจุบัน"
}
},
"nowPlaying": {
"title": "กำลังเล่น",
"empty": "ไม่มีเพลงเล่น",
"minutesAgo": "%{smart_count} นาทีที่แล้ว |||| %{smart_count} นาทีที่แล้ว"
}
}

File diff suppressed because it is too large Load Diff

View File

@ -26,24 +26,8 @@ var (
ErrAlreadyScanning = errors.New("already scanning")
)
type Scanner interface {
// ScanAll starts a full scan of the music library. This is a blocking operation.
ScanAll(ctx context.Context, fullScan bool) (warnings []string, err error)
Status(context.Context) (*StatusInfo, error)
}
type StatusInfo struct {
Scanning bool
LastScan time.Time
Count uint32
FolderCount uint32
LastError string
ScanType string
ElapsedTime time.Duration
}
func New(rootCtx context.Context, ds model.DataStore, cw artwork.CacheWarmer, broker events.Broker,
pls core.Playlists, m metrics.Metrics) Scanner {
pls core.Playlists, m metrics.Metrics) model.Scanner {
c := &controller{
rootCtx: rootCtx,
ds: ds,
@ -65,9 +49,10 @@ func (s *controller) getScanner() scanner {
return &scannerImpl{ds: s.ds, cw: s.cw, pls: s.pls}
}
// CallScan starts an in-process scan of the music library.
// CallScan starts an in-process scan of specific library/folder pairs.
// If targets is empty, it scans all libraries.
// This is meant to be called from the command line (see cmd/scan.go).
func CallScan(ctx context.Context, ds model.DataStore, pls core.Playlists, fullScan bool) (<-chan *ProgressInfo, error) {
func CallScan(ctx context.Context, ds model.DataStore, pls core.Playlists, fullScan bool, targets []model.ScanTarget) (<-chan *ProgressInfo, error) {
release, err := lockScan(ctx)
if err != nil {
return nil, err
@ -79,7 +64,7 @@ func CallScan(ctx context.Context, ds model.DataStore, pls core.Playlists, fullS
go func() {
defer close(progress)
scanner := &scannerImpl{ds: ds, cw: artwork.NoopCacheWarmer(), pls: pls}
scanner.scanAll(ctx, fullScan, progress)
scanner.scanFolders(ctx, fullScan, targets, progress)
}()
return progress, nil
}
@ -99,8 +84,11 @@ type ProgressInfo struct {
ForceUpdate bool
}
// scanner defines the interface for different scanner implementations.
// This allows for swapping between in-process and external scanners.
type scanner interface {
scanAll(ctx context.Context, fullScan bool, progress chan<- *ProgressInfo)
// scanFolders performs the actual scanning of folders. If targets is nil, it scans all libraries.
scanFolders(ctx context.Context, fullScan bool, targets []model.ScanTarget, progress chan<- *ProgressInfo)
}
type controller struct {
@ -158,7 +146,7 @@ func (s *controller) getScanInfo(ctx context.Context) (scanType string, elapsed
return scanType, elapsed, lastErr
}
func (s *controller) Status(ctx context.Context) (*StatusInfo, error) {
func (s *controller) Status(ctx context.Context) (*model.ScannerStatus, error) {
lastScanTime, err := s.getLastScanTime(ctx)
if err != nil {
return nil, fmt.Errorf("getting last scan time: %w", err)
@ -167,7 +155,7 @@ func (s *controller) Status(ctx context.Context) (*StatusInfo, error) {
scanType, elapsed, lastErr := s.getScanInfo(ctx)
if running.Load() {
status := &StatusInfo{
status := &model.ScannerStatus{
Scanning: true,
LastScan: lastScanTime,
Count: s.count.Load(),
@ -183,7 +171,7 @@ func (s *controller) Status(ctx context.Context) (*StatusInfo, error) {
if err != nil {
return nil, fmt.Errorf("getting library stats: %w", err)
}
return &StatusInfo{
return &model.ScannerStatus{
Scanning: false,
LastScan: lastScanTime,
Count: uint32(count),
@ -208,6 +196,10 @@ func (s *controller) getCounters(ctx context.Context) (int64, int64, error) {
}
func (s *controller) ScanAll(requestCtx context.Context, fullScan bool) ([]string, error) {
return s.ScanFolders(requestCtx, fullScan, nil)
}
func (s *controller) ScanFolders(requestCtx context.Context, fullScan bool, targets []model.ScanTarget) ([]string, error) {
release, err := lockScan(requestCtx)
if err != nil {
return nil, err
@ -224,7 +216,7 @@ func (s *controller) ScanAll(requestCtx context.Context, fullScan bool) ([]strin
go func() {
defer close(progress)
scanner := s.getScanner()
scanner.scanAll(ctx, fullScan, progress)
scanner.scanFolders(ctx, fullScan, targets, progress)
}()
// Wait for the scan to finish, sending progress events to all connected clients

View File

@ -9,6 +9,7 @@ import (
"github.com/navidrome/navidrome/core/artwork"
"github.com/navidrome/navidrome/core/metrics"
"github.com/navidrome/navidrome/db"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/persistence"
"github.com/navidrome/navidrome/scanner"
"github.com/navidrome/navidrome/server/events"
@ -20,7 +21,7 @@ import (
var _ = Describe("Controller", func() {
var ctx context.Context
var ds *tests.MockDataStore
var ctrl scanner.Scanner
var ctrl model.Scanner
Describe("Status", func() {
BeforeEach(func() {

View File

@ -8,10 +8,12 @@ import (
"io"
"os"
"os/exec"
"strings"
"github.com/navidrome/navidrome/conf"
"github.com/navidrome/navidrome/log"
. "github.com/navidrome/navidrome/utils/gg"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/utils/slice"
)
// scannerExternal is a scanner that runs an external process to do the scanning. It is used to avoid
@ -23,19 +25,41 @@ import (
// process will forward them to the caller.
type scannerExternal struct{}
func (s *scannerExternal) scanAll(ctx context.Context, fullScan bool, progress chan<- *ProgressInfo) {
// scanFolders satisfies the scanner interface by delegating to scan, which runs
// the scanning in an external subprocess. If targets is empty, all libraries are scanned.
func (s *scannerExternal) scanFolders(ctx context.Context, fullScan bool, targets []model.ScanTarget, progress chan<- *ProgressInfo) {
	s.scan(ctx, fullScan, targets, progress)
}
func (s *scannerExternal) scan(ctx context.Context, fullScan bool, targets []model.ScanTarget, progress chan<- *ProgressInfo) {
exe, err := os.Executable()
if err != nil {
progress <- &ProgressInfo{Error: fmt.Sprintf("failed to get executable path: %s", err)}
return
}
log.Debug(ctx, "Spawning external scanner process", "fullScan", fullScan, "path", exe)
cmd := exec.CommandContext(ctx, exe, "scan",
// Build command arguments
args := []string{
"scan",
"--nobanner", "--subprocess",
"--configfile", conf.Server.ConfigFile,
"--datafolder", conf.Server.DataFolder,
"--cachefolder", conf.Server.CacheFolder,
If(fullScan, "--full", ""))
}
// Add targets if provided
if len(targets) > 0 {
targetsStr := strings.Join(slice.Map(targets, func(t model.ScanTarget) string { return t.String() }), ",")
args = append(args, "--targets", targetsStr)
log.Debug(ctx, "Spawning external scanner process with targets", "fullScan", fullScan, "path", exe, "targets", targetsStr)
} else {
log.Debug(ctx, "Spawning external scanner process", "fullScan", fullScan, "path", exe)
}
// Add full scan flag if needed
if fullScan {
args = append(args, "--full")
}
cmd := exec.CommandContext(ctx, exe, args...)
in, out := io.Pipe()
defer in.Close()

View File

@ -15,9 +15,7 @@ import (
"github.com/navidrome/navidrome/utils/chrono"
)
func newFolderEntry(job *scanJob, path string) *folderEntry {
id := model.FolderID(job.lib, path)
info := job.popLastUpdate(id)
func newFolderEntry(job *scanJob, id, path string, updTime time.Time, hash string) *folderEntry {
f := &folderEntry{
id: id,
job: job,
@ -25,8 +23,8 @@ func newFolderEntry(job *scanJob, path string) *folderEntry {
audioFiles: make(map[string]fs.DirEntry),
imageFiles: make(map[string]fs.DirEntry),
albumIDMap: make(map[string]string),
updTime: info.UpdatedAt,
prevHash: info.Hash,
updTime: updTime,
prevHash: hash,
}
return f
}

View File

@ -40,9 +40,8 @@ var _ = Describe("folder_entry", func() {
UpdatedAt: time.Now().Add(-30 * time.Minute),
Hash: "previous-hash",
}
job.lastUpdates[folderID] = updateInfo
entry := newFolderEntry(job, path)
entry := newFolderEntry(job, folderID, path, updateInfo.UpdatedAt, updateInfo.Hash)
Expect(entry.id).To(Equal(folderID))
Expect(entry.job).To(Equal(job))
@ -53,15 +52,10 @@ var _ = Describe("folder_entry", func() {
Expect(entry.updTime).To(Equal(updateInfo.UpdatedAt))
Expect(entry.prevHash).To(Equal(updateInfo.Hash))
})
})
It("creates a new folder entry with zero time when no previous update exists", func() {
entry := newFolderEntry(job, path)
Expect(entry.updTime).To(BeZero())
Expect(entry.prevHash).To(BeEmpty())
})
It("removes the lastUpdate from the job after popping", func() {
Describe("createFolderEntry", func() {
It("removes the lastUpdate from the job after creation", func() {
folderID := model.FolderID(lib, path)
updateInfo := model.FolderUpdateInfo{
UpdatedAt: time.Now().Add(-30 * time.Minute),
@ -69,8 +63,10 @@ var _ = Describe("folder_entry", func() {
}
job.lastUpdates[folderID] = updateInfo
newFolderEntry(job, path)
entry := job.createFolderEntry(path)
Expect(entry.updTime).To(Equal(updateInfo.UpdatedAt))
Expect(entry.prevHash).To(Equal(updateInfo.Hash))
Expect(job.lastUpdates).ToNot(HaveKey(folderID))
})
})
@ -79,7 +75,8 @@ var _ = Describe("folder_entry", func() {
var entry *folderEntry
BeforeEach(func() {
entry = newFolderEntry(job, path)
folderID := model.FolderID(lib, path)
entry = newFolderEntry(job, folderID, path, time.Time{}, "")
})
Describe("hasNoFiles", func() {
@ -458,7 +455,9 @@ var _ = Describe("folder_entry", func() {
Describe("integration scenarios", func() {
It("handles complete folder lifecycle", func() {
// Create new folder entry
entry := newFolderEntry(job, "music/rock/album")
folderPath := "music/rock/album"
folderID := model.FolderID(lib, folderPath)
entry := newFolderEntry(job, folderID, folderPath, time.Time{}, "")
// Initially new and has no files
Expect(entry.isNew()).To(BeTrue())

163
scanner/ignore_checker.go Normal file
View File

@ -0,0 +1,163 @@
package scanner
import (
	"bufio"
	"context"
	"errors"
	"io/fs"
	"path"
	"strings"

	"github.com/navidrome/navidrome/consts"
	"github.com/navidrome/navidrome/log"
	ignore "github.com/sabhiram/go-gitignore"
)
// IgnoreChecker manages .ndignore patterns using a stack-based approach that mirrors
// a directory tree traversal: Push() adds the patterns found when entering a folder,
// Pop() removes them when leaving, and ShouldIgnore() tests a path against the
// accumulated patterns of all pushed folder levels.
type IgnoreChecker struct {
	fsys            fs.FS             // filesystem the .ndignore files are read from
	patternStack    [][]string        // one entry per pushed folder level (entry may be empty)
	currentPatterns []string          // flattened view of patternStack, kept in sync by rebuildCurrentPatterns
	matcher         *ignore.GitIgnore // compiled matcher for currentPatterns; nil means "ignore nothing"
}
// newIgnoreChecker creates an IgnoreChecker that reads .ndignore files from fsys.
// The returned checker starts with an empty pattern stack and a nil matcher,
// so ShouldIgnore reports false until patterns are pushed.
func newIgnoreChecker(fsys fs.FS) *IgnoreChecker {
	ic := &IgnoreChecker{fsys: fsys}
	ic.patternStack = make([][]string, 0)
	return ic
}
// Push loads the .ndignore patterns of the given folder and pushes them onto the
// pattern stack, recompiling the active matcher. Call this when entering a folder
// during directory tree traversal. It always returns nil.
func (ic *IgnoreChecker) Push(ctx context.Context, folder string) error {
	ic.patternStack = append(ic.patternStack, ic.loadPatternsFromFolder(ctx, folder))
	ic.rebuildCurrentPatterns()
	return nil
}
// Pop discards the patterns contributed by the most recently pushed folder and
// recompiles the active matcher. Call this when leaving a folder during directory
// tree traversal. Popping an empty stack is a no-op.
func (ic *IgnoreChecker) Pop() {
	n := len(ic.patternStack)
	if n == 0 {
		return
	}
	ic.patternStack = ic.patternStack[:n-1]
	ic.rebuildCurrentPatterns()
}
// PushAllParents pushes patterns for every folder on the way from the root (".")
// down to targetPath, inclusive. It is a convenience for checking a specific path
// without recursively walking the tree, and compiles the combined patterns only
// once, after all levels have been loaded. It always returns nil.
func (ic *IgnoreChecker) PushAllParents(ctx context.Context, targetPath string) error {
	// The root is always the first level, even for an empty/"." target.
	folders := []string{"."}
	if targetPath != "" && targetPath != "." {
		current := "."
		for _, segment := range strings.Split(path.Clean(targetPath), "/") {
			if segment == "" || segment == "." {
				continue
			}
			current = path.Join(current, segment)
			folders = append(folders, current)
		}
	}

	for _, folder := range folders {
		ic.patternStack = append(ic.patternStack, ic.loadPatternsFromFolder(ctx, folder))
	}

	// Single rebuild/compile for the whole chain, instead of one per level.
	ic.rebuildCurrentPatterns()
	return nil
}
// ShouldIgnore reports whether relPath matches any of the currently active
// ignore patterns. The root path ("" or ".") is never ignored, and with no
// compiled matcher nothing is ignored. Matches are logged at trace level.
func (ic *IgnoreChecker) ShouldIgnore(ctx context.Context, relPath string) bool {
	// The root itself can never be ignored, and without patterns there is
	// nothing to match against.
	if relPath == "" || relPath == "." || ic.matcher == nil {
		return false
	}
	if !ic.matcher.MatchesPath(relPath) {
		return false
	}
	log.Trace(ctx, "Scanner: Ignoring entry matching .ndignore", "path", relPath)
	return true
}
// loadPatternsFromFolder reads the .ndignore file in the specified folder and returns
// its patterns. A missing file yields nil (nothing to ignore). A file with no effective
// patterns (empty, or only comments/blank lines) yields a pattern that ignores
// everything ("**/*"). Read errors are logged and treated as best-effort: whatever was
// parsed before the error is returned.
func (ic *IgnoreChecker) loadPatternsFromFolder(ctx context.Context, folder string) []string {
	ignoreFilePath := path.Join(folder, consts.ScanIgnoreFile)

	// Open directly instead of Stat+Open: one filesystem access, and unexpected
	// errors (e.g. permissions) are surfaced instead of silently ignored.
	ignoreFile, err := ic.fsys.Open(ignoreFilePath)
	if err != nil {
		if !errors.Is(err, fs.ErrNotExist) {
			log.Warn(ctx, "Scanner: Error opening .ndignore file", "path", ignoreFilePath, err)
		}
		// No usable .ndignore file in this folder.
		return nil
	}
	defer ignoreFile.Close()

	var patterns []string
	lineScanner := bufio.NewScanner(ignoreFile)
	for lineScanner.Scan() {
		line := strings.TrimSpace(lineScanner.Text())
		if line == "" || strings.HasPrefix(line, "#") {
			continue // Skip empty lines, whitespace-only lines, and comments
		}
		patterns = append(patterns, line)
	}
	if err := lineScanner.Err(); err != nil {
		log.Warn(ctx, "Scanner: Error reading .ndignore file", "path", ignoreFilePath, err)
		return patterns
	}

	// An .ndignore file with no effective patterns means "ignore everything here".
	if len(patterns) == 0 {
		log.Trace(ctx, "Scanner: .ndignore file is empty, ignoring everything", "path", folder)
		patterns = []string{"**/*"}
	}
	return patterns
}
// rebuildCurrentPatterns flattens the pattern stack into currentPatterns and
// recompiles the matcher. Called after every Push/Pop (and by PushAllParents,
// once, after loading all levels).
func (ic *IgnoreChecker) rebuildCurrentPatterns() {
	// Pre-size the flattened slice to avoid repeated growth; this runs on every
	// stack change. Keep it non-nil (len 0) so callers can rely on an empty slice.
	total := 0
	for _, patterns := range ic.patternStack {
		total += len(patterns)
	}
	ic.currentPatterns = make([]string, 0, total)
	for _, patterns := range ic.patternStack {
		ic.currentPatterns = append(ic.currentPatterns, patterns...)
	}
	ic.compilePatterns()
}
// compilePatterns compiles currentPatterns into a GitIgnore matcher.
// With no patterns the matcher is set to nil, which ShouldIgnore treats
// as "ignore nothing".
func (ic *IgnoreChecker) compilePatterns() {
	if len(ic.currentPatterns) > 0 {
		ic.matcher = ignore.CompileIgnoreLines(ic.currentPatterns...)
		return
	}
	ic.matcher = nil
}

View File

@ -0,0 +1,313 @@
package scanner
import (
"context"
"testing/fstest"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
var _ = Describe("IgnoreChecker", func() {
Describe("loadPatternsFromFolder", func() {
var ic *IgnoreChecker
var ctx context.Context
BeforeEach(func() {
ctx = context.Background()
})
Context("when .ndignore file does not exist", func() {
It("should return empty patterns", func() {
fsys := fstest.MapFS{}
ic = newIgnoreChecker(fsys)
patterns := ic.loadPatternsFromFolder(ctx, ".")
Expect(patterns).To(BeEmpty())
})
})
Context("when .ndignore file is empty", func() {
It("should return wildcard to ignore everything", func() {
fsys := fstest.MapFS{
".ndignore": &fstest.MapFile{Data: []byte("")},
}
ic = newIgnoreChecker(fsys)
patterns := ic.loadPatternsFromFolder(ctx, ".")
Expect(patterns).To(Equal([]string{"**/*"}))
})
})
DescribeTable("parsing .ndignore content",
func(content string, expectedPatterns []string) {
fsys := fstest.MapFS{
".ndignore": &fstest.MapFile{Data: []byte(content)},
}
ic = newIgnoreChecker(fsys)
patterns := ic.loadPatternsFromFolder(ctx, ".")
Expect(patterns).To(Equal(expectedPatterns))
},
Entry("single pattern", "*.txt", []string{"*.txt"}),
Entry("multiple patterns", "*.txt\n*.log", []string{"*.txt", "*.log"}),
Entry("with comments", "# comment\n*.txt\n# another\n*.log", []string{"*.txt", "*.log"}),
Entry("with empty lines", "*.txt\n\n*.log\n\n", []string{"*.txt", "*.log"}),
Entry("mixed content", "# header\n\n*.txt\n# middle\n*.log\n\n", []string{"*.txt", "*.log"}),
Entry("only comments and empty lines", "# comment\n\n# another\n", []string{"**/*"}),
Entry("trailing newline", "*.txt\n*.log\n", []string{"*.txt", "*.log"}),
Entry("directory pattern", "temp/", []string{"temp/"}),
Entry("wildcard pattern", "**/*.mp3", []string{"**/*.mp3"}),
Entry("multiple wildcards", "**/*.mp3\n**/*.flac\n*.log", []string{"**/*.mp3", "**/*.flac", "*.log"}),
Entry("negation pattern", "!important.txt", []string{"!important.txt"}),
Entry("comment with hash not at start is pattern", "not#comment", []string{"not#comment"}),
Entry("whitespace-only lines skipped", "*.txt\n \n*.log\n\t\n", []string{"*.txt", "*.log"}),
Entry("patterns with whitespace trimmed", " *.txt \n\t*.log\t", []string{"*.txt", "*.log"}),
)
})
// Push loads a folder's .ndignore patterns onto the stack and Pop discards
// the most recently pushed level. These specs check stack depth, the set of
// accumulated patterns, and that the compiled matcher tracks the stack.
Describe("Push and Pop", func() {
	var ic *IgnoreChecker
	var fsys fstest.MapFS
	var ctx context.Context
	BeforeEach(func() {
		ctx = context.Background()
		fsys = fstest.MapFS{
			".ndignore":         &fstest.MapFile{Data: []byte("*.txt")},
			"folder1/.ndignore": &fstest.MapFile{Data: []byte("*.mp3")},
			"folder2/.ndignore": &fstest.MapFile{Data: []byte("*.flac")},
		}
		ic = newIgnoreChecker(fsys)
	})
	Context("Push", func() {
		It("should add patterns to stack", func() {
			Expect(ic.Push(ctx, ".")).To(Succeed())
			Expect(ic.patternStack).To(HaveLen(1))
			Expect(ic.currentPatterns).To(ContainElement("*.txt"))
		})
		It("should compile matcher after push", func() {
			Expect(ic.Push(ctx, ".")).To(Succeed())
			Expect(ic.matcher).ToNot(BeNil())
		})
		It("should accumulate patterns from multiple levels", func() {
			Expect(ic.Push(ctx, ".")).To(Succeed())
			Expect(ic.Push(ctx, "folder1")).To(Succeed())
			Expect(ic.patternStack).To(HaveLen(2))
			Expect(ic.currentPatterns).To(ConsistOf("*.txt", "*.mp3"))
		})
		It("should handle push when no .ndignore exists", func() {
			// Pushing a folder with no ignore file succeeds with an empty level.
			Expect(ic.Push(ctx, "nonexistent")).To(Succeed())
			Expect(ic.patternStack).To(HaveLen(1))
			Expect(ic.currentPatterns).To(BeEmpty())
		})
	})
	Context("Pop", func() {
		It("should remove most recent patterns", func() {
			Expect(ic.Push(ctx, ".")).To(Succeed())
			Expect(ic.Push(ctx, "folder1")).To(Succeed())
			ic.Pop()
			Expect(ic.patternStack).To(HaveLen(1))
			Expect(ic.currentPatterns).To(Equal([]string{"*.txt"}))
		})
		It("should handle Pop on empty stack gracefully", func() {
			Expect(ic.Pop).ToNot(Panic())
			Expect(ic.patternStack).To(BeEmpty())
		})
		It("should set matcher to nil when all patterns popped", func() {
			Expect(ic.Push(ctx, ".")).To(Succeed())
			Expect(ic.matcher).ToNot(BeNil())
			ic.Pop()
			Expect(ic.matcher).To(BeNil())
		})
		It("should update matcher after pop", func() {
			Expect(ic.Push(ctx, ".")).To(Succeed())
			Expect(ic.Push(ctx, "folder1")).To(Succeed())
			before := ic.matcher
			ic.Pop()
			after := ic.matcher
			// The matcher is recompiled for the shrunken pattern set.
			Expect(before).ToNot(Equal(after))
		})
	})
	Context("multiple Push/Pop cycles", func() {
		It("should maintain correct state through cycles", func() {
			Expect(ic.Push(ctx, ".")).To(Succeed())
			Expect(ic.currentPatterns).To(Equal([]string{"*.txt"}))
			Expect(ic.Push(ctx, "folder1")).To(Succeed())
			Expect(ic.currentPatterns).To(ConsistOf("*.txt", "*.mp3"))
			ic.Pop()
			Expect(ic.currentPatterns).To(Equal([]string{"*.txt"}))
			Expect(ic.Push(ctx, "folder2")).To(Succeed())
			Expect(ic.currentPatterns).To(ConsistOf("*.txt", "*.flac"))
			ic.Pop()
			Expect(ic.currentPatterns).To(Equal([]string{"*.txt"}))
			ic.Pop()
			Expect(ic.currentPatterns).To(BeEmpty())
		})
	})
})
// PushAllParents walks from the library root down to the target folder,
// pushing one stack level per path segment so every ancestor's .ndignore
// patterns are in effect before the target itself is scanned.
Describe("PushAllParents", func() {
	var ic *IgnoreChecker
	var ctx context.Context
	BeforeEach(func() {
		ctx = context.Background()
		fsys := fstest.MapFS{
			".ndignore":                         &fstest.MapFile{Data: []byte("root.txt")},
			"folder1/.ndignore":                 &fstest.MapFile{Data: []byte("level1.txt")},
			"folder1/folder2/.ndignore":         &fstest.MapFile{Data: []byte("level2.txt")},
			"folder1/folder2/folder3/.ndignore": &fstest.MapFile{Data: []byte("level3.txt")},
		}
		ic = newIgnoreChecker(fsys)
	})
	DescribeTable("loading parent patterns",
		func(targetPath string, expectedStackDepth int, expectedPatterns []string) {
			Expect(ic.PushAllParents(ctx, targetPath)).To(Succeed())
			Expect(ic.patternStack).To(HaveLen(expectedStackDepth))
			Expect(ic.currentPatterns).To(ConsistOf(expectedPatterns))
		},
		Entry("root path", ".", 1, []string{"root.txt"}),
		Entry("empty path", "", 1, []string{"root.txt"}),
		Entry("single level", "folder1", 2, []string{"root.txt", "level1.txt"}),
		Entry("two levels", "folder1/folder2", 3, []string{"root.txt", "level1.txt", "level2.txt"}),
		Entry("three levels", "folder1/folder2/folder3", 4, []string{"root.txt", "level1.txt", "level2.txt", "level3.txt"}),
	)
	It("should only compile patterns once at the end", func() {
		// This is more of a behavioral test - we verify the matcher is not nil after PushAllParents
		Expect(ic.PushAllParents(ctx, "folder1/folder2")).To(Succeed())
		Expect(ic.matcher).ToNot(BeNil())
	})
	It("should handle paths with dot", func() {
		// A leading "./" is normalized away, so only root + folder1 are pushed.
		Expect(ic.PushAllParents(ctx, "./folder1")).To(Succeed())
		Expect(ic.patternStack).To(HaveLen(2))
	})
	Context("when some parent folders have no .ndignore", func() {
		BeforeEach(func() {
			fsys := fstest.MapFS{
				".ndignore":                 &fstest.MapFile{Data: []byte("root.txt")},
				"folder1/folder2/.ndignore": &fstest.MapFile{Data: []byte("level2.txt")},
			}
			ic = newIgnoreChecker(fsys)
		})
		It("should still push all parent levels", func() {
			Expect(ic.PushAllParents(ctx, "folder1/folder2")).To(Succeed())
			Expect(ic.patternStack).To(HaveLen(3)) // root, folder1 (empty), folder2
			Expect(ic.currentPatterns).To(ConsistOf("root.txt", "level2.txt"))
		})
	})
})
// ShouldIgnore reports whether a path matches the currently loaded ignore
// patterns. Root and empty paths are always scanned regardless of patterns.
Describe("ShouldIgnore", func() {
	var ic *IgnoreChecker
	var ctx context.Context
	BeforeEach(func() {
		ctx = context.Background()
	})
	Context("with no patterns loaded", func() {
		It("should not ignore any path", func() {
			ic = newIgnoreChecker(fstest.MapFS{})
			Expect(ic.ShouldIgnore(ctx, "anything.txt")).To(BeFalse())
			Expect(ic.ShouldIgnore(ctx, "folder/file.mp3")).To(BeFalse())
		})
	})
	Context("special paths", func() {
		BeforeEach(func() {
			// Catch-all pattern: everything except the special paths is ignored.
			fsys := fstest.MapFS{
				".ndignore": &fstest.MapFile{Data: []byte("**/*")},
			}
			ic = newIgnoreChecker(fsys)
			Expect(ic.Push(ctx, ".")).To(Succeed())
		})
		It("should never ignore root or empty paths", func() {
			Expect(ic.ShouldIgnore(ctx, "")).To(BeFalse())
			Expect(ic.ShouldIgnore(ctx, ".")).To(BeFalse())
		})
		It("should ignore all other paths with wildcard", func() {
			Expect(ic.ShouldIgnore(ctx, "file.txt")).To(BeTrue())
			Expect(ic.ShouldIgnore(ctx, "folder/file.mp3")).To(BeTrue())
		})
	})
	// Each entry loads a single pattern and probes one path against it.
	DescribeTable("pattern matching",
		func(pattern string, path string, shouldMatch bool) {
			fsys := fstest.MapFS{
				".ndignore": &fstest.MapFile{Data: []byte(pattern)},
			}
			ic = newIgnoreChecker(fsys)
			Expect(ic.Push(ctx, ".")).To(Succeed())
			Expect(ic.ShouldIgnore(ctx, path)).To(Equal(shouldMatch))
		},
		Entry("glob match", "*.txt", "file.txt", true),
		Entry("glob no match", "*.txt", "file.mp3", false),
		Entry("directory pattern match", "tmp/", "tmp/file.txt", true),
		Entry("directory pattern no match", "tmp/", "temporary/file.txt", false),
		Entry("nested glob match", "**/*.log", "deep/nested/file.log", true),
		Entry("nested glob no match", "**/*.log", "deep/nested/file.txt", false),
		Entry("specific file match", "ignore.me", "ignore.me", true),
		Entry("specific file no match", "ignore.me", "keep.me", false),
		Entry("wildcard all", "**/*", "any/path/file.txt", true),
		Entry("nested specific match", "temp/*", "temp/cache.db", true),
		Entry("nested specific no match", "temp/*", "temporary/cache.db", false),
	)
	Context("with multiple patterns", func() {
		BeforeEach(func() {
			fsys := fstest.MapFS{
				".ndignore": &fstest.MapFile{Data: []byte("*.txt\n*.log\ntemp/")},
			}
			ic = newIgnoreChecker(fsys)
			Expect(ic.Push(ctx, ".")).To(Succeed())
		})
		It("should match any of the patterns", func() {
			Expect(ic.ShouldIgnore(ctx, "file.txt")).To(BeTrue())
			Expect(ic.ShouldIgnore(ctx, "debug.log")).To(BeTrue())
			Expect(ic.ShouldIgnore(ctx, "temp/cache")).To(BeTrue())
			Expect(ic.ShouldIgnore(ctx, "music.mp3")).To(BeFalse())
		})
	})
})
})

View File

@ -26,58 +26,46 @@ import (
"github.com/navidrome/navidrome/utils/slice"
)
func createPhaseFolders(ctx context.Context, state *scanState, ds model.DataStore, cw artwork.CacheWarmer, libs []model.Library) *phaseFolders {
func createPhaseFolders(ctx context.Context, state *scanState, ds model.DataStore, cw artwork.CacheWarmer) *phaseFolders {
var jobs []*scanJob
var updatedLibs []model.Library
for _, lib := range libs {
if lib.LastScanStartedAt.IsZero() {
err := ds.Library(ctx).ScanBegin(lib.ID, state.fullScan)
if err != nil {
log.Error(ctx, "Scanner: Error updating last scan started at", "lib", lib.Name, err)
state.sendWarning(err.Error())
continue
}
// Reload library to get updated state
l, err := ds.Library(ctx).Get(lib.ID)
if err != nil {
log.Error(ctx, "Scanner: Error reloading library", "lib", lib.Name, err)
state.sendWarning(err.Error())
continue
}
lib = *l
} else {
log.Debug(ctx, "Scanner: Resuming previous scan", "lib", lib.Name, "lastScanStartedAt", lib.LastScanStartedAt, "fullScan", lib.FullScanInProgress)
// Create scan jobs for all libraries
for _, lib := range state.libraries {
// Get target folders for this library if selective scan
var targetFolders []string
if state.isSelectiveScan() {
targetFolders = state.targets[lib.ID]
}
job, err := newScanJob(ctx, ds, cw, lib, state.fullScan)
job, err := newScanJob(ctx, ds, cw, lib, state.fullScan, targetFolders)
if err != nil {
log.Error(ctx, "Scanner: Error creating scan context", "lib", lib.Name, err)
state.sendWarning(err.Error())
continue
}
jobs = append(jobs, job)
updatedLibs = append(updatedLibs, lib)
}
// Update the state with the libraries that have been processed and have their scan timestamps set
state.libraries = updatedLibs
return &phaseFolders{jobs: jobs, ctx: ctx, ds: ds, state: state}
}
type scanJob struct {
lib model.Library
fs storage.MusicFS
cw artwork.CacheWarmer
lastUpdates map[string]model.FolderUpdateInfo
lock sync.Mutex
numFolders atomic.Int64
lib model.Library
fs storage.MusicFS
cw artwork.CacheWarmer
lastUpdates map[string]model.FolderUpdateInfo // Holds last update info for all (DB) folders in this library
targetFolders []string // Specific folders to scan (including all descendants)
lock sync.Mutex
numFolders atomic.Int64
}
func newScanJob(ctx context.Context, ds model.DataStore, cw artwork.CacheWarmer, lib model.Library, fullScan bool) (*scanJob, error) {
lastUpdates, err := ds.Folder(ctx).GetLastUpdates(lib)
func newScanJob(ctx context.Context, ds model.DataStore, cw artwork.CacheWarmer, lib model.Library, fullScan bool, targetFolders []string) (*scanJob, error) {
// Get folder updates, optionally filtered to specific target folders
lastUpdates, err := ds.Folder(ctx).GetFolderUpdateInfo(lib, targetFolders...)
if err != nil {
return nil, fmt.Errorf("getting last updates: %w", err)
}
fileStore, err := storage.For(lib.Path)
if err != nil {
log.Error(ctx, "Error getting storage for library", "library", lib.Name, "path", lib.Path, err)
@ -88,15 +76,17 @@ func newScanJob(ctx context.Context, ds model.DataStore, cw artwork.CacheWarmer,
log.Error(ctx, "Error getting fs for library", "library", lib.Name, "path", lib.Path, err)
return nil, fmt.Errorf("getting fs for library: %w", err)
}
lib.FullScanInProgress = lib.FullScanInProgress || fullScan
return &scanJob{
lib: lib,
fs: fsys,
cw: cw,
lastUpdates: lastUpdates,
lib: lib,
fs: fsys,
cw: cw,
lastUpdates: lastUpdates,
targetFolders: targetFolders,
}, nil
}
// popLastUpdate retrieves and removes the last update info for the given folder ID
// This is used to track which folders have been found during the walk_dir_tree
func (j *scanJob) popLastUpdate(folderID string) model.FolderUpdateInfo {
j.lock.Lock()
defer j.lock.Unlock()
@ -106,6 +96,15 @@ func (j *scanJob) popLastUpdate(folderID string) model.FolderUpdateInfo {
return lastUpdate
}
// createFolderEntry builds a folderEntry for the given path, seeding it with
// the folder's previously recorded update time and hash. Fetching that info
// via popLastUpdate also removes the folder from the job's lastUpdates map,
// which is how folders found during walk_dir_tree are tracked.
func (j *scanJob) createFolderEntry(path string) *folderEntry {
	folderID := model.FolderID(j.lib, path)
	lastUpdate := j.popLastUpdate(folderID)
	return newFolderEntry(j, folderID, path, lastUpdate.UpdatedAt, lastUpdate.Hash)
}
// phaseFolders represents the first phase of the scanning process, which is responsible
// for scanning all libraries and importing new or updated files. This phase involves
// traversing the directory tree of each library, identifying new or modified media files,
@ -144,7 +143,8 @@ func (p *phaseFolders) producer() ppl.Producer[*folderEntry] {
if utils.IsCtxDone(p.ctx) {
break
}
outputChan, err := walkDirTree(p.ctx, job)
outputChan, err := walkDirTree(p.ctx, job, job.targetFolders...)
if err != nil {
log.Warn(p.ctx, "Scanner: Error scanning library", "lib", job.lib.Name, err)
}

View File

@ -69,9 +69,6 @@ func (p *phaseMissingTracks) produce(put func(tracks *missingTracks)) error {
}
}
for _, lib := range p.state.libraries {
if lib.LastScanStartedAt.IsZero() {
continue
}
log.Debug(p.ctx, "Scanner: Checking missing tracks", "libraryId", lib.ID, "libraryName", lib.Name)
cursor, err := p.ds.MediaFile(p.ctx).GetMissingAndMatching(lib.ID)
if err != nil {

View File

@ -27,14 +27,13 @@ import (
type phaseRefreshAlbums struct {
ds model.DataStore
ctx context.Context
libs model.Libraries
refreshed atomic.Uint32
skipped atomic.Uint32
state *scanState
}
func createPhaseRefreshAlbums(ctx context.Context, state *scanState, ds model.DataStore, libs model.Libraries) *phaseRefreshAlbums {
return &phaseRefreshAlbums{ctx: ctx, ds: ds, libs: libs, state: state}
func createPhaseRefreshAlbums(ctx context.Context, state *scanState, ds model.DataStore) *phaseRefreshAlbums {
return &phaseRefreshAlbums{ctx: ctx, ds: ds, state: state}
}
func (p *phaseRefreshAlbums) description() string {
@ -47,7 +46,7 @@ func (p *phaseRefreshAlbums) producer() ppl.Producer[*model.Album] {
func (p *phaseRefreshAlbums) produce(put func(album *model.Album)) error {
count := 0
for _, lib := range p.libs {
for _, lib := range p.state.libraries {
cursor, err := p.ds.Album(p.ctx).GetTouchedAlbums(lib.ID)
if err != nil {
return fmt.Errorf("loading touched albums: %w", err)

View File

@ -32,8 +32,8 @@ var _ = Describe("phaseRefreshAlbums", func() {
{ID: 1, Name: "Library 1"},
{ID: 2, Name: "Library 2"},
}
state = &scanState{}
phase = createPhaseRefreshAlbums(ctx, state, ds, libs)
state = &scanState{libraries: libs}
phase = createPhaseRefreshAlbums(ctx, state, ds)
})
Describe("description", func() {

View File

@ -3,6 +3,8 @@ package scanner
import (
"context"
"fmt"
"maps"
"slices"
"sync/atomic"
"time"
@ -15,6 +17,7 @@ import (
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/utils/run"
"github.com/navidrome/navidrome/utils/slice"
)
type scannerImpl struct {
@ -28,7 +31,8 @@ type scanState struct {
progress chan<- *ProgressInfo
fullScan bool
changesDetected atomic.Bool
libraries model.Libraries // Store libraries list for consistency across phases
libraries model.Libraries // Store libraries list for consistency across phases
targets map[int][]string // Optional: map[libraryID][]folderPaths for selective scans
}
func (s *scanState) sendProgress(info *ProgressInfo) {
@ -37,6 +41,10 @@ func (s *scanState) sendProgress(info *ProgressInfo) {
}
}
func (s *scanState) isSelectiveScan() bool {
return len(s.targets) > 0
}
func (s *scanState) sendWarning(msg string) {
s.sendProgress(&ProgressInfo{Warning: msg})
}
@ -45,7 +53,7 @@ func (s *scanState) sendError(err error) {
s.sendProgress(&ProgressInfo{Error: err.Error()})
}
func (s *scannerImpl) scanAll(ctx context.Context, fullScan bool, progress chan<- *ProgressInfo) {
func (s *scannerImpl) scanFolders(ctx context.Context, fullScan bool, targets []model.ScanTarget, progress chan<- *ProgressInfo) {
startTime := time.Now()
state := scanState{
@ -59,38 +67,75 @@ func (s *scannerImpl) scanAll(ctx context.Context, fullScan bool, progress chan<
state.changesDetected.Store(true)
}
libs, err := s.ds.Library(ctx).GetAll()
// Get libraries and optionally filter by targets
allLibs, err := s.ds.Library(ctx).GetAll()
if err != nil {
state.sendWarning(fmt.Sprintf("getting libraries: %s", err))
return
}
state.libraries = libs
log.Info(ctx, "Scanner: Starting scan", "fullScan", state.fullScan, "numLibraries", len(libs))
if len(targets) > 0 {
// Selective scan: filter libraries and build targets map
state.targets = make(map[int][]string)
for _, target := range targets {
folderPath := target.FolderPath
if folderPath == "" {
folderPath = "."
}
state.targets[target.LibraryID] = append(state.targets[target.LibraryID], folderPath)
}
// Filter libraries to only those in targets
state.libraries = slice.Filter(allLibs, func(lib model.Library) bool {
return len(state.targets[lib.ID]) > 0
})
log.Info(ctx, "Scanner: Starting selective scan", "fullScan", state.fullScan, "numLibraries", len(state.libraries), "numTargets", len(targets))
} else {
// Full library scan
state.libraries = allLibs
log.Info(ctx, "Scanner: Starting scan", "fullScan", state.fullScan, "numLibraries", len(state.libraries))
}
// Store scan type and start time
scanType := "quick"
if state.fullScan {
scanType = "full"
}
if state.isSelectiveScan() {
scanType += "-selective"
}
_ = s.ds.Property(ctx).Put(consts.LastScanTypeKey, scanType)
_ = s.ds.Property(ctx).Put(consts.LastScanStartTimeKey, startTime.Format(time.RFC3339))
// if there was a full scan in progress, force a full scan
if !state.fullScan {
for _, lib := range libs {
for _, lib := range state.libraries {
if lib.FullScanInProgress {
log.Info(ctx, "Scanner: Interrupted full scan detected", "lib", lib.Name)
state.fullScan = true
_ = s.ds.Property(ctx).Put(consts.LastScanTypeKey, "full")
if state.isSelectiveScan() {
_ = s.ds.Property(ctx).Put(consts.LastScanTypeKey, "full-selective")
} else {
_ = s.ds.Property(ctx).Put(consts.LastScanTypeKey, "full")
}
break
}
}
}
// Prepare libraries for scanning (initialize LastScanStartedAt if needed)
err = s.prepareLibrariesForScan(ctx, &state)
if err != nil {
log.Error(ctx, "Scanner: Error preparing libraries for scan", err)
state.sendError(err)
return
}
err = run.Sequentially(
// Phase 1: Scan all libraries and import new/updated files
runPhase[*folderEntry](ctx, 1, createPhaseFolders(ctx, &state, s.ds, s.cw, libs)),
runPhase[*folderEntry](ctx, 1, createPhaseFolders(ctx, &state, s.ds, s.cw)),
// Phase 2: Process missing files, checking for moves
runPhase[*missingTracks](ctx, 2, createPhaseMissingTracks(ctx, &state, s.ds)),
@ -98,7 +143,7 @@ func (s *scannerImpl) scanAll(ctx context.Context, fullScan bool, progress chan<
// Phases 3 and 4 can be run in parallel
run.Parallel(
// Phase 3: Refresh all new/changed albums and update artists
runPhase[*model.Album](ctx, 3, createPhaseRefreshAlbums(ctx, &state, s.ds, libs)),
runPhase[*model.Album](ctx, 3, createPhaseRefreshAlbums(ctx, &state, s.ds)),
// Phase 4: Import/update playlists
runPhase[*model.Folder](ctx, 4, createPhasePlaylists(ctx, &state, s.ds, s.pls, s.cw)),
@ -131,7 +176,53 @@ func (s *scannerImpl) scanAll(ctx context.Context, fullScan bool, progress chan<
state.sendProgress(&ProgressInfo{ChangesDetected: true})
}
log.Info(ctx, "Scanner: Finished scanning all libraries", "duration", time.Since(startTime))
if state.isSelectiveScan() {
log.Info(ctx, "Scanner: Finished scanning selected folders", "duration", time.Since(startTime), "numTargets", len(targets))
} else {
log.Info(ctx, "Scanner: Finished scanning all libraries", "duration", time.Since(startTime))
}
}
// prepareLibrariesForScan initializes the scan for all libraries in the state.
// It calls ScanBegin for libraries that haven't started scanning yet (LastScanStartedAt is zero),
// reloads them to get the updated state, and filters out any libraries that fail to initialize.
// Individual failures are reported as warnings and the library is skipped; an error
// is returned only when no library at all could be prepared.
func (s *scannerImpl) prepareLibrariesForScan(ctx context.Context, state *scanState) error {
	// Pre-size for the common case where every library initializes successfully.
	successfulLibs := make([]model.Library, 0, len(state.libraries))
	for _, lib := range state.libraries {
		if lib.LastScanStartedAt.IsZero() {
			// This is a new scan - mark it as started
			err := s.ds.Library(ctx).ScanBegin(lib.ID, state.fullScan)
			if err != nil {
				log.Error(ctx, "Scanner: Error marking scan start", "lib", lib.Name, err)
				state.sendWarning(err.Error())
				continue
			}
			// Reload library to get updated state (timestamps, etc.)
			reloadedLib, err := s.ds.Library(ctx).Get(lib.ID)
			if err != nil {
				log.Error(ctx, "Scanner: Error reloading library", "lib", lib.Name, err)
				state.sendWarning(err.Error())
				continue
			}
			lib = *reloadedLib
		} else {
			// This is a resumed scan
			log.Debug(ctx, "Scanner: Resuming previous scan", "lib", lib.Name,
				"lastScanStartedAt", lib.LastScanStartedAt, "fullScan", lib.FullScanInProgress)
		}
		successfulLibs = append(successfulLibs, lib)
	}
	if len(successfulLibs) == 0 {
		return fmt.Errorf("no libraries available for scanning")
	}
	// Update state with only successfully initialized libraries
	state.libraries = successfulLibs
	return nil
}
func (s *scannerImpl) runGC(ctx context.Context, state *scanState) func() error {
@ -140,7 +231,15 @@ func (s *scannerImpl) runGC(ctx context.Context, state *scanState) func() error
return s.ds.WithTx(func(tx model.DataStore) error {
if state.changesDetected.Load() {
start := time.Now()
err := tx.GC(ctx)
// For selective scans, extract library IDs to scope GC operations
var libraryIDs []int
if state.isSelectiveScan() {
libraryIDs = slices.Collect(maps.Keys(state.targets))
log.Debug(ctx, "Scanner: Running selective GC", "libraryIDs", libraryIDs)
}
err := tx.GC(ctx, libraryIDs...)
if err != nil {
log.Error(ctx, "Scanner: Error running GC", err)
return fmt.Errorf("running GC: %w", err)

View File

@ -32,7 +32,7 @@ var _ = Describe("Scanner - Multi-Library", Ordered, func() {
var ctx context.Context
var lib1, lib2 model.Library
var ds *tests.MockDataStore
var s scanner.Scanner
var s model.Scanner
createFS := func(path string, files fstest.MapFS) storagetest.FakeFS {
fs := storagetest.FakeFS{}

View File

@ -0,0 +1,293 @@
package scanner_test
import (
"context"
"path/filepath"
"testing/fstest"
"github.com/Masterminds/squirrel"
"github.com/navidrome/navidrome/conf"
"github.com/navidrome/navidrome/conf/configtest"
"github.com/navidrome/navidrome/core"
"github.com/navidrome/navidrome/core/artwork"
"github.com/navidrome/navidrome/core/metrics"
"github.com/navidrome/navidrome/core/storage/storagetest"
"github.com/navidrome/navidrome/db"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/model/request"
"github.com/navidrome/navidrome/persistence"
"github.com/navidrome/navidrome/scanner"
"github.com/navidrome/navidrome/server/events"
"github.com/navidrome/navidrome/tests"
"github.com/navidrome/navidrome/utils/slice"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// Integration specs for selective scanning (Scanner.ScanFolders): a real
// SQLite database in a temp dir plus a fake "fake://" storage backend.
var _ = Describe("ScanFolders", Ordered, func() {
var ctx context.Context
var lib model.Library
var ds model.DataStore
var s model.Scanner
var fsys storagetest.FakeFS
// One-time setup: admin user context and a dedicated on-disk test DB,
// limited to a single connection to avoid SQLite locking issues.
BeforeAll(func() {
ctx = request.WithUser(GinkgoT().Context(), model.User{ID: "123", IsAdmin: true})
tmpDir := GinkgoT().TempDir()
conf.Server.DbPath = filepath.Join(tmpDir, "test-selective-scan.db?_journal_mode=WAL")
log.Warn("Using DB at " + conf.Server.DbPath)
db.Db().SetMaxOpenConns(1)
})
// Per-spec setup: fresh config, clean DB, one library rooted at the fake FS.
BeforeEach(func() {
DeferCleanup(configtest.SetupConfig())
conf.Server.MusicFolder = "fake:///music"
conf.Server.DevExternalScanner = false
db.Init(ctx)
DeferCleanup(func() {
Expect(tests.ClearDB()).To(Succeed())
})
ds = persistence.New(db.Db())
// Create the admin user in the database to match the context
adminUser := model.User{
ID: "123",
UserName: "admin",
Name: "Admin User",
IsAdmin: true,
NewPassword: "password",
}
Expect(ds.User(ctx).Put(&adminUser)).To(Succeed())
s = scanner.New(ctx, ds, artwork.NoopCacheWarmer(), events.NoopBroker(),
core.NewPlaylists(ds), metrics.NewNoopInstance())
lib = model.Library{ID: 1, Name: "Fake Library", Path: "fake:///music"}
Expect(ds.Library(ctx).Put(&lib)).To(Succeed())
// Initialize fake filesystem
fsys = storagetest.FakeFS{}
storagetest.Register("fake", &fsys)
})
Describe("Adding tracks to the library", func() {
It("scans specified folders recursively including all subdirectories", func() {
rock := template(_t{"albumartist": "Rock Artist", "album": "Rock Album"})
jazz := template(_t{"albumartist": "Jazz Artist", "album": "Jazz Album"})
pop := template(_t{"albumartist": "Pop Artist", "album": "Pop Album"})
createFS(fstest.MapFS{
"rock/track1.mp3": rock(track(1, "Rock Track 1")),
"rock/track2.mp3": rock(track(2, "Rock Track 2")),
"rock/subdir/track3.mp3": rock(track(3, "Rock Track 3")),
"jazz/track4.mp3": jazz(track(1, "Jazz Track 1")),
"jazz/subdir/track5.mp3": jazz(track(2, "Jazz Track 2")),
"pop/track6.mp3": pop(track(1, "Pop Track 1")),
})
// Scan only the "rock" and "jazz" folders (including their subdirectories)
targets := []model.ScanTarget{
{LibraryID: lib.ID, FolderPath: "rock"},
{LibraryID: lib.ID, FolderPath: "jazz"},
}
warnings, err := s.ScanFolders(ctx, false, targets)
Expect(err).ToNot(HaveOccurred())
Expect(warnings).To(BeEmpty())
// Verify all tracks in rock and jazz folders (including subdirectories) were imported
allFiles, err := ds.MediaFile(ctx).GetAll()
Expect(err).ToNot(HaveOccurred())
// Should have 5 tracks (all rock and jazz tracks including subdirectories)
Expect(allFiles).To(HaveLen(5))
// Get the file paths
paths := slice.Map(allFiles, func(mf model.MediaFile) string {
return filepath.ToSlash(mf.Path)
})
// Verify the correct files were scanned (including subdirectories)
Expect(paths).To(ContainElements(
"rock/track1.mp3",
"rock/track2.mp3",
"rock/subdir/track3.mp3",
"jazz/track4.mp3",
"jazz/subdir/track5.mp3",
))
// Verify files in the pop folder were NOT scanned
Expect(paths).ToNot(ContainElement("pop/track6.mp3"))
})
})
Describe("Deleting folders", func() {
Context("when a child folder is deleted", func() {
var (
revolver, help func(...map[string]any) *fstest.MapFile
artistFolderID string
album1FolderID string
album2FolderID string
album1TrackIDs []string
album2TrackIDs []string
)
// Seed two albums under one artist, run a full scan, then record the
// folder/track IDs so the deletion specs can assert on them precisely.
BeforeEach(func() {
// Setup template functions for creating test files
revolver = storagetest.Template(_t{"albumartist": "The Beatles", "album": "Revolver", "year": 1966})
help = storagetest.Template(_t{"albumartist": "The Beatles", "album": "Help!", "year": 1965})
// Initial filesystem with nested folders
fsys.SetFiles(fstest.MapFS{
"The Beatles/Revolver/01 - Taxman.mp3": revolver(storagetest.Track(1, "Taxman")),
"The Beatles/Revolver/02 - Eleanor Rigby.mp3": revolver(storagetest.Track(2, "Eleanor Rigby")),
"The Beatles/Help!/01 - Help!.mp3": help(storagetest.Track(1, "Help!")),
"The Beatles/Help!/02 - The Night Before.mp3": help(storagetest.Track(2, "The Night Before")),
})
// First scan - import everything
_, err := s.ScanAll(ctx, true)
Expect(err).ToNot(HaveOccurred())
// Verify initial state - all folders exist
folders, err := ds.Folder(ctx).GetAll(model.QueryOptions{Filters: squirrel.Eq{"library_id": lib.ID}})
Expect(err).ToNot(HaveOccurred())
Expect(folders).To(HaveLen(4)) // root, Artist, Album1, Album2
// Store folder IDs for later verification
for _, f := range folders {
switch f.Name {
case "The Beatles":
artistFolderID = f.ID
case "Revolver":
album1FolderID = f.ID
case "Help!":
album2FolderID = f.ID
}
}
// Verify all tracks exist
allTracks, err := ds.MediaFile(ctx).GetAll()
Expect(err).ToNot(HaveOccurred())
Expect(allTracks).To(HaveLen(4))
// Store track IDs for later verification
for _, t := range allTracks {
if t.Album == "Revolver" {
album1TrackIDs = append(album1TrackIDs, t.ID)
} else if t.Album == "Help!" {
album2TrackIDs = append(album2TrackIDs, t.ID)
}
}
// Verify no tracks are missing initially
for _, t := range allTracks {
Expect(t.Missing).To(BeFalse())
}
})
It("should mark child folder and its tracks as missing when parent is scanned", func() {
// Delete the child folder (Help!) from the filesystem
fsys.SetFiles(fstest.MapFS{
"The Beatles/Revolver/01 - Taxman.mp3": revolver(storagetest.Track(1, "Taxman")),
"The Beatles/Revolver/02 - Eleanor Rigby.mp3": revolver(storagetest.Track(2, "Eleanor Rigby")),
// "The Beatles/Help!" folder and its contents are DELETED
})
// Run selective scan on the parent folder (Artist)
// This simulates what the watcher does when a child folder is deleted
_, err := s.ScanFolders(ctx, false, []model.ScanTarget{
{LibraryID: lib.ID, FolderPath: "The Beatles"},
})
Expect(err).ToNot(HaveOccurred())
// Verify the deleted child folder is now marked as missing
deletedFolder, err := ds.Folder(ctx).Get(album2FolderID)
Expect(err).ToNot(HaveOccurred())
Expect(deletedFolder.Missing).To(BeTrue(), "Deleted child folder should be marked as missing")
// Verify the deleted folder's tracks are marked as missing
for _, trackID := range album2TrackIDs {
track, err := ds.MediaFile(ctx).Get(trackID)
Expect(err).ToNot(HaveOccurred())
Expect(track.Missing).To(BeTrue(), "Track in deleted folder should be marked as missing")
}
// Verify the parent folder is still present and not marked as missing
parentFolder, err := ds.Folder(ctx).Get(artistFolderID)
Expect(err).ToNot(HaveOccurred())
Expect(parentFolder.Missing).To(BeFalse(), "Parent folder should not be marked as missing")
// Verify the sibling folder and its tracks are still present and not missing
siblingFolder, err := ds.Folder(ctx).Get(album1FolderID)
Expect(err).ToNot(HaveOccurred())
Expect(siblingFolder.Missing).To(BeFalse(), "Sibling folder should not be marked as missing")
for _, trackID := range album1TrackIDs {
track, err := ds.MediaFile(ctx).Get(trackID)
Expect(err).ToNot(HaveOccurred())
Expect(track.Missing).To(BeFalse(), "Track in sibling folder should not be marked as missing")
}
})
It("should mark deeply nested child folders as missing", func() {
// Add a deeply nested folder structure
fsys.SetFiles(fstest.MapFS{
"The Beatles/Revolver/01 - Taxman.mp3": revolver(storagetest.Track(1, "Taxman")),
"The Beatles/Revolver/02 - Eleanor Rigby.mp3": revolver(storagetest.Track(2, "Eleanor Rigby")),
"The Beatles/Help!/01 - Help!.mp3": help(storagetest.Track(1, "Help!")),
"The Beatles/Help!/02 - The Night Before.mp3": help(storagetest.Track(2, "The Night Before")),
"The Beatles/Help!/Bonus/01 - Bonus Track.mp3": help(storagetest.Track(99, "Bonus Track")),
"The Beatles/Help!/Bonus/Nested/01 - Deep Track.mp3": help(storagetest.Track(100, "Deep Track")),
})
// Rescan to import the new nested structure
_, err := s.ScanAll(ctx, true)
Expect(err).ToNot(HaveOccurred())
// Verify nested folders were created
allFolders, err := ds.Folder(ctx).GetAll(model.QueryOptions{Filters: squirrel.Eq{"library_id": lib.ID}})
Expect(err).ToNot(HaveOccurred())
Expect(len(allFolders)).To(BeNumerically(">", 4), "Should have more folders with nested structure")
// Now delete the entire Help! folder including nested children
fsys.SetFiles(fstest.MapFS{
"The Beatles/Revolver/01 - Taxman.mp3": revolver(storagetest.Track(1, "Taxman")),
"The Beatles/Revolver/02 - Eleanor Rigby.mp3": revolver(storagetest.Track(2, "Eleanor Rigby")),
// All Help! subfolders are deleted
})
// Run selective scan on parent
_, err = s.ScanFolders(ctx, false, []model.ScanTarget{
{LibraryID: lib.ID, FolderPath: "The Beatles"},
})
Expect(err).ToNot(HaveOccurred())
// Verify all Help! folders (including nested ones) are marked as missing
missingFolders, err := ds.Folder(ctx).GetAll(model.QueryOptions{
Filters: squirrel.And{
squirrel.Eq{"library_id": lib.ID},
squirrel.Eq{"missing": true},
},
})
Expect(err).ToNot(HaveOccurred())
Expect(len(missingFolders)).To(BeNumerically(">", 0), "At least one folder should be marked as missing")
// Verify all tracks in deleted folders are marked as missing
allTracks, err := ds.MediaFile(ctx).GetAll()
Expect(err).ToNot(HaveOccurred())
Expect(allTracks).To(HaveLen(6))
for _, track := range allTracks {
if track.Album == "Help!" {
Expect(track.Missing).To(BeTrue(), "All tracks in deleted Help! folder should be marked as missing")
} else if track.Album == "Revolver" {
Expect(track.Missing).To(BeFalse(), "Tracks in Revolver folder should not be marked as missing")
}
}
})
})
})
})

View File

@ -34,19 +34,19 @@ type _t = map[string]any
var template = storagetest.Template
var track = storagetest.Track
func createFS(files fstest.MapFS) storagetest.FakeFS {
fs := storagetest.FakeFS{}
fs.SetFiles(files)
storagetest.Register("fake", &fs)
return fs
}
var _ = Describe("Scanner", Ordered, func() {
var ctx context.Context
var lib model.Library
var ds *tests.MockDataStore
var mfRepo *mockMediaFileRepo
var s scanner.Scanner
createFS := func(files fstest.MapFS) storagetest.FakeFS {
fs := storagetest.FakeFS{}
fs.SetFiles(files)
storagetest.Register("fake", &fs)
return fs
}
var s model.Scanner
BeforeAll(func() {
ctx = request.WithUser(GinkgoT().Context(), model.User{ID: "123", IsAdmin: true})
@ -478,6 +478,56 @@ var _ = Describe("Scanner", Ordered, func() {
Expect(mf.Missing).To(BeFalse())
})
It("marks tracks as missing when scanning a deleted folder with ScanFolders", func() {
By("Adding a third track to Revolver to have more test data")
fsys.Add("The Beatles/Revolver/03 - I'm Only Sleeping.mp3", revolver(track(3, "I'm Only Sleeping")))
Expect(runScanner(ctx, false)).To(Succeed())
By("Verifying initial state has 5 tracks")
Expect(ds.MediaFile(ctx).CountAll(model.QueryOptions{
Filters: squirrel.Eq{"missing": false},
})).To(Equal(int64(5)))
By("Removing the entire Revolver folder from filesystem")
fsys.Remove("The Beatles/Revolver/01 - Taxman.mp3")
fsys.Remove("The Beatles/Revolver/02 - Eleanor Rigby.mp3")
fsys.Remove("The Beatles/Revolver/03 - I'm Only Sleeping.mp3")
By("Scanning the parent folder (simulating watcher behavior)")
targets := []model.ScanTarget{
{LibraryID: lib.ID, FolderPath: "The Beatles"},
}
_, err := s.ScanFolders(ctx, false, targets)
Expect(err).To(Succeed())
By("Checking all Revolver tracks are marked as missing")
mf, err := findByPath("The Beatles/Revolver/01 - Taxman.mp3")
Expect(err).ToNot(HaveOccurred())
Expect(mf.Missing).To(BeTrue())
mf, err = findByPath("The Beatles/Revolver/02 - Eleanor Rigby.mp3")
Expect(err).ToNot(HaveOccurred())
Expect(mf.Missing).To(BeTrue())
mf, err = findByPath("The Beatles/Revolver/03 - I'm Only Sleeping.mp3")
Expect(err).ToNot(HaveOccurred())
Expect(mf.Missing).To(BeTrue())
By("Checking the Help! tracks are not affected")
mf, err = findByPath("The Beatles/Help!/01 - Help!.mp3")
Expect(err).ToNot(HaveOccurred())
Expect(mf.Missing).To(BeFalse())
mf, err = findByPath("The Beatles/Help!/02 - The Night Before.mp3")
Expect(err).ToNot(HaveOccurred())
Expect(mf.Missing).To(BeFalse())
By("Verifying only 2 non-missing tracks remain (Help! tracks)")
Expect(ds.MediaFile(ctx).CountAll(model.QueryOptions{
Filters: squirrel.Eq{"missing": false},
})).To(Equal(int64(2)))
})
It("does not override artist fields when importing an undertagged file", func() {
By("Making sure artist in the DB contains MBID and sort name")
aa, err := ds.Artist(ctx).GetAll(model.QueryOptions{

View File

@ -1,7 +1,6 @@
package scanner
import (
"bufio"
"context"
"io/fs"
"maps"
@ -11,37 +10,69 @@ import (
"strings"
"github.com/navidrome/navidrome/conf"
"github.com/navidrome/navidrome/consts"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/utils"
ignore "github.com/sabhiram/go-gitignore"
)
func walkDirTree(ctx context.Context, job *scanJob) (<-chan *folderEntry, error) {
// walkDirTree recursively walks the directory tree starting from the given targetFolders.
// If no targetFolders are provided, it starts from the root folder (".").
// It returns a channel of folderEntry pointers representing each folder found.
func walkDirTree(ctx context.Context, job *scanJob, targetFolders ...string) (<-chan *folderEntry, error) {
results := make(chan *folderEntry)
folders := targetFolders
if len(targetFolders) == 0 {
// No specific folders provided, scan the root folder
folders = []string{"."}
}
go func() {
defer close(results)
err := walkFolder(ctx, job, ".", nil, results)
if err != nil {
log.Error(ctx, "Scanner: There were errors reading directories from filesystem", "path", job.lib.Path, err)
return
for _, folderPath := range folders {
if utils.IsCtxDone(ctx) {
return
}
// Check if target folder exists before walking it
// If it doesn't exist (e.g., deleted between watcher detection and scan execution),
// skip it so it remains in job.lastUpdates and gets handled in following steps
_, err := fs.Stat(job.fs, folderPath)
if err != nil {
log.Warn(ctx, "Scanner: Target folder does not exist.", "path", folderPath, err)
continue
}
// Create checker and push patterns from root to this folder
checker := newIgnoreChecker(job.fs)
err = checker.PushAllParents(ctx, folderPath)
if err != nil {
log.Error(ctx, "Scanner: Error pushing ignore patterns for target folder", "path", folderPath, err)
continue
}
// Recursively walk this folder and all its children
err = walkFolder(ctx, job, folderPath, checker, results)
if err != nil {
log.Error(ctx, "Scanner: Error walking target folder", "path", folderPath, err)
continue
}
}
log.Debug(ctx, "Scanner: Finished reading folders", "lib", job.lib.Name, "path", job.lib.Path, "numFolders", job.numFolders.Load())
log.Debug(ctx, "Scanner: Finished reading target folders", "lib", job.lib.Name, "path", job.lib.Path, "numFolders", job.numFolders.Load())
}()
return results, nil
}
func walkFolder(ctx context.Context, job *scanJob, currentFolder string, ignorePatterns []string, results chan<- *folderEntry) error {
ignorePatterns = loadIgnoredPatterns(ctx, job.fs, currentFolder, ignorePatterns)
func walkFolder(ctx context.Context, job *scanJob, currentFolder string, checker *IgnoreChecker, results chan<- *folderEntry) error {
// Push patterns for this folder onto the stack
_ = checker.Push(ctx, currentFolder)
defer checker.Pop() // Pop patterns when leaving this folder
folder, children, err := loadDir(ctx, job, currentFolder, ignorePatterns)
folder, children, err := loadDir(ctx, job, currentFolder, checker)
if err != nil {
log.Warn(ctx, "Scanner: Error loading dir. Skipping", "path", currentFolder, err)
return nil
}
for _, c := range children {
err := walkFolder(ctx, job, c, ignorePatterns, results)
err := walkFolder(ctx, job, c, checker, results)
if err != nil {
return err
}
@ -59,50 +90,17 @@ func walkFolder(ctx context.Context, job *scanJob, currentFolder string, ignoreP
return nil
}
// loadIgnoredPatterns reads the .ndignore file (if any) in currentFolder and
// returns the combined pattern list: the patterns inherited from parent folders
// (currentPatterns) followed by the patterns found in this folder. An empty
// .ndignore file ignores everything under the folder ("**/*"). The returned
// slice is always a fresh copy, so callers can keep their own slice unaliased.
func loadIgnoredPatterns(ctx context.Context, fsys fs.FS, currentFolder string, currentPatterns []string) []string {
	ignoreFilePath := path.Join(currentFolder, consts.ScanIgnoreFile)
	var newPatterns []string
	if _, err := fs.Stat(fsys, ignoreFilePath); err == nil {
		// Read and parse the .ndignore file
		ignoreFile, err := fsys.Open(ignoreFilePath)
		if err != nil {
			log.Warn(ctx, "Scanner: Error opening .ndignore file", "path", ignoreFilePath, err)
			// Continue with previous patterns
		} else {
			defer ignoreFile.Close()
			scanner := bufio.NewScanner(ignoreFile)
			for scanner.Scan() {
				line := scanner.Text()
				if line == "" || strings.HasPrefix(line, "#") {
					continue // Skip empty lines and comments
				}
				newPatterns = append(newPatterns, line)
			}
			if err := scanner.Err(); err != nil {
				// Fixed log message: the file being read is .ndignore, not ".ignore"
				log.Warn(ctx, "Scanner: Error reading .ndignore file", "path", ignoreFilePath, err)
			}
		}
		// If the .ndignore file is empty, mimic the current behavior and ignore everything
		if len(newPatterns) == 0 {
			log.Trace(ctx, "Scanner: .ndignore file is empty, ignoring everything", "path", currentFolder)
			newPatterns = []string{"**/*"}
		} else {
			log.Trace(ctx, "Scanner: .ndignore file found ", "path", ignoreFilePath, "patterns", newPatterns)
		}
	}
	// Combine the patterns from the .ndignore file with the ones passed as argument,
	// copying first so the caller's slice is never mutated by the append
	combinedPatterns := append([]string{}, currentPatterns...)
	return append(combinedPatterns, newPatterns...)
}
func loadDir(ctx context.Context, job *scanJob, dirPath string, ignorePatterns []string) (folder *folderEntry, children []string, err error) {
folder = newFolderEntry(job, dirPath)
func loadDir(ctx context.Context, job *scanJob, dirPath string, checker *IgnoreChecker) (folder *folderEntry, children []string, err error) {
// Check if directory exists before creating the folder entry
// This is important to avoid removing the folder from lastUpdates if it doesn't exist
dirInfo, err := fs.Stat(job.fs, dirPath)
if err != nil {
log.Warn(ctx, "Scanner: Error stating dir", "path", dirPath, err)
return nil, nil, err
}
// Now that we know the folder exists, create the entry (which removes it from lastUpdates)
folder = job.createFolderEntry(dirPath)
folder.modTime = dirInfo.ModTime()
dir, err := job.fs.Open(dirPath)
@ -117,12 +115,11 @@ func loadDir(ctx context.Context, job *scanJob, dirPath string, ignorePatterns [
return folder, children, err
}
ignoreMatcher := ignore.CompileIgnoreLines(ignorePatterns...)
entries := fullReadDir(ctx, dirFile)
children = make([]string, 0, len(entries))
for _, entry := range entries {
entryPath := path.Join(dirPath, entry.Name())
if len(ignorePatterns) > 0 && isScanIgnored(ctx, ignoreMatcher, entryPath) {
if checker.ShouldIgnore(ctx, entryPath) {
log.Trace(ctx, "Scanner: Ignoring entry", "path", entryPath)
continue
}
@ -234,6 +231,7 @@ func isDirReadable(ctx context.Context, fsys fs.FS, dirPath string) bool {
var ignoredDirs = []string{
"$RECYCLE.BIN",
"#snapshot",
"@Recycle",
"@Recently-Snapshot",
".streams",
"lost+found",
@ -254,11 +252,3 @@ func isDirIgnored(name string) bool {
// isEntryIgnored reports whether a directory entry should be skipped because it
// is a hidden dotfile. Names starting with ".." are not treated as hidden.
func isEntryIgnored(name string) bool {
	if !strings.HasPrefix(name, ".") {
		return false
	}
	return !strings.HasPrefix(name, "..")
}
// isScanIgnored reports whether entryPath matches any of the compiled
// .ndignore patterns, emitting a trace log entry for each match.
func isScanIgnored(ctx context.Context, matcher *ignore.GitIgnore, entryPath string) bool {
	if !matcher.MatchesPath(entryPath) {
		return false
	}
	log.Trace(ctx, "Scanner: Ignoring entry matching .ndignore: ", "path", entryPath)
	return true
}

View File

@ -25,82 +25,196 @@ var _ = Describe("walk_dir_tree", func() {
ctx context.Context
)
BeforeEach(func() {
DeferCleanup(configtest.SetupConfig())
ctx = GinkgoT().Context()
fsys = &mockMusicFS{
FS: fstest.MapFS{
"root/a/.ndignore": {Data: []byte("ignored/*")},
"root/a/f1.mp3": {},
"root/a/f2.mp3": {},
"root/a/ignored/bad.mp3": {},
"root/b/cover.jpg": {},
"root/c/f3": {},
"root/d": {},
"root/d/.ndignore": {},
"root/d/f1.mp3": {},
"root/d/f2.mp3": {},
"root/d/f3.mp3": {},
"root/e/original/f1.mp3": {},
"root/e/symlink": {Mode: fs.ModeSymlink, Data: []byte("root/e/original")},
Context("full library", func() {
BeforeEach(func() {
DeferCleanup(configtest.SetupConfig())
ctx = GinkgoT().Context()
fsys = &mockMusicFS{
FS: fstest.MapFS{
"root/a/.ndignore": {Data: []byte("ignored/*")},
"root/a/f1.mp3": {},
"root/a/f2.mp3": {},
"root/a/ignored/bad.mp3": {},
"root/b/cover.jpg": {},
"root/c/f3": {},
"root/d": {},
"root/d/.ndignore": {},
"root/d/f1.mp3": {},
"root/d/f2.mp3": {},
"root/d/f3.mp3": {},
"root/e/original/f1.mp3": {},
"root/e/symlink": {Mode: fs.ModeSymlink, Data: []byte("original")},
},
}
job = &scanJob{
fs: fsys,
lib: model.Library{Path: "/music"},
}
})
// Helper function to call walkDirTree and collect folders from the results channel
getFolders := func() map[string]*folderEntry {
results, err := walkDirTree(ctx, job)
Expect(err).ToNot(HaveOccurred())
folders := map[string]*folderEntry{}
g := errgroup.Group{}
g.Go(func() error {
for folder := range results {
folders[folder.path] = folder
}
return nil
})
_ = g.Wait()
return folders
}
DescribeTable("symlink handling",
func(followSymlinks bool, expectedFolderCount int) {
conf.Server.Scanner.FollowSymlinks = followSymlinks
folders := getFolders()
Expect(folders).To(HaveLen(expectedFolderCount + 2)) // +2 for `.` and `root`
// Basic folder structure checks
Expect(folders["root/a"].audioFiles).To(SatisfyAll(
HaveLen(2),
HaveKey("f1.mp3"),
HaveKey("f2.mp3"),
))
Expect(folders["root/a"].imageFiles).To(BeEmpty())
Expect(folders["root/b"].audioFiles).To(BeEmpty())
Expect(folders["root/b"].imageFiles).To(SatisfyAll(
HaveLen(1),
HaveKey("cover.jpg"),
))
Expect(folders["root/c"].audioFiles).To(BeEmpty())
Expect(folders["root/c"].imageFiles).To(BeEmpty())
Expect(folders).ToNot(HaveKey("root/d"))
// Symlink specific checks
if followSymlinks {
Expect(folders["root/e/symlink"].audioFiles).To(HaveLen(1))
} else {
Expect(folders).ToNot(HaveKey("root/e/symlink"))
}
},
}
job = &scanJob{
fs: fsys,
lib: model.Library{Path: "/music"},
}
Entry("with symlinks enabled", true, 7),
Entry("with symlinks disabled", false, 6),
)
})
// Helper function to call walkDirTree and collect folders from the results channel
getFolders := func() map[string]*folderEntry {
results, err := walkDirTree(ctx, job)
Expect(err).ToNot(HaveOccurred())
folders := map[string]*folderEntry{}
g := errgroup.Group{}
g.Go(func() error {
for folder := range results {
folders[folder.path] = folder
Context("with target folders", func() {
BeforeEach(func() {
DeferCleanup(configtest.SetupConfig())
ctx = GinkgoT().Context()
fsys = &mockMusicFS{
FS: fstest.MapFS{
"Artist/Album1/track1.mp3": {},
"Artist/Album1/track2.mp3": {},
"Artist/Album2/track1.mp3": {},
"Artist/Album2/track2.mp3": {},
"Artist/Album2/Sub/track3.mp3": {},
"OtherArtist/Album3/track1.mp3": {},
},
}
job = &scanJob{
fs: fsys,
lib: model.Library{Path: "/music"},
}
return nil
})
_ = g.Wait()
return folders
}
DescribeTable("symlink handling",
func(followSymlinks bool, expectedFolderCount int) {
conf.Server.Scanner.FollowSymlinks = followSymlinks
folders := getFolders()
It("should recursively walk all subdirectories of target folders", func() {
results, err := walkDirTree(ctx, job, "Artist")
Expect(err).ToNot(HaveOccurred())
Expect(folders).To(HaveLen(expectedFolderCount + 2)) // +2 for `.` and `root`
folders := map[string]*folderEntry{}
g := errgroup.Group{}
g.Go(func() error {
for folder := range results {
folders[folder.path] = folder
}
return nil
})
_ = g.Wait()
// Basic folder structure checks
Expect(folders["root/a"].audioFiles).To(SatisfyAll(
HaveLen(2),
HaveKey("f1.mp3"),
HaveKey("f2.mp3"),
// Should include the target folder and all its descendants
Expect(folders).To(SatisfyAll(
HaveKey("Artist"),
HaveKey("Artist/Album1"),
HaveKey("Artist/Album2"),
HaveKey("Artist/Album2/Sub"),
))
Expect(folders["root/a"].imageFiles).To(BeEmpty())
Expect(folders["root/b"].audioFiles).To(BeEmpty())
Expect(folders["root/b"].imageFiles).To(SatisfyAll(
HaveLen(1),
HaveKey("cover.jpg"),
))
Expect(folders["root/c"].audioFiles).To(BeEmpty())
Expect(folders["root/c"].imageFiles).To(BeEmpty())
Expect(folders).ToNot(HaveKey("root/d"))
// Symlink specific checks
if followSymlinks {
Expect(folders["root/e/symlink"].audioFiles).To(HaveLen(1))
} else {
Expect(folders).ToNot(HaveKey("root/e/symlink"))
// Should not include folders outside the target
Expect(folders).ToNot(HaveKey("OtherArtist"))
Expect(folders).ToNot(HaveKey("OtherArtist/Album3"))
// Verify audio files are present
Expect(folders["Artist/Album1"].audioFiles).To(HaveLen(2))
Expect(folders["Artist/Album2"].audioFiles).To(HaveLen(2))
Expect(folders["Artist/Album2/Sub"].audioFiles).To(HaveLen(1))
})
It("should handle multiple target folders", func() {
results, err := walkDirTree(ctx, job, "Artist/Album1", "OtherArtist")
Expect(err).ToNot(HaveOccurred())
folders := map[string]*folderEntry{}
g := errgroup.Group{}
g.Go(func() error {
for folder := range results {
folders[folder.path] = folder
}
return nil
})
_ = g.Wait()
// Should include both target folders and their descendants
Expect(folders).To(SatisfyAll(
HaveKey("Artist/Album1"),
HaveKey("OtherArtist"),
HaveKey("OtherArtist/Album3"),
))
// Should not include other folders
Expect(folders).ToNot(HaveKey("Artist"))
Expect(folders).ToNot(HaveKey("Artist/Album2"))
Expect(folders).ToNot(HaveKey("Artist/Album2/Sub"))
})
It("should skip non-existent target folders and preserve them in lastUpdates", func() {
// Setup job with lastUpdates for both existing and non-existing folders
job.lastUpdates = map[string]model.FolderUpdateInfo{
model.FolderID(job.lib, "Artist/Album1"): {},
model.FolderID(job.lib, "NonExistent/DeletedFolder"): {},
model.FolderID(job.lib, "OtherArtist/Album3"): {},
}
},
Entry("with symlinks enabled", true, 7),
Entry("with symlinks disabled", false, 6),
)
// Try to scan existing folder and non-existing folder
results, err := walkDirTree(ctx, job, "Artist/Album1", "NonExistent/DeletedFolder")
Expect(err).ToNot(HaveOccurred())
// Collect results
folders := map[string]struct{}{}
for folder := range results {
folders[folder.path] = struct{}{}
}
// Should only include the existing folder
Expect(folders).To(HaveKey("Artist/Album1"))
Expect(folders).ToNot(HaveKey("NonExistent/DeletedFolder"))
// The non-existent folder should still be in lastUpdates (not removed by popLastUpdate)
Expect(job.lastUpdates).To(HaveKey(model.FolderID(job.lib, "NonExistent/DeletedFolder")))
// The existing folder should have been removed from lastUpdates
Expect(job.lastUpdates).ToNot(HaveKey(model.FolderID(job.lib, "Artist/Album1")))
// Folders not in targets should remain in lastUpdates
Expect(job.lastUpdates).To(HaveKey(model.FolderID(job.lib, "OtherArtist/Album3")))
})
})
})
Describe("helper functions", func() {

View File

@ -24,9 +24,9 @@ type Watcher interface {
type watcher struct {
mainCtx context.Context
ds model.DataStore
scanner Scanner
scanner model.Scanner
triggerWait time.Duration
watcherNotify chan model.Library
watcherNotify chan scanNotification
libraryWatchers map[int]*libraryWatcherInstance
mu sync.RWMutex
}
@ -36,14 +36,19 @@ type libraryWatcherInstance struct {
cancel context.CancelFunc
}
// scanNotification carries a filesystem-change event from a per-library
// watcher to the main scan-triggering loop, identifying which folder of
// which library changed.
type scanNotification struct {
	// Library is the library where the change was detected.
	Library *model.Library
	// FolderPath is the changed folder, relative to the library root;
	// an empty string means the library root itself should be scanned.
	FolderPath string
}
// GetWatcher returns the watcher singleton
func GetWatcher(ds model.DataStore, s Scanner) Watcher {
func GetWatcher(ds model.DataStore, s model.Scanner) Watcher {
return singleton.GetInstance(func() *watcher {
return &watcher{
ds: ds,
scanner: s,
triggerWait: conf.Server.Scanner.WatcherWait,
watcherNotify: make(chan model.Library, 1),
watcherNotify: make(chan scanNotification, 1),
libraryWatchers: make(map[int]*libraryWatcherInstance),
}
})
@ -68,11 +73,11 @@ func (w *watcher) Run(ctx context.Context) error {
// Main scan triggering loop
trigger := time.NewTimer(w.triggerWait)
trigger.Stop()
waiting := false
targets := make(map[model.ScanTarget]struct{})
for {
select {
case <-trigger.C:
log.Info("Watcher: Triggering scan")
log.Info("Watcher: Triggering scan for changed folders", "numTargets", len(targets))
status, err := w.scanner.Status(ctx)
if err != nil {
log.Error(ctx, "Watcher: Error retrieving Scanner status", err)
@ -83,9 +88,23 @@ func (w *watcher) Run(ctx context.Context) error {
trigger.Reset(w.triggerWait * 3)
continue
}
waiting = false
// Convert targets map to slice
targetSlice := make([]model.ScanTarget, 0, len(targets))
for target := range targets {
targetSlice = append(targetSlice, target)
}
// Clear targets for next batch
targets = make(map[model.ScanTarget]struct{})
go func() {
_, err := w.scanner.ScanAll(ctx, false)
var err error
if conf.Server.DevSelectiveWatcher {
_, err = w.scanner.ScanFolders(ctx, false, targetSlice)
} else {
_, err = w.scanner.ScanAll(ctx, false)
}
if err != nil {
log.Error(ctx, "Watcher: Error scanning", err)
} else {
@ -102,13 +121,22 @@ func (w *watcher) Run(ctx context.Context) error {
w.libraryWatchers = make(map[int]*libraryWatcherInstance)
w.mu.Unlock()
return nil
case lib := <-w.watcherNotify:
if !waiting {
log.Debug(ctx, "Watcher: Detected changes. Waiting for more changes before triggering scan",
"libraryID", lib.ID, "name", lib.Name, "path", lib.Path)
waiting = true
}
case notification := <-w.watcherNotify:
// Reset the trigger timer for debounce
trigger.Reset(w.triggerWait)
lib := notification.Library
folderPath := notification.FolderPath
// If already scheduled for scan, skip
target := model.ScanTarget{LibraryID: lib.ID, FolderPath: folderPath}
if _, exists := targets[target]; exists {
continue
}
targets[target] = struct{}{}
log.Debug(ctx, "Watcher: Detected changes. Waiting for more changes before triggering scan",
"libraryID", lib.ID, "name", lib.Name, "path", lib.Path, "folderPath", folderPath)
}
}
}
@ -199,13 +227,18 @@ func (w *watcher) watchLibrary(ctx context.Context, lib *model.Library) error {
log.Info(ctx, "Watcher started for library", "libraryID", lib.ID, "name", lib.Name, "path", lib.Path, "absoluteLibPath", absLibPath)
return w.processLibraryEvents(ctx, lib, fsys, c, absLibPath)
}
// processLibraryEvents processes filesystem events for a library.
func (w *watcher) processLibraryEvents(ctx context.Context, lib *model.Library, fsys storage.MusicFS, events <-chan string, absLibPath string) error {
for {
select {
case <-ctx.Done():
log.Debug(ctx, "Watcher stopped due to context cancellation", "libraryID", lib.ID, "name", lib.Name)
return nil
case path := <-c:
path, err = filepath.Rel(absLibPath, path)
case path := <-events:
path, err := filepath.Rel(absLibPath, path)
if err != nil {
log.Error(ctx, "Error getting relative path", "libraryID", lib.ID, "absolutePath", absLibPath, "path", path, err)
continue
@ -215,12 +248,27 @@ func (w *watcher) watchLibrary(ctx context.Context, lib *model.Library) error {
log.Trace(ctx, "Ignoring change", "libraryID", lib.ID, "path", path)
continue
}
log.Trace(ctx, "Detected change", "libraryID", lib.ID, "path", path, "absoluteLibPath", absLibPath)
// Check if the original path (before resolution) matches .ndignore patterns
// This is crucial for deleted folders - if a deleted folder matches .ndignore,
// we should ignore it BEFORE resolveFolderPath walks up to the parent
if w.shouldIgnoreFolderPath(ctx, fsys, path) {
log.Debug(ctx, "Ignoring change matching .ndignore pattern", "libraryID", lib.ID, "path", path)
continue
}
// Find the folder to scan - validate path exists as directory, walk up if needed
folderPath := resolveFolderPath(fsys, path)
// Double-check after resolution in case the resolved path is different and also matches patterns
if folderPath != path && w.shouldIgnoreFolderPath(ctx, fsys, folderPath) {
log.Trace(ctx, "Ignoring change in folder matching .ndignore pattern", "libraryID", lib.ID, "folderPath", folderPath)
continue
}
// Notify the main watcher of changes
select {
case w.watcherNotify <- *lib:
case w.watcherNotify <- scanNotification{Library: lib, FolderPath: folderPath}:
default:
// Channel is full, notification already pending
}
@ -228,6 +276,47 @@ func (w *watcher) watchLibrary(ctx context.Context, lib *model.Library) error {
}
}
// resolveFolderPath takes a path (which may be a file or directory) and returns
// the folder path to scan. If the path is a file, it walks up to find the parent
// directory. Returns empty string if the path should scan the library root.
func resolveFolderPath(fsys fs.FS, path string) string {
// Handle root paths immediately
if path == "." || path == "" {
return ""
}
folderPath := path
for {
info, err := fs.Stat(fsys, folderPath)
if err == nil && info.IsDir() {
// Found a valid directory
return folderPath
}
if folderPath == "." || folderPath == "" {
// Reached root, scan entire library
return ""
}
// Walk up the tree
dir, _ := filepath.Split(folderPath)
if dir == "" || dir == "." {
return ""
}
// Remove trailing slash
folderPath = filepath.Clean(dir)
}
}
// shouldIgnoreFolderPath reports whether folderPath is excluded by .ndignore
// patterns anywhere along its ancestor chain in the library filesystem. It
// builds a fresh IgnoreChecker and loads every parent folder's patterns before
// evaluating folderPath itself.
func (w *watcher) shouldIgnoreFolderPath(ctx context.Context, fsys storage.MusicFS, folderPath string) bool {
	ic := newIgnoreChecker(fsys)
	if err := ic.PushAllParents(ctx, folderPath); err != nil {
		log.Warn(ctx, "Watcher: Error pushing ignore patterns for folder", "path", folderPath, err)
	}
	return ic.ShouldIgnore(ctx, folderPath)
}
func isIgnoredPath(_ context.Context, _ fs.FS, path string) bool {
baseDir, name := filepath.Split(path)
switch {

491
scanner/watcher_test.go Normal file
View File

@ -0,0 +1,491 @@
package scanner
import (
"context"
"io/fs"
"path/filepath"
"testing/fstest"
"time"
"github.com/navidrome/navidrome/conf"
"github.com/navidrome/navidrome/conf/configtest"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/tests"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
var _ = Describe("Watcher", func() {
var ctx context.Context
var cancel context.CancelFunc
var mockScanner *tests.MockScanner
var mockDS *tests.MockDataStore
var w *watcher
var lib *model.Library
BeforeEach(func() {
DeferCleanup(configtest.SetupConfig())
conf.Server.Scanner.WatcherWait = 50 * time.Millisecond // Short wait for tests
ctx, cancel = context.WithCancel(context.Background())
DeferCleanup(cancel)
lib = &model.Library{
ID: 1,
Name: "Test Library",
Path: "/test/library",
}
// Set up mocks
mockScanner = tests.NewMockScanner()
mockDS = &tests.MockDataStore{}
mockLibRepo := &tests.MockLibraryRepo{}
mockLibRepo.SetData(model.Libraries{*lib})
mockDS.MockedLibrary = mockLibRepo
// Create a new watcher instance (not singleton) for testing
w = &watcher{
ds: mockDS,
scanner: mockScanner,
triggerWait: conf.Server.Scanner.WatcherWait,
watcherNotify: make(chan scanNotification, 10),
libraryWatchers: make(map[int]*libraryWatcherInstance),
mainCtx: ctx,
}
})
Describe("Target Collection and Deduplication", func() {
BeforeEach(func() {
// Start watcher in background
go func() {
_ = w.Run(ctx)
}()
// Give watcher time to initialize
time.Sleep(10 * time.Millisecond)
})
It("creates separate targets for different folders", func() {
// Send notifications for different folders
w.watcherNotify <- scanNotification{Library: lib, FolderPath: "artist1"}
time.Sleep(10 * time.Millisecond)
w.watcherNotify <- scanNotification{Library: lib, FolderPath: "artist2"}
// Wait for watcher to process and trigger scan
Eventually(func() int {
return mockScanner.GetScanFoldersCallCount()
}, 200*time.Millisecond, 10*time.Millisecond).Should(Equal(1))
// Verify two targets
calls := mockScanner.GetScanFoldersCalls()
Expect(calls).To(HaveLen(1))
Expect(calls[0].Targets).To(HaveLen(2))
// Extract folder paths
folderPaths := make(map[string]bool)
for _, target := range calls[0].Targets {
Expect(target.LibraryID).To(Equal(1))
folderPaths[target.FolderPath] = true
}
Expect(folderPaths).To(HaveKey("artist1"))
Expect(folderPaths).To(HaveKey("artist2"))
})
It("handles different folder paths correctly", func() {
// Send notification for nested folder
w.watcherNotify <- scanNotification{Library: lib, FolderPath: "artist1/album1"}
// Wait for watcher to process and trigger scan
Eventually(func() int {
return mockScanner.GetScanFoldersCallCount()
}, 200*time.Millisecond, 10*time.Millisecond).Should(Equal(1))
// Verify the target
calls := mockScanner.GetScanFoldersCalls()
Expect(calls).To(HaveLen(1))
Expect(calls[0].Targets).To(HaveLen(1))
Expect(calls[0].Targets[0].FolderPath).To(Equal("artist1/album1"))
})
It("deduplicates folder and file within same folder", func() {
// Send notification for a folder
w.watcherNotify <- scanNotification{Library: lib, FolderPath: "artist1/album1"}
time.Sleep(10 * time.Millisecond)
// Send notification for same folder (as if file change was detected there)
// In practice, watchLibrary() would walk up from file path to folder
w.watcherNotify <- scanNotification{Library: lib, FolderPath: "artist1/album1"}
time.Sleep(10 * time.Millisecond)
// Send another for same folder
w.watcherNotify <- scanNotification{Library: lib, FolderPath: "artist1/album1"}
// Wait for watcher to process and trigger scan
Eventually(func() int {
return mockScanner.GetScanFoldersCallCount()
}, 200*time.Millisecond, 10*time.Millisecond).Should(Equal(1))
// Verify only one target despite multiple file/folder changes
calls := mockScanner.GetScanFoldersCalls()
Expect(calls).To(HaveLen(1))
Expect(calls[0].Targets).To(HaveLen(1))
Expect(calls[0].Targets[0].FolderPath).To(Equal("artist1/album1"))
})
})
Describe("Timer Behavior", func() {
BeforeEach(func() {
// Start watcher in background
go func() {
_ = w.Run(ctx)
}()
// Give watcher time to initialize
time.Sleep(10 * time.Millisecond)
})
It("resets timer on each change (debouncing)", func() {
// Send first notification
w.watcherNotify <- scanNotification{Library: lib, FolderPath: "artist1"}
// Wait a bit less than half the watcher wait time to ensure timer doesn't fire
time.Sleep(20 * time.Millisecond)
// No scan should have been triggered yet
Expect(mockScanner.GetScanFoldersCallCount()).To(Equal(0))
// Send another notification (resets timer)
w.watcherNotify <- scanNotification{Library: lib, FolderPath: "artist1"}
// Wait a bit less than half the watcher wait time again
time.Sleep(20 * time.Millisecond)
// Still no scan
Expect(mockScanner.GetScanFoldersCallCount()).To(Equal(0))
// Wait for full timer to expire after last notification (plus margin)
time.Sleep(60 * time.Millisecond)
// Now scan should have been triggered
Eventually(func() int {
return mockScanner.GetScanFoldersCallCount()
}, 100*time.Millisecond, 10*time.Millisecond).Should(Equal(1))
})
It("triggers scan after quiet period", func() {
// Send notification
w.watcherNotify <- scanNotification{Library: lib, FolderPath: "artist1"}
// No scan immediately
Expect(mockScanner.GetScanFoldersCallCount()).To(Equal(0))
// Wait for quiet period
Eventually(func() int {
return mockScanner.GetScanFoldersCallCount()
}, 200*time.Millisecond, 10*time.Millisecond).Should(Equal(1))
})
})
Describe("Empty and Root Paths", func() {
BeforeEach(func() {
// Start watcher in background
go func() {
_ = w.Run(ctx)
}()
// Give watcher time to initialize
time.Sleep(10 * time.Millisecond)
})
It("handles empty folder path (library root)", func() {
// Send notification with empty folder path
w.watcherNotify <- scanNotification{Library: lib, FolderPath: ""}
// Wait for scan
Eventually(func() int {
return mockScanner.GetScanFoldersCallCount()
}, 200*time.Millisecond, 10*time.Millisecond).Should(Equal(1))
// Should scan the library root
calls := mockScanner.GetScanFoldersCalls()
Expect(calls).To(HaveLen(1))
Expect(calls[0].Targets).To(HaveLen(1))
Expect(calls[0].Targets[0].FolderPath).To(Equal(""))
})
It("deduplicates empty and dot paths", func() {
// Send notifications with empty and dot paths
w.watcherNotify <- scanNotification{Library: lib, FolderPath: ""}
time.Sleep(10 * time.Millisecond)
w.watcherNotify <- scanNotification{Library: lib, FolderPath: ""}
// Wait for scan
Eventually(func() int {
return mockScanner.GetScanFoldersCallCount()
}, 200*time.Millisecond, 10*time.Millisecond).Should(Equal(1))
// Should have only one target
calls := mockScanner.GetScanFoldersCalls()
Expect(calls).To(HaveLen(1))
Expect(calls[0].Targets).To(HaveLen(1))
})
})
Describe("Multiple Libraries", func() {
var lib2 *model.Library
BeforeEach(func() {
// Create second library
lib2 = &model.Library{
ID: 2,
Name: "Test Library 2",
Path: "/test/library2",
}
mockLibRepo := mockDS.MockedLibrary.(*tests.MockLibraryRepo)
mockLibRepo.SetData(model.Libraries{*lib, *lib2})
// Start watcher in background
go func() {
_ = w.Run(ctx)
}()
// Give watcher time to initialize
time.Sleep(10 * time.Millisecond)
})
It("creates separate targets for different libraries", func() {
// Send notifications for both libraries
w.watcherNotify <- scanNotification{Library: lib, FolderPath: "artist1"}
time.Sleep(10 * time.Millisecond)
w.watcherNotify <- scanNotification{Library: lib2, FolderPath: "artist2"}
// Wait for scan
Eventually(func() int {
return mockScanner.GetScanFoldersCallCount()
}, 200*time.Millisecond, 10*time.Millisecond).Should(Equal(1))
// Verify two targets for different libraries
calls := mockScanner.GetScanFoldersCalls()
Expect(calls).To(HaveLen(1))
Expect(calls[0].Targets).To(HaveLen(2))
// Verify library IDs are different
libraryIDs := make(map[int]bool)
for _, target := range calls[0].Targets {
libraryIDs[target.LibraryID] = true
}
Expect(libraryIDs).To(HaveKey(1))
Expect(libraryIDs).To(HaveKey(2))
})
})
Describe(".ndignore handling", func() {
	var ctx context.Context
	var cancel context.CancelFunc
	var w *watcher
	var mockFS *mockMusicFS
	var lib *model.Library
	var eventChan chan string
	var absLibPath string
	BeforeEach(func() {
		ctx, cancel = context.WithCancel(GinkgoT().Context())
		DeferCleanup(cancel)
		// Set up library rooted at the current directory; the path must be
		// absolute so event paths can be made relative to the library root.
		var err error
		absLibPath, err = filepath.Abs(".")
		Expect(err).NotTo(HaveOccurred())
		lib = &model.Library{
			ID:   1,
			Name: "Test Library",
			Path: absLibPath,
		}
		// Create watcher with a buffered notification (output) channel.
		w = &watcher{
			watcherNotify: make(chan scanNotification, 10),
		}
		// eventChan is the INPUT channel: raw filesystem event paths go in here.
		eventChan = make(chan string, 10)
	})
	// Helper to send an event - converts relative path to absolute
	sendEvent := func(relativePath string) {
		path := filepath.Join(absLibPath, relativePath)
		eventChan <- path
	}
	// Helper to start the real event processing loop
	startEventProcessing := func() {
		go func() {
			defer GinkgoRecover()
			// Call the actual processLibraryEvents method - testing the real implementation!
			_ = w.processLibraryEvents(ctx, lib, mockFS, eventChan, absLibPath)
		}()
	}
	Context("when a folder matching .ndignore is deleted", func() {
		BeforeEach(func() {
			// Create filesystem with .ndignore containing _TEMP pattern
			// The deleted folder (_TEMP) will NOT exist in the filesystem
			mockFS = &mockMusicFS{
				FS: fstest.MapFS{
					"rock":                       &fstest.MapFile{Mode: fs.ModeDir},
					"rock/.ndignore":             &fstest.MapFile{Data: []byte("_TEMP\n")},
					"rock/valid_album":           &fstest.MapFile{Mode: fs.ModeDir},
					"rock/valid_album/track.mp3": &fstest.MapFile{Data: []byte("audio")},
				},
			}
		})
		It("should NOT send scan notification when deleted folder matches .ndignore", func() {
			startEventProcessing()
			// Simulate deletion event for rock/_TEMP
			sendEvent("rock/_TEMP")
			// Wait a bit to ensure event is processed
			time.Sleep(50 * time.Millisecond)
			// No notification should have been sent.
			// FIX: assert on w.watcherNotify (the output channel), not on
			// eventChan (the input channel). eventChan is drained by
			// processLibraryEvents regardless of the outcome, so checking it
			// for emptiness would pass even if an unwanted scan notification
			// had been emitted.
			Consistently(w.watcherNotify, 100*time.Millisecond).Should(BeEmpty())
		})
		It("should send scan notification for valid folder deletion", func() {
			startEventProcessing()
			// Simulate deletion event for rock/other_folder (not in .ndignore and doesn't exist)
			// Since it doesn't exist in mockFS, resolveFolderPath will walk up to "rock"
			sendEvent("rock/other_folder")
			// Should receive notification for parent folder
			Eventually(w.watcherNotify, 200*time.Millisecond).Should(Receive(Equal(scanNotification{
				Library:    lib,
				FolderPath: "rock",
			})))
		})
	})
	Context("with nested folder patterns", func() {
		BeforeEach(func() {
			mockFS = &mockMusicFS{
				FS: fstest.MapFS{
					"music":             &fstest.MapFile{Mode: fs.ModeDir},
					"music/.ndignore":   &fstest.MapFile{Data: []byte("**/temp\n**/cache\n")},
					"music/rock":        &fstest.MapFile{Mode: fs.ModeDir},
					"music/rock/artist": &fstest.MapFile{Mode: fs.ModeDir},
				},
			}
		})
		It("should NOT send notification when nested ignored folder is deleted", func() {
			startEventProcessing()
			// Simulate deletion of music/rock/artist/temp (matches **/temp)
			sendEvent("music/rock/artist/temp")
			// Wait to ensure event is processed
			time.Sleep(50 * time.Millisecond)
			// No notification should be sent
			Expect(w.watcherNotify).To(BeEmpty(), "Expected no scan notification for nested ignored folder")
		})
		It("should send notification for non-ignored nested folder", func() {
			startEventProcessing()
			// Simulate change in music/rock/artist (doesn't match any pattern)
			sendEvent("music/rock/artist")
			// Should receive notification
			Eventually(w.watcherNotify, 200*time.Millisecond).Should(Receive(Equal(scanNotification{
				Library:    lib,
				FolderPath: "music/rock/artist",
			})))
		})
	})
	Context("with file events in ignored folders", func() {
		BeforeEach(func() {
			mockFS = &mockMusicFS{
				FS: fstest.MapFS{
					"rock":           &fstest.MapFile{Mode: fs.ModeDir},
					"rock/.ndignore": &fstest.MapFile{Data: []byte("_TEMP\n")},
				},
			}
		})
		It("should NOT send notification for file changes in ignored folders", func() {
			startEventProcessing()
			// Simulate file change in rock/_TEMP/file.mp3
			sendEvent("rock/_TEMP/file.mp3")
			// Wait to ensure event is processed
			time.Sleep(50 * time.Millisecond)
			// No notification should be sent
			Expect(w.watcherNotify).To(BeEmpty(), "Expected no scan notification for file in ignored folder")
		})
	})
})
})
var _ = Describe("resolveFolderPath", func() {
	var testFS fs.FS
	BeforeEach(func() {
		// In-memory tree mixing directories and regular files, used to
		// exercise the "walk up until an existing directory is found" logic.
		testFS = fstest.MapFS{
			"artist1":                   &fstest.MapFile{Mode: fs.ModeDir},
			"artist1/album1":            &fstest.MapFile{Mode: fs.ModeDir},
			"artist1/album1/track1.mp3": &fstest.MapFile{Data: []byte("audio")},
			"artist1/album1/track2.mp3": &fstest.MapFile{Data: []byte("audio")},
			"artist1/album2":            &fstest.MapFile{Mode: fs.ModeDir},
			"artist1/album2/song.flac":  &fstest.MapFile{Data: []byte("audio")},
			"artist2":                   &fstest.MapFile{Mode: fs.ModeDir},
			"artist2/cover.jpg":         &fstest.MapFile{Data: []byte("image")},
		}
	})
	It("returns directory path when given a directory", func() {
		Expect(resolveFolderPath(testFS, "artist1/album1")).To(Equal("artist1/album1"))
	})
	It("walks up to parent directory when given a file path", func() {
		Expect(resolveFolderPath(testFS, "artist1/album1/track1.mp3")).To(Equal("artist1/album1"))
	})
	It("walks up multiple levels if needed", func() {
		Expect(resolveFolderPath(testFS, "artist1/album1/nonexistent/file.mp3")).To(Equal("artist1/album1"))
	})
	It("returns empty string for non-existent paths at root", func() {
		Expect(resolveFolderPath(testFS, "nonexistent/path/file.mp3")).To(Equal(""))
	})
	It("returns empty string for dot path", func() {
		Expect(resolveFolderPath(testFS, ".")).To(Equal(""))
	})
	It("returns empty string for empty path", func() {
		Expect(resolveFolderPath(testFS, "")).To(Equal(""))
	})
	It("handles nested file paths correctly", func() {
		Expect(resolveFolderPath(testFS, "artist1/album2/song.flac")).To(Equal("artist1/album2"))
	})
	It("resolves to top-level directory", func() {
		Expect(resolveFolderPath(testFS, "artist2/cover.jpg")).To(Equal("artist2"))
	})
})

View File

@ -29,7 +29,7 @@ var _ = Describe("Config API", func() {
conf.Server.DevUIShowConfig = true // Enable config endpoint for tests
ds = &tests.MockDataStore{}
auth.Init(ds)
nativeRouter := New(ds, nil, nil, nil, core.NewMockLibraryService())
nativeRouter := New(ds, nil, nil, nil, core.NewMockLibraryService(), nil)
router = server.JWTVerifier(nativeRouter)
// Create test users

View File

@ -13,11 +13,11 @@ import (
)
// User-library association endpoints (admin only)
func (n *Router) addUserLibraryRoute(r chi.Router) {
func (api *Router) addUserLibraryRoute(r chi.Router) {
r.Route("/user/{id}/library", func(r chi.Router) {
r.Use(parseUserIDMiddleware)
r.Get("/", getUserLibraries(n.libs))
r.Put("/", setUserLibraries(n.libs))
r.Get("/", getUserLibraries(api.libs))
r.Put("/", setUserLibraries(api.libs))
})
}

View File

@ -30,7 +30,7 @@ var _ = Describe("Library API", func() {
DeferCleanup(configtest.SetupConfig())
ds = &tests.MockDataStore{}
auth.Init(ds)
nativeRouter := New(ds, nil, nil, nil, core.NewMockLibraryService())
nativeRouter := New(ds, nil, nil, nil, core.NewMockLibraryService(), nil)
router = server.JWTVerifier(nativeRouter)
// Create test users

View File

@ -8,9 +8,9 @@ import (
"github.com/Masterminds/squirrel"
"github.com/deluan/rest"
"github.com/navidrome/navidrome/core"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/model/request"
"github.com/navidrome/navidrome/utils/req"
)
@ -63,45 +63,32 @@ func (r *missingRepository) EntityName() string {
return "missing_files"
}
func deleteMissingFiles(ds model.DataStore, w http.ResponseWriter, r *http.Request) {
ctx := r.Context()
p := req.Params(r)
ids, _ := p.Strings("id")
err := ds.WithTx(func(tx model.DataStore) error {
func deleteMissingFiles(maintenance core.Maintenance) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
ctx := r.Context()
p := req.Params(r)
ids, _ := p.Strings("id")
var err error
if len(ids) == 0 {
_, err := tx.MediaFile(ctx).DeleteAllMissing()
return err
}
return tx.MediaFile(ctx).DeleteMissing(ids)
})
if len(ids) == 1 && errors.Is(err, model.ErrNotFound) {
log.Warn(ctx, "Missing file not found", "id", ids[0])
http.Error(w, "not found", http.StatusNotFound)
return
}
if err != nil {
log.Error(ctx, "Error deleting missing tracks from DB", "ids", ids, err)
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
err = ds.GC(ctx)
if err != nil {
log.Error(ctx, "Error running GC after deleting missing tracks", err)
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
// Refresh artist stats in background after deleting missing files
go func() {
bgCtx := request.AddValues(context.Background(), r.Context())
if _, err := ds.Artist(bgCtx).RefreshStats(true); err != nil {
log.Error(bgCtx, "Error refreshing artist stats after deleting missing files", err)
err = maintenance.DeleteAllMissingFiles(ctx)
} else {
log.Debug(bgCtx, "Successfully refreshed artist stats after deleting missing files")
err = maintenance.DeleteMissingFiles(ctx, ids)
}
}()
writeDeleteManyResponse(w, r, ids)
if len(ids) == 1 && errors.Is(err, model.ErrNotFound) {
log.Warn(ctx, "Missing file not found", "id", ids[0])
http.Error(w, "not found", http.StatusNotFound)
return
}
if err != nil {
http.Error(w, "failed to delete missing files", http.StatusInternalServerError)
return
}
writeDeleteManyResponse(w, r, ids)
}
}
var _ model.ResourceRepository = &missingRepository{}

View File

@ -22,70 +22,71 @@ import (
type Router struct {
http.Handler
ds model.DataStore
share core.Share
playlists core.Playlists
insights metrics.Insights
libs core.Library
ds model.DataStore
share core.Share
playlists core.Playlists
insights metrics.Insights
libs core.Library
maintenance core.Maintenance
}
func New(ds model.DataStore, share core.Share, playlists core.Playlists, insights metrics.Insights, libraryService core.Library) *Router {
r := &Router{ds: ds, share: share, playlists: playlists, insights: insights, libs: libraryService}
func New(ds model.DataStore, share core.Share, playlists core.Playlists, insights metrics.Insights, libraryService core.Library, maintenance core.Maintenance) *Router {
r := &Router{ds: ds, share: share, playlists: playlists, insights: insights, libs: libraryService, maintenance: maintenance}
r.Handler = r.routes()
return r
}
func (n *Router) routes() http.Handler {
func (api *Router) routes() http.Handler {
r := chi.NewRouter()
// Public
n.RX(r, "/translation", newTranslationRepository, false)
api.RX(r, "/translation", newTranslationRepository, false)
// Protected
r.Group(func(r chi.Router) {
r.Use(server.Authenticator(n.ds))
r.Use(server.Authenticator(api.ds))
r.Use(server.JWTRefresher)
r.Use(server.UpdateLastAccessMiddleware(n.ds))
n.R(r, "/user", model.User{}, true)
n.R(r, "/song", model.MediaFile{}, false)
n.R(r, "/album", model.Album{}, false)
n.R(r, "/artist", model.Artist{}, false)
n.R(r, "/genre", model.Genre{}, false)
n.R(r, "/player", model.Player{}, true)
n.R(r, "/transcoding", model.Transcoding{}, conf.Server.EnableTranscodingConfig)
n.R(r, "/radio", model.Radio{}, true)
n.R(r, "/tag", model.Tag{}, true)
r.Use(server.UpdateLastAccessMiddleware(api.ds))
api.R(r, "/user", model.User{}, true)
api.R(r, "/song", model.MediaFile{}, false)
api.R(r, "/album", model.Album{}, false)
api.R(r, "/artist", model.Artist{}, false)
api.R(r, "/genre", model.Genre{}, false)
api.R(r, "/player", model.Player{}, true)
api.R(r, "/transcoding", model.Transcoding{}, conf.Server.EnableTranscodingConfig)
api.R(r, "/radio", model.Radio{}, true)
api.R(r, "/tag", model.Tag{}, true)
if conf.Server.EnableSharing {
n.RX(r, "/share", n.share.NewRepository, true)
api.RX(r, "/share", api.share.NewRepository, true)
}
n.addPlaylistRoute(r)
n.addPlaylistTrackRoute(r)
n.addSongPlaylistsRoute(r)
n.addQueueRoute(r)
n.addMissingFilesRoute(r)
n.addKeepAliveRoute(r)
n.addInsightsRoute(r)
api.addPlaylistRoute(r)
api.addPlaylistTrackRoute(r)
api.addSongPlaylistsRoute(r)
api.addQueueRoute(r)
api.addMissingFilesRoute(r)
api.addKeepAliveRoute(r)
api.addInsightsRoute(r)
r.With(adminOnlyMiddleware).Group(func(r chi.Router) {
n.addInspectRoute(r)
n.addConfigRoute(r)
n.addUserLibraryRoute(r)
n.RX(r, "/library", n.libs.NewRepository, true)
api.addInspectRoute(r)
api.addConfigRoute(r)
api.addUserLibraryRoute(r)
api.RX(r, "/library", api.libs.NewRepository, true)
})
})
return r
}
func (n *Router) R(r chi.Router, pathPrefix string, model interface{}, persistable bool) {
func (api *Router) R(r chi.Router, pathPrefix string, model interface{}, persistable bool) {
constructor := func(ctx context.Context) rest.Repository {
return n.ds.Resource(ctx, model)
return api.ds.Resource(ctx, model)
}
n.RX(r, pathPrefix, constructor, persistable)
api.RX(r, pathPrefix, constructor, persistable)
}
func (n *Router) RX(r chi.Router, pathPrefix string, constructor rest.RepositoryConstructor, persistable bool) {
func (api *Router) RX(r chi.Router, pathPrefix string, constructor rest.RepositoryConstructor, persistable bool) {
r.Route(pathPrefix, func(r chi.Router) {
r.Get("/", rest.GetAll(constructor))
if persistable {
@ -102,9 +103,9 @@ func (n *Router) RX(r chi.Router, pathPrefix string, constructor rest.Repository
})
}
func (n *Router) addPlaylistRoute(r chi.Router) {
func (api *Router) addPlaylistRoute(r chi.Router) {
constructor := func(ctx context.Context) rest.Repository {
return n.ds.Resource(ctx, model.Playlist{})
return api.ds.Resource(ctx, model.Playlist{})
}
r.Route("/playlist", func(r chi.Router) {
@ -114,7 +115,7 @@ func (n *Router) addPlaylistRoute(r chi.Router) {
rest.Post(constructor)(w, r)
return
}
createPlaylistFromM3U(n.playlists)(w, r)
createPlaylistFromM3U(api.playlists)(w, r)
})
r.Route("/{id}", func(r chi.Router) {
@ -126,55 +127,53 @@ func (n *Router) addPlaylistRoute(r chi.Router) {
})
}
func (n *Router) addPlaylistTrackRoute(r chi.Router) {
func (api *Router) addPlaylistTrackRoute(r chi.Router) {
r.Route("/playlist/{playlistId}/tracks", func(r chi.Router) {
r.Get("/", func(w http.ResponseWriter, r *http.Request) {
getPlaylist(n.ds)(w, r)
getPlaylist(api.ds)(w, r)
})
r.With(server.URLParamsMiddleware).Route("/", func(r chi.Router) {
r.Delete("/", func(w http.ResponseWriter, r *http.Request) {
deleteFromPlaylist(n.ds)(w, r)
deleteFromPlaylist(api.ds)(w, r)
})
r.Post("/", func(w http.ResponseWriter, r *http.Request) {
addToPlaylist(n.ds)(w, r)
addToPlaylist(api.ds)(w, r)
})
})
r.Route("/{id}", func(r chi.Router) {
r.Use(server.URLParamsMiddleware)
r.Get("/", func(w http.ResponseWriter, r *http.Request) {
getPlaylistTrack(n.ds)(w, r)
getPlaylistTrack(api.ds)(w, r)
})
r.Put("/", func(w http.ResponseWriter, r *http.Request) {
reorderItem(n.ds)(w, r)
reorderItem(api.ds)(w, r)
})
r.Delete("/", func(w http.ResponseWriter, r *http.Request) {
deleteFromPlaylist(n.ds)(w, r)
deleteFromPlaylist(api.ds)(w, r)
})
})
})
}
func (n *Router) addSongPlaylistsRoute(r chi.Router) {
func (api *Router) addSongPlaylistsRoute(r chi.Router) {
r.With(server.URLParamsMiddleware).Get("/song/{id}/playlists", func(w http.ResponseWriter, r *http.Request) {
getSongPlaylists(n.ds)(w, r)
getSongPlaylists(api.ds)(w, r)
})
}
func (n *Router) addQueueRoute(r chi.Router) {
func (api *Router) addQueueRoute(r chi.Router) {
r.Route("/queue", func(r chi.Router) {
r.Get("/", getQueue(n.ds))
r.Post("/", saveQueue(n.ds))
r.Put("/", updateQueue(n.ds))
r.Delete("/", clearQueue(n.ds))
r.Get("/", getQueue(api.ds))
r.Post("/", saveQueue(api.ds))
r.Put("/", updateQueue(api.ds))
r.Delete("/", clearQueue(api.ds))
})
}
func (n *Router) addMissingFilesRoute(r chi.Router) {
func (api *Router) addMissingFilesRoute(r chi.Router) {
r.Route("/missing", func(r chi.Router) {
n.RX(r, "/", newMissingRepository(n.ds), false)
r.Delete("/", func(w http.ResponseWriter, r *http.Request) {
deleteMissingFiles(n.ds, w, r)
})
api.RX(r, "/", newMissingRepository(api.ds), false)
r.Delete("/", deleteMissingFiles(api.maintenance))
})
}
@ -198,7 +197,7 @@ func writeDeleteManyResponse(w http.ResponseWriter, r *http.Request, ids []strin
}
}
func (n *Router) addInspectRoute(r chi.Router) {
func (api *Router) addInspectRoute(r chi.Router) {
if conf.Server.Inspect.Enabled {
r.Group(func(r chi.Router) {
if conf.Server.Inspect.MaxRequests > 0 {
@ -207,26 +206,26 @@ func (n *Router) addInspectRoute(r chi.Router) {
conf.Server.Inspect.BacklogTimeout)
r.Use(middleware.ThrottleBacklog(conf.Server.Inspect.MaxRequests, conf.Server.Inspect.BacklogLimit, time.Duration(conf.Server.Inspect.BacklogTimeout)))
}
r.Get("/inspect", inspect(n.ds))
r.Get("/inspect", inspect(api.ds))
})
}
}
func (n *Router) addConfigRoute(r chi.Router) {
func (api *Router) addConfigRoute(r chi.Router) {
if conf.Server.DevUIShowConfig {
r.Get("/config/*", getConfig)
}
}
func (n *Router) addKeepAliveRoute(r chi.Router) {
func (api *Router) addKeepAliveRoute(r chi.Router) {
r.Get("/keepalive/*", func(w http.ResponseWriter, r *http.Request) {
_, _ = w.Write([]byte(`{"response":"ok", "id":"keepalive"}`))
})
}
func (n *Router) addInsightsRoute(r chi.Router) {
func (api *Router) addInsightsRoute(r chi.Router) {
r.Get("/insights/*", func(w http.ResponseWriter, r *http.Request) {
last, success := n.insights.LastRun(r.Context())
last, success := api.insights.LastRun(r.Context())
if conf.Server.EnableInsightsCollector {
_, _ = w.Write([]byte(`{"id":"insights_status", "lastRun":"` + last.Format("2006-01-02 15:04:05") + `", "success":` + strconv.FormatBool(success) + `}`))
} else {

View File

@ -95,7 +95,7 @@ var _ = Describe("Song Endpoints", func() {
mfRepo.SetData(testSongs)
// Create the native API router and wrap it with the JWTVerifier middleware
nativeRouter := New(ds, nil, nil, nil, core.NewMockLibraryService())
nativeRouter := New(ds, nil, nil, nil, core.NewMockLibraryService(), nil)
router = server.JWTVerifier(nativeRouter)
w = httptest.NewRecorder()
})

Some files were not shown because too many files have changed in this diff Show More