node build fixed

This commit is contained in:
ra_ma
2025-09-20 14:08:38 +01:00
parent c6ebbe069d
commit 3d298fa434
1516 changed files with 535727 additions and 2 deletions

View File

@@ -0,0 +1,129 @@
package scanner
import (
"seanime/internal/api/anilist"
"seanime/internal/hook_resolver"
"seanime/internal/library/anime"
)
// ScanStartedEvent is triggered when the scanning process begins.
// Prevent default to skip the rest of the scanning process and return the local files.
type ScanStartedEvent struct {
	hook_resolver.Event
	// The main directory to scan
	LibraryPath string `json:"libraryPath"`
	// Other directories to scan
	OtherLibraryPaths []string `json:"otherLibraryPaths"`
	// Whether to use enhanced scanning,
	// Enhanced scanning will fetch media from AniList based on the local files' titles,
	// and use the metadata to match the local files.
	Enhanced bool `json:"enhanced"`
	// Whether to skip locked files
	SkipLocked bool `json:"skipLocked"`
	// Whether to skip ignored files
	SkipIgnored bool `json:"skipIgnored"`
	// All previously scanned local files
	LocalFiles []*anime.LocalFile `json:"localFiles"`
}
// ScanFilePathsRetrievedEvent is triggered when the file paths to scan are retrieved.
// The event includes file paths from all directories to scan.
// The event includes file paths of local files that will be skipped.
type ScanFilePathsRetrievedEvent struct {
	hook_resolver.Event
	// Absolute paths of every media file found in the scanned directories.
	FilePaths []string `json:"filePaths"`
}
// ScanLocalFilesParsedEvent is triggered right after the file paths are parsed into local file objects.
// The event does not include local files that are skipped.
type ScanLocalFilesParsedEvent struct {
	hook_resolver.Event
	// Newly parsed local file objects (not yet matched to media).
	LocalFiles []*anime.LocalFile `json:"localFiles"`
}
// ScanCompletedEvent is triggered when the scanning process finishes.
// The event includes all the local files (skipped and scanned) to be inserted as a new entry.
// Right after this event, the local files will be inserted as a new entry.
type ScanCompletedEvent struct {
	hook_resolver.Event
	// Final set of local files (both skipped and newly scanned).
	LocalFiles []*anime.LocalFile `json:"localFiles"`
	// Total scan duration.
	Duration int `json:"duration"` // in milliseconds
}
// ScanMediaFetcherStartedEvent is triggered right before Seanime starts fetching media to be matched against the local files.
type ScanMediaFetcherStartedEvent struct {
	hook_resolver.Event
	// Whether to use enhanced scanning.
	// Enhanced scanning will fetch media from AniList based on the local files' titles,
	// and use the metadata to match the local files.
	Enhanced bool `json:"enhanced"`
}
// ScanMediaFetcherCompletedEvent is triggered when the media fetcher completes.
// The event includes all the media fetched from AniList.
// The event includes the media IDs that are not in the user's collection.
type ScanMediaFetcherCompletedEvent struct {
	hook_resolver.Event
	// All media fetched from AniList, to be matched against the local files.
	AllMedia []*anilist.CompleteAnime `json:"allMedia"`
	// Media IDs that are not in the user's collection.
	UnknownMediaIds []int `json:"unknownMediaIds"`
}
// ScanMatchingStartedEvent is triggered when the matching process begins.
// Prevent default to skip the default matching, in which case modified local files will be used.
type ScanMatchingStartedEvent struct {
	hook_resolver.Event
	// Local files to be matched.
	// If default is prevented, these local files will be used.
	LocalFiles []*anime.LocalFile `json:"localFiles"`
	// Media to be matched against the local files.
	NormalizedMedia []*anime.NormalizedMedia `json:"normalizedMedia"`
	// Matching algorithm.
	// NOTE(review): the matcher switches on "jaccard" / "sorensen-dice" — confirm the full set of accepted values.
	Algorithm string `json:"algorithm"`
	// Matching threshold.
	Threshold float64 `json:"threshold"`
}
// ScanLocalFileMatchedEvent is triggered when a local file is matched with media and before the match is analyzed.
// Prevent default to skip the default analysis and override the match.
type ScanLocalFileMatchedEvent struct {
	hook_resolver.Event
	// Can be nil if there's no match
	Match *anime.NormalizedMedia `json:"match"`
	// Whether a match was found.
	Found bool `json:"found"`
	// The local file being matched.
	LocalFile *anime.LocalFile `json:"localFile"`
	// Similarity rating of the best match.
	Score float64 `json:"score"`
}
// ScanMatchingCompletedEvent is triggered when the matching process completes.
type ScanMatchingCompletedEvent struct {
	hook_resolver.Event
	// Local files after matching; unmatched files keep a media ID of 0.
	LocalFiles []*anime.LocalFile `json:"localFiles"`
}
// ScanHydrationStartedEvent is triggered when the file hydration process begins.
// Prevent default to skip the rest of the hydration process, in which case the event's local files will be used.
type ScanHydrationStartedEvent struct {
	hook_resolver.Event
	// Local files to be hydrated.
	LocalFiles []*anime.LocalFile `json:"localFiles"`
	// Media to be hydrated.
	AllMedia []*anime.NormalizedMedia `json:"allMedia"`
}
// ScanLocalFileHydrationStartedEvent is triggered when a local file's metadata is about to be hydrated.
// Prevent default to skip the default hydration and override the hydration.
type ScanLocalFileHydrationStartedEvent struct {
	hook_resolver.Event
	// The local file whose metadata is being hydrated.
	LocalFile *anime.LocalFile `json:"localFile"`
	// The media the file was matched with.
	Media *anime.NormalizedMedia `json:"media"`
}
// ScanLocalFileHydratedEvent is triggered when a local file's metadata is hydrated.
type ScanLocalFileHydratedEvent struct {
	hook_resolver.Event
	// The hydrated local file.
	LocalFile *anime.LocalFile `json:"localFile"`
	// The media ID assigned to the file.
	MediaId int `json:"mediaId"`
	// The resolved episode number (-1 when none was parsed).
	Episode int `json:"episode"`
}

View File

@@ -0,0 +1,525 @@
package scanner
import (
"errors"
"seanime/internal/api/anilist"
"seanime/internal/api/metadata"
"seanime/internal/hook"
"seanime/internal/library/anime"
"seanime/internal/library/summary"
"seanime/internal/platforms/platform"
"seanime/internal/util"
"seanime/internal/util/comparison"
"seanime/internal/util/limiter"
"strconv"
"time"
"github.com/rs/zerolog"
"github.com/samber/lo"
lop "github.com/samber/lo/parallel"
"github.com/sourcegraph/conc/pool"
)
// FileHydrator hydrates the metadata of all (matched) LocalFiles.
// LocalFiles should already have their media ID hydrated.
type FileHydrator struct {
	LocalFiles         []*anime.LocalFile       // Local files to hydrate
	AllMedia           []*anime.NormalizedMedia // All media used to hydrate local files
	CompleteAnimeCache *anilist.CompleteAnimeCache
	Platform           platform.Platform
	MetadataProvider   metadata.Provider
	AnilistRateLimiter *limiter.Limiter // rate limiter used when fetching media trees from AniList
	Logger             *zerolog.Logger
	ScanLogger         *ScanLogger                // optional
	ScanSummaryLogger  *summary.ScanSummaryLogger // optional
	ForceMediaId       int                        // optional - force all local files to have this media ID
}
// HydrateMetadata will hydrate the metadata of each LocalFile with the metadata of the matched anilist.BaseAnime.
// It will divide the LocalFiles into groups based on their media ID and process each group in parallel.
// The ScanHydrationStarted hook runs first and may replace the inputs or prevent the default behavior entirely.
func (fh *FileHydrator) HydrateMetadata() {
	start := time.Now()
	// Shared rate limiter for metadata-provider calls made while analyzing media trees.
	rateLimiter := limiter.NewLimiter(5*time.Second, 20)

	fh.Logger.Debug().Msg("hydrator: Starting metadata hydration")

	// Invoke ScanHydrationStarted hook
	event := &ScanHydrationStartedEvent{
		LocalFiles: fh.LocalFiles,
		AllMedia:   fh.AllMedia,
	}
	_ = hook.GlobalHookManager.OnScanHydrationStarted().Trigger(event)
	fh.LocalFiles = event.LocalFiles
	fh.AllMedia = event.AllMedia

	// Default prevented, do not hydrate the metadata
	if event.DefaultPrevented {
		return
	}

	// Group local files by media ID
	groups := lop.GroupBy(fh.LocalFiles, func(localFile *anime.LocalFile) int {
		return localFile.MediaId
	})

	// Remove the group with unmatched media
	delete(groups, 0)

	if fh.ScanLogger != nil {
		fh.ScanLogger.LogFileHydrator(zerolog.InfoLevel).
			Int("entryCount", len(groups)).
			Msg("Starting metadata hydration process")
	}

	// Process each group in parallel
	p := pool.New()
	for mId, files := range groups {
		// Shadow the loop variables before handing them to the goroutine:
		// with a go.mod language version below 1.22 every closure launched by
		// p.Go would otherwise observe the last iteration's values.
		mId, files := mId, files
		p.Go(func() {
			if len(files) > 0 {
				fh.hydrateGroupMetadata(mId, files, rateLimiter)
			}
		})
	}
	p.Wait()

	if fh.ScanLogger != nil {
		fh.ScanLogger.LogFileHydrator(zerolog.InfoLevel).
			Int64("ms", time.Since(start).Milliseconds()).
			Msg("Finished metadata hydration")
	}
}
// hydrateGroupMetadata hydrates the metadata of a group of local files that all share the same
// media ID. Files are processed sequentially; the media relation tree used for episode
// normalization is fetched lazily, at most once per group, the first time a file's episode
// number exceeds the media's episode count.
// NOTE(review): fh.ScanSummaryLogger is documented as optional but is called without nil
// checks throughout — presumably its methods are nil-receiver safe; confirm.
func (fh *FileHydrator) hydrateGroupMetadata(
	mId int,
	lfs []*anime.LocalFile, // Grouped local files
	rateLimiter *limiter.Limiter,
) {
	// Get the media
	media, found := lo.Find(fh.AllMedia, func(media *anime.NormalizedMedia) bool {
		return media.ID == mId
	})
	if !found {
		if fh.ScanLogger != nil {
			fh.ScanLogger.LogFileHydrator(zerolog.ErrorLevel).
				Int("mediaId", mId).
				Msg("Could not find media in FileHydrator options")
		}
		return
	}
	// Tree contains media relations
	tree := anilist.NewCompleteAnimeRelationTree()
	// Tree analysis used for episode normalization
	var mediaTreeAnalysis *MediaTreeAnalysis
	// Guards the one-time lazy fetch of the media tree for this group.
	treeFetched := false
	// Process each local file in the group sequentially
	lo.ForEach(lfs, func(lf *anime.LocalFile, index int) {
		// On panic, un-match the file so a bad parse never corrupts the library.
		defer util.HandlePanicInModuleThenS("scanner/hydrator/hydrateGroupMetadata", func(stackTrace string) {
			lf.MediaId = 0
			/*Log*/
			if fh.ScanLogger != nil {
				fh.ScanLogger.LogFileHydrator(zerolog.ErrorLevel).
					Str("filename", lf.Name).
					Msg("Panic occurred, file un-matched")
			}
			fh.ScanSummaryLogger.LogPanic(lf, stackTrace)
		})
		// -1 means "no episode number parsed"; later branches key off this sentinel.
		episode := -1
		// Invoke ScanLocalFileHydrationStarted hook
		event := &ScanLocalFileHydrationStartedEvent{
			LocalFile: lf,
			Media:     media,
		}
		_ = hook.GlobalHookManager.OnScanLocalFileHydrationStarted().Trigger(event)
		// The hook may substitute the file and/or the media.
		lf = event.LocalFile
		media = event.Media
		// Fires after this file is fully processed, regardless of which branch returned.
		defer func() {
			// Invoke ScanLocalFileHydrated hook
			event := &ScanLocalFileHydratedEvent{
				LocalFile: lf,
				MediaId:   mId,
				Episode:   episode,
			}
			_ = hook.GlobalHookManager.OnScanLocalFileHydrated().Trigger(event)
			lf = event.LocalFile
			mId = event.MediaId
			episode = event.Episode
		}()
		// Handle hook override
		if event.DefaultPrevented {
			if fh.ScanLogger != nil {
				fh.ScanLogger.LogFileHydrator(zerolog.DebugLevel).
					Str("filename", lf.Name).
					Msg("Default hydration skipped by hook")
			}
			fh.ScanSummaryLogger.LogDebug(lf, "Default hydration skipped by hook")
			return
		}
		// Assume "main" episode type until a more specific branch decides otherwise.
		lf.Metadata.Type = anime.LocalFileTypeMain
		// Get episode number
		if len(lf.ParsedData.Episode) > 0 {
			if ep, ok := util.StringToInt(lf.ParsedData.Episode); ok {
				episode = ep
			}
		}
		// NC metadata
		if comparison.ValueContainsNC(lf.Name) {
			lf.Metadata.Episode = 0
			lf.Metadata.AniDBEpisode = ""
			lf.Metadata.Type = anime.LocalFileTypeNC
			/*Log */
			if fh.ScanLogger != nil {
				fh.logFileHydration(zerolog.DebugLevel, lf, mId, episode).
					Msg("File has been marked as NC")
			}
			fh.ScanSummaryLogger.LogMetadataNC(lf)
			return
		}
		// Special metadata
		if comparison.ValueContainsSpecial(lf.Name) {
			lf.Metadata.Type = anime.LocalFileTypeSpecial
			if episode > -1 {
				// ep14 (13 original) -> ep1 s1
				if episode > media.GetCurrentEpisodeCount() {
					lf.Metadata.Episode = episode - media.GetCurrentEpisodeCount()
					lf.Metadata.AniDBEpisode = "S" + strconv.Itoa(episode-media.GetCurrentEpisodeCount())
				} else {
					lf.Metadata.Episode = episode
					lf.Metadata.AniDBEpisode = "S" + strconv.Itoa(episode)
				}
			} else {
				// No parsed episode number: default to the first special.
				lf.Metadata.Episode = 1
				lf.Metadata.AniDBEpisode = "S1"
			}
			/*Log */
			if fh.ScanLogger != nil {
				fh.logFileHydration(zerolog.DebugLevel, lf, mId, episode).
					Msg("File has been marked as special")
			}
			fh.ScanSummaryLogger.LogMetadataSpecial(lf, lf.Metadata.Episode, lf.Metadata.AniDBEpisode)
			return
		}
		// Movie metadata
		if *media.Format == anilist.MediaFormatMovie {
			lf.Metadata.Episode = 1
			lf.Metadata.AniDBEpisode = "1"
			/*Log */
			if fh.ScanLogger != nil {
				fh.logFileHydration(zerolog.DebugLevel, lf, mId, episode).
					Msg("File has been marked as main")
			}
			fh.ScanSummaryLogger.LogMetadataMain(lf, lf.Metadata.Episode, lf.Metadata.AniDBEpisode)
			return
		}
		// No absolute episode count
		// "media.GetTotalEpisodeCount() == -1" is a fix for media with unknown episode count, we will just assume that the episode number is correct
		// TODO: We might want to fetch the media when the episode count is unknown in order to get the correct episode count
		if episode > -1 && (episode <= media.GetCurrentEpisodeCount() || media.GetTotalEpisodeCount() == -1) {
			// Episode 0 - Might be a special
			// By default, we will assume that AniDB doesn't include Episode 0 as part of the main episodes (which is often the case)
			// If this proves to be wrong, media_entry.go will offset the AniDBEpisode by 1 and treat "S1" as "1" when it is a main episode
			if episode == 0 {
				// Leave episode number as 0, assuming that the client will handle tracking correctly
				lf.Metadata.Episode = 0
				lf.Metadata.AniDBEpisode = "S1"
				/*Log */
				if fh.ScanLogger != nil {
					fh.logFileHydration(zerolog.DebugLevel, lf, mId, episode).
						Msg("File has been marked as main")
				}
				fh.ScanSummaryLogger.LogMetadataEpisodeZero(lf, lf.Metadata.Episode, lf.Metadata.AniDBEpisode)
				return
			}
			lf.Metadata.Episode = episode
			lf.Metadata.AniDBEpisode = strconv.Itoa(episode)
			/*Log */
			if fh.ScanLogger != nil {
				fh.logFileHydration(zerolog.DebugLevel, lf, mId, episode).
					Msg("File has been marked as main")
			}
			fh.ScanSummaryLogger.LogMetadataMain(lf, lf.Metadata.Episode, lf.Metadata.AniDBEpisode)
			return
		}
		// Episode number is higher but media only has 1 episode
		// - Might be a movie that was not correctly identified as such
		// - Or, the torrent files were divided into multiple episodes from a media that is listed as a movie on AniList
		if episode > media.GetCurrentEpisodeCount() && media.GetTotalEpisodeCount() == 1 {
			lf.Metadata.Episode = 1 // Coerce episode number to 1 because it is used for tracking
			lf.Metadata.AniDBEpisode = "1"
			/*Log */
			if fh.ScanLogger != nil {
				fh.logFileHydration(zerolog.WarnLevel, lf, mId, episode).
					Str("warning", "File's episode number is higher than the media's episode count, but the media only has 1 episode").
					Msg("File has been marked as main")
			}
			fh.ScanSummaryLogger.LogMetadataMain(lf, lf.Metadata.Episode, lf.Metadata.AniDBEpisode)
			return
		}
		// No episode number, but the media only has 1 episode
		if episode == -1 && media.GetCurrentEpisodeCount() == 1 {
			lf.Metadata.Episode = 1 // Coerce episode number to 1 because it is used for tracking
			lf.Metadata.AniDBEpisode = "1"
			/*Log */
			if fh.ScanLogger != nil {
				fh.logFileHydration(zerolog.WarnLevel, lf, mId, episode).
					Str("warning", "No episode number found, but the media only has 1 episode").
					Msg("File has been marked as main")
			}
			fh.ScanSummaryLogger.LogMetadataMain(lf, lf.Metadata.Episode, lf.Metadata.AniDBEpisode)
			return
		}
		// Still no episode number and the media has more than 1 episode and is not a movie
		// We will mark it as a special episode
		if episode == -1 {
			lf.Metadata.Type = anime.LocalFileTypeSpecial
			lf.Metadata.Episode = 1
			lf.Metadata.AniDBEpisode = "S1"
			/*Log */
			if fh.ScanLogger != nil {
				fh.logFileHydration(zerolog.ErrorLevel, lf, mId, episode).
					Msg("No episode number found, file has been marked as special")
			}
			fh.ScanSummaryLogger.LogMetadataEpisodeNormalizationFailed(lf, errors.New("no episode number found"), lf.Metadata.Episode, lf.Metadata.AniDBEpisode)
			return
		}
		// Absolute episode count
		if episode > media.GetCurrentEpisodeCount() && fh.ForceMediaId == 0 {
			if !treeFetched {
				mediaTreeFetchStart := time.Now()
				// Fetch media tree
				// The media tree will be used to normalize episode numbers
				if err := media.FetchMediaTree(anilist.FetchMediaTreeAll, fh.Platform.GetAnilistClient(), fh.AnilistRateLimiter, tree, fh.CompleteAnimeCache); err == nil {
					// Create a new media tree analysis that will be used for episode normalization
					mta, _ := NewMediaTreeAnalysis(&MediaTreeAnalysisOptions{
						tree:             tree,
						metadataProvider: fh.MetadataProvider,
						rateLimiter:      rateLimiter,
					})
					// Hoist the media tree analysis, so it will be used by other files
					// We don't care if it's nil because [normalizeEpisodeNumberAndHydrate] will handle it
					mediaTreeAnalysis = mta
					treeFetched = true
					/*Log */
					if mta != nil && mta.branches != nil {
						if fh.ScanLogger != nil {
							fh.ScanLogger.LogFileHydrator(zerolog.DebugLevel).
								Int("mediaId", mId).
								Int64("ms", time.Since(mediaTreeFetchStart).Milliseconds()).
								Int("requests", len(mediaTreeAnalysis.branches)).
								Any("branches", mediaTreeAnalysis.printBranches()).
								Msg("Media tree fetched")
						}
						fh.ScanSummaryLogger.LogMetadataMediaTreeFetched(lf, time.Since(mediaTreeFetchStart).Milliseconds(), len(mediaTreeAnalysis.branches))
					}
				} else {
					if fh.ScanLogger != nil {
						fh.ScanLogger.LogFileHydrator(zerolog.ErrorLevel).
							Int("mediaId", mId).
							Str("error", err.Error()).
							Int64("ms", time.Since(mediaTreeFetchStart).Milliseconds()).
							Msg("Could not fetch media tree")
					}
					fh.ScanSummaryLogger.LogMetadataMediaTreeFetchFailed(lf, err, time.Since(mediaTreeFetchStart).Milliseconds())
				}
			}
			// Normalize episode number
			if err := fh.normalizeEpisodeNumberAndHydrate(mediaTreeAnalysis, lf, episode, media.GetCurrentEpisodeCount()); err != nil {
				/*Log */
				if fh.ScanLogger != nil {
					fh.logFileHydration(zerolog.WarnLevel, lf, mId, episode).
						Dict("mediaTreeAnalysis", zerolog.Dict().
							Bool("normalized", false).
							Str("error", err.Error()).
							Str("reason", "Episode normalization failed"),
						).
						Msg("File has been marked as special")
				}
				fh.ScanSummaryLogger.LogMetadataEpisodeNormalizationFailed(lf, err, lf.Metadata.Episode, lf.Metadata.AniDBEpisode)
			} else {
				/*Log */
				if fh.ScanLogger != nil {
					fh.logFileHydration(zerolog.DebugLevel, lf, mId, episode).
						Dict("mediaTreeAnalysis", zerolog.Dict().
							Bool("normalized", true).
							Bool("hasNewMediaId", lf.MediaId != mId).
							Int("newMediaId", lf.MediaId),
						).
						Msg("File has been marked as main")
				}
				fh.ScanSummaryLogger.LogMetadataEpisodeNormalized(lf, mId, episode, lf.Metadata.Episode, lf.MediaId, lf.Metadata.AniDBEpisode)
			}
			return
		}
		// Absolute episode count with forced media ID
		if fh.ForceMediaId != 0 && episode > media.GetCurrentEpisodeCount() {
			// When we encounter a file with an episode number higher than the media's episode count
			// we have a forced media ID, we will fetch the media from AniList and get the offset
			animeMetadata, err := fh.MetadataProvider.GetAnimeMetadata(metadata.AnilistPlatform, fh.ForceMediaId)
			if err != nil {
				/*Log */
				if fh.ScanLogger != nil {
					fh.logFileHydration(zerolog.ErrorLevel, lf, mId, episode).
						Str("error", err.Error()).
						Msg("Could not fetch AniDB metadata")
				}
				// Fall back to the raw parsed episode number.
				lf.Metadata.Episode = episode
				lf.Metadata.AniDBEpisode = strconv.Itoa(episode)
				lf.MediaId = fh.ForceMediaId
				fh.ScanSummaryLogger.LogMetadataEpisodeNormalizationFailed(lf, errors.New("could not fetch AniDB metadata"), lf.Metadata.Episode, lf.Metadata.AniDBEpisode)
				return
			}
			// Get the first episode to calculate the offset
			firstEp, ok := animeMetadata.Episodes["1"]
			if !ok {
				/*Log */
				if fh.ScanLogger != nil {
					fh.logFileHydration(zerolog.ErrorLevel, lf, mId, episode).
						Msg("Could not find absolute episode offset")
				}
				lf.Metadata.Episode = episode
				lf.Metadata.AniDBEpisode = strconv.Itoa(episode)
				lf.MediaId = fh.ForceMediaId
				fh.ScanSummaryLogger.LogMetadataEpisodeNormalizationFailed(lf, errors.New("could not find absolute episode offset"), lf.Metadata.Episode, lf.Metadata.AniDBEpisode)
				return
			}
			// ref: media_tree_analysis.go
			usePartEpisodeNumber := firstEp.EpisodeNumber > 1 && firstEp.AbsoluteEpisodeNumber-firstEp.EpisodeNumber > 1
			minPartAbsoluteEpisodeNumber := 0
			maxPartAbsoluteEpisodeNumber := 0
			if usePartEpisodeNumber {
				minPartAbsoluteEpisodeNumber = firstEp.EpisodeNumber
				maxPartAbsoluteEpisodeNumber = minPartAbsoluteEpisodeNumber + animeMetadata.GetMainEpisodeCount() - 1
			}
			absoluteEpisodeNumber := firstEp.AbsoluteEpisodeNumber
			// Calculate the relative episode number
			relativeEp := episode
			// Let's say the media has 12 episodes and the file is "episode 13"
			// If the [partAbsoluteEpisodeNumber] is 13, then the [relativeEp] will be 1, we can safely ignore the [absoluteEpisodeNumber]
			// e.g. 13 - (13-1) = 1
			if minPartAbsoluteEpisodeNumber <= episode && maxPartAbsoluteEpisodeNumber >= episode {
				relativeEp = episode - (minPartAbsoluteEpisodeNumber - 1)
			} else {
				// Let's say the media has 12 episodes and the file is "episode 38"
				// The [absoluteEpisodeNumber] will be 38 and the [relativeEp] will be 1
				// e.g. 38 - (38-1) = 1
				relativeEp = episode - (absoluteEpisodeNumber - 1)
			}
			if relativeEp < 1 {
				if fh.ScanLogger != nil {
					fh.logFileHydration(zerolog.WarnLevel, lf, mId, episode).
						Dict("normalization", zerolog.Dict().
							Bool("normalized", false).
							Str("reason", "Episode normalization failed, could not find relative episode number"),
						).
						Msg("File has been marked as main")
				}
				lf.Metadata.Episode = episode
				lf.Metadata.AniDBEpisode = strconv.Itoa(episode)
				lf.MediaId = fh.ForceMediaId
				fh.ScanSummaryLogger.LogMetadataEpisodeNormalizationFailed(lf, errors.New("could not find relative episode number"), lf.Metadata.Episode, lf.Metadata.AniDBEpisode)
				return
			}
			if fh.ScanLogger != nil {
				fh.logFileHydration(zerolog.DebugLevel, lf, mId, relativeEp).
					Dict("mediaTreeAnalysis", zerolog.Dict().
						Bool("normalized", true).
						Int("forcedMediaId", fh.ForceMediaId),
					).
					Msg("File has been marked as main")
			}
			lf.Metadata.Episode = relativeEp
			lf.Metadata.AniDBEpisode = strconv.Itoa(relativeEp)
			lf.MediaId = fh.ForceMediaId
			fh.ScanSummaryLogger.LogMetadataMain(lf, lf.Metadata.Episode, lf.Metadata.AniDBEpisode)
			return
		}
	})
}
// logFileHydration returns a log event at the given level, pre-populated with the file's
// name, its media ID, the parsed/resolved episode variables and the currently hydrated
// metadata. Callers attach further fields and finalize the event with Msg.
func (fh *FileHydrator) logFileHydration(level zerolog.Level, lf *anime.LocalFile, mId int, episode int) *zerolog.Event {
	parsedVars := zerolog.Dict().
		Str("parsedEpisode", lf.ParsedData.Episode).
		Int("episode", episode)
	hydratedMeta := zerolog.Dict().
		Int("episode", lf.Metadata.Episode).
		Str("aniDBEpisode", lf.Metadata.AniDBEpisode)

	evt := fh.ScanLogger.LogFileHydrator(level)
	evt = evt.Str("filename", lf.Name)
	evt = evt.Int("mediaId", mId)
	evt = evt.Dict("vars", parsedVars)
	return evt.Dict("metadata", hydratedMeta)
}
// normalizeEpisodeNumberAndHydrate will normalize the episode number and hydrate the metadata of the LocalFile.
// If the MediaTreeAnalysis is nil, the episode number will not be normalized.
// On failure the overflow past the media's episode count is recorded as a special episode
// and a non-nil error is returned.
func (fh *FileHydrator) normalizeEpisodeNumberAndHydrate(
	mta *MediaTreeAnalysis,
	lf *anime.LocalFile,
	ep int, // The absolute episode number of the media
	maxEp int, // The maximum episode number of the media
) error {
	// Fallback when normalization is impossible: treat the overflow (e.g. 14 - 12 = 2)
	// as a special episode ("S2"). It might not exist on AniDB, but it's better than
	// collapsing everything to "S1".
	markOverflowAsSpecial := func() {
		overflow := ep - maxEp
		lf.Metadata.Episode = overflow
		lf.Metadata.AniDBEpisode = "S" + strconv.Itoa(overflow)
		lf.Metadata.Type = anime.LocalFileTypeSpecial
	}

	// No media tree analysis available at all.
	if mta == nil {
		markOverflowAsSpecial()
		return errors.New("[hydrator] could not find media tree")
	}

	relativeEp, mediaId, ok := mta.getRelativeEpisodeNumber(ep)
	if !ok {
		markOverflowAsSpecial()
		return errors.New("[hydrator] could not find relative episode number from media tree")
	}

	// Normalization succeeded: the file may now point at a different media in the tree.
	lf.Metadata.Episode = relativeEp
	lf.Metadata.AniDBEpisode = strconv.Itoa(relativeEp)
	lf.MediaId = mediaId
	return nil
}

View File

@@ -0,0 +1,122 @@
package scanner
import (
"seanime/internal/api/anilist"
"seanime/internal/api/metadata"
"seanime/internal/library/anime"
"seanime/internal/platforms/anilist_platform"
"seanime/internal/util"
"seanime/internal/util/limiter"
"testing"
)
// TestFileHydrator_HydrateMetadata runs the full match + hydrate pipeline against mock
// AniList data and asserts that every local file resolves to the expected media ID.
// NOTE(review): relies on GetMockProvider / TestGetMockAnilistClient fixtures — presumably
// no network access is needed; confirm.
func TestFileHydrator_HydrateMetadata(t *testing.T) {
	completeAnimeCache := anilist.NewCompleteAnimeCache()
	anilistRateLimiter := limiter.NewAnilistLimiter()
	logger := util.NewLogger()
	metadataProvider := metadata.GetMockProvider(t)
	anilistClient := anilist.TestGetMockAnilistClient()
	anilistPlatform := anilist_platform.NewAnilistPlatform(anilistClient, logger)
	animeCollection, err := anilistPlatform.GetAnimeCollectionWithRelations(t.Context())
	if err != nil {
		t.Fatal("expected result, got error:", err.Error())
	}
	allMedia := animeCollection.GetAllAnime()
	// Table-driven cases: each entry lists file paths and the media ID they should end up with.
	tests := []struct {
		name            string
		paths           []string
		expectedMediaId int
	}{
		{
			name: "should be hydrated with id 131586",
			paths: []string{
				"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 20v2 (1080p) [30072859].mkv",
				"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 21v2 (1080p) [4B1616A5].mkv",
				"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 22v2 (1080p) [58BF43B4].mkv",
				"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 23v2 (1080p) [D94B4894].mkv",
			},
			expectedMediaId: 131586, // 86 - Eighty Six Part 2
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			scanLogger, err := NewConsoleScanLogger()
			if err != nil {
				t.Fatal("expected result, got error:", err.Error())
			}
			// +---------------------+
			// |     Local Files     |
			// +---------------------+
			var lfs []*anime.LocalFile
			for _, path := range tt.paths {
				lf := anime.NewLocalFile(path, "E:/Anime")
				lfs = append(lfs, lf)
			}
			// +---------------------+
			// |   MediaContainer    |
			// +---------------------+
			mc := NewMediaContainer(&MediaContainerOptions{
				AllMedia:   allMedia,
				ScanLogger: scanLogger,
			})
			for _, nm := range mc.NormalizedMedia {
				t.Logf("media id: %d, title: %s", nm.ID, nm.GetTitleSafe())
			}
			// +---------------------+
			// |       Matcher       |
			// +---------------------+
			matcher := &Matcher{
				LocalFiles:         lfs,
				MediaContainer:     mc,
				CompleteAnimeCache: nil,
				Logger:             util.NewLogger(),
				ScanLogger:         scanLogger,
			}
			err = matcher.MatchLocalFilesWithMedia()
			if err != nil {
				t.Fatal("expected result, got error:", err.Error())
			}
			// +---------------------+
			// |    FileHydrator     |
			// +---------------------+
			fh := &FileHydrator{
				LocalFiles:         lfs,
				AllMedia:           mc.NormalizedMedia,
				CompleteAnimeCache: completeAnimeCache,
				Platform:           anilistPlatform,
				AnilistRateLimiter: anilistRateLimiter,
				MetadataProvider:   metadataProvider,
				Logger:             logger,
				ScanLogger:         scanLogger,
			}
			fh.HydrateMetadata()
			// Every file in the batch must have been hydrated with the expected media ID.
			for _, lf := range fh.LocalFiles {
				if lf.MediaId != tt.expectedMediaId {
					t.Fatalf("expected media id %d, got %d", tt.expectedMediaId, lf.MediaId)
				}
				t.Logf("local file: %s,\nmedia id: %d\n", lf.Name, lf.MediaId)
			}
		})
	}
}

View File

@@ -0,0 +1,3 @@
package scanner
// .seaignore

View File

@@ -0,0 +1,28 @@
package scanner
import (
"github.com/rs/zerolog"
lop "github.com/samber/lo/parallel"
"seanime/internal/library/anime"
"seanime/internal/library/filesystem"
)
// GetLocalFilesFromDir creates a new LocalFile for each video file found under dirPath.
// It returns an error immediately if the directory traversal fails, so callers never
// receive a partial result alongside a non-nil error.
func GetLocalFilesFromDir(dirPath string, logger *zerolog.Logger) ([]*anime.LocalFile, error) {
	paths, err := filesystem.GetMediaFilePathsFromDirS(dirPath)
	if err != nil {
		// Previously the error was carried to the end of the function and the (empty)
		// path list was still mapped and returned; fail fast instead.
		return nil, err
	}

	logger.Trace().
		Str("dirPath", dirPath).
		Msg("localfile: Retrieving and creating local files")

	// Concurrently populate localFiles
	localFiles := lop.Map(paths, func(path string, index int) *anime.LocalFile {
		return anime.NewLocalFile(path, dirPath)
	})

	logger.Trace().
		Int("count", len(localFiles)).
		Msg("localfile: Retrieved local files")

	return localFiles, nil
}

View File

@@ -0,0 +1,21 @@
package scanner
import (
"github.com/stretchr/testify/assert"
"seanime/internal/util"
"testing"
)
// TestGetLocalFilesFromDir exercises GetLocalFilesFromDir against a real directory.
// It is skipped by default because it depends on a machine-specific path.
func TestGetLocalFilesFromDir(t *testing.T) {
	t.Skip("Skipping test that requires local files")
	var dir = "E:/Anime"
	logger := util.NewLogger()
	localFiles, err := GetLocalFilesFromDir(dir, logger)
	if assert.NoError(t, err) {
		t.Logf("Found %d local files", len(localFiles))
	}
}

View File

@@ -0,0 +1,552 @@
package scanner
import (
"errors"
"fmt"
"math"
"seanime/internal/api/anilist"
"seanime/internal/hook"
"seanime/internal/library/anime"
"seanime/internal/library/summary"
"seanime/internal/util"
"seanime/internal/util/comparison"
"time"
"github.com/adrg/strutil/metrics"
"github.com/rs/zerolog"
"github.com/samber/lo"
lop "github.com/samber/lo/parallel"
"github.com/sourcegraph/conc/pool"
)
// Matcher matches each local file against the media in a MediaContainer
// and writes the winning media ID onto the file in place.
type Matcher struct {
	LocalFiles         []*anime.LocalFile // files to match; MediaId is mutated in place
	MediaContainer     *MediaContainer    // candidate media (normalized titles and synonyms)
	CompleteAnimeCache *anilist.CompleteAnimeCache
	Logger             *zerolog.Logger
	ScanLogger         *ScanLogger
	ScanSummaryLogger  *summary.ScanSummaryLogger // optional
	Algorithm          string                     // similarity algorithm, e.g. "jaccard" or "sorensen-dice"
	Threshold          float64                    // minimum similarity rating; 0 means "use the default"
}
var (
	// ErrNoLocalFiles is returned by MatchLocalFilesWithMedia when the matcher
	// was given no local files to work on.
	ErrNoLocalFiles = errors.New("[matcher] no local files")
)
// MatchLocalFilesWithMedia will match each anime.LocalFile with a specific anilist.BaseAnime and modify the LocalFile's `mediaId`.
// It returns ErrNoLocalFiles when there is nothing to match, and an error when no media was provided.
// The ScanMatchingStarted hook runs first and may replace the inputs or prevent the default matching.
func (m *Matcher) MatchLocalFilesWithMedia() error {
	// Fall back to the default similarity threshold when none was configured.
	if m.Threshold == 0 {
		m.Threshold = 0.5
	}
	start := time.Now()
	if len(m.LocalFiles) == 0 {
		if m.ScanLogger != nil {
			m.ScanLogger.LogMatcher(zerolog.WarnLevel).Msg("No local files")
		}
		return ErrNoLocalFiles
	}
	if len(m.MediaContainer.allMedia) == 0 {
		if m.ScanLogger != nil {
			m.ScanLogger.LogMatcher(zerolog.WarnLevel).Msg("No media fed into the matcher")
		}
		return errors.New("[matcher] no media fed into the matcher")
	}
	m.Logger.Debug().Msg("matcher: Starting matching process")
	// Invoke ScanMatchingStarted hook
	event := &ScanMatchingStartedEvent{
		LocalFiles:      m.LocalFiles,
		NormalizedMedia: m.MediaContainer.NormalizedMedia,
		Algorithm:       m.Algorithm,
		Threshold:       m.Threshold,
	}
	_ = hook.GlobalHookManager.OnScanMatchingStarted().Trigger(event)
	// The hook may have replaced any of the inputs.
	m.LocalFiles = event.LocalFiles
	m.MediaContainer.NormalizedMedia = event.NormalizedMedia
	m.Algorithm = event.Algorithm
	m.Threshold = event.Threshold
	// Default prevented: the hook opted out of the built-in matching.
	if event.DefaultPrevented {
		m.Logger.Debug().Msg("matcher: Match stopped by hook")
		return nil
	}
	// Parallelize the matching process
	lop.ForEach(m.LocalFiles, func(localFile *anime.LocalFile, _ int) {
		m.matchLocalFileWithMedia(localFile)
	})
	// m.validateMatches()
	// Invoke ScanMatchingCompleted hook
	completedEvent := &ScanMatchingCompletedEvent{
		LocalFiles: m.LocalFiles,
	}
	_ = hook.GlobalHookManager.OnScanMatchingCompleted().Trigger(completedEvent)
	m.LocalFiles = completedEvent.LocalFiles
	if m.ScanLogger != nil {
		m.ScanLogger.LogMatcher(zerolog.InfoLevel).
			Int64("ms", time.Since(start).Milliseconds()).
			Int("files", len(m.LocalFiles)).
			Int("unmatched", lo.CountBy(m.LocalFiles, func(localFile *anime.LocalFile) bool {
				return localFile.MediaId == 0
			})).
			Msg("Finished matching process")
	}
	return nil
}
// matchLocalFileWithMedia finds the best match for the local file
// If the best match is above a certain threshold, set the local file's mediaId to the best match's id
// If the best match is below a certain threshold, leave the local file's mediaId to 0
func (m *Matcher) matchLocalFileWithMedia(lf *anime.LocalFile) {
defer util.HandlePanicInModuleThenS("scanner/matcher/matchLocalFileWithMedia", func(stackTrace string) {
lf.MediaId = 0
/*Log*/
if m.ScanLogger != nil {
m.ScanLogger.LogMatcher(zerolog.ErrorLevel).
Str("filename", lf.Name).
Msg("Panic occurred, file un-matched")
}
m.ScanSummaryLogger.LogPanic(lf, stackTrace)
})
// Check if the local file has already been matched
if lf.MediaId != 0 {
if m.ScanLogger != nil {
m.ScanLogger.LogMatcher(zerolog.DebugLevel).
Str("filename", lf.Name).
Msg("File already matched")
}
m.ScanSummaryLogger.LogFileNotMatched(lf, "Already matched")
return
}
// Check if the local file has a title
if lf.GetParsedTitle() == "" {
if m.ScanLogger != nil {
m.ScanLogger.LogMatcher(zerolog.WarnLevel).
Str("filename", lf.Name).
Msg("File has no parsed title")
}
m.ScanSummaryLogger.LogFileNotMatched(lf, "No parsed title found")
return
}
// Create title variations
// Check cache for title variation
titleVariations := lf.GetTitleVariations()
if len(titleVariations) == 0 {
if m.ScanLogger != nil {
m.ScanLogger.LogMatcher(zerolog.WarnLevel).
Str("filename", lf.Name).
Msg("No titles found")
}
m.ScanSummaryLogger.LogFileNotMatched(lf, "No title variations found")
return
}
if m.ScanLogger != nil {
m.ScanLogger.LogMatcher(zerolog.DebugLevel).
Str("filename", lf.Name).
Interface("titleVariations", titleVariations).
Msg("Matching local file")
}
m.ScanSummaryLogger.LogDebug(lf, util.InlineSpewT(titleVariations))
//------------------
var levMatch *comparison.LevenshteinResult
var sdMatch *comparison.SorensenDiceResult
var jaccardMatch *comparison.JaccardResult
if m.Algorithm == "jaccard" {
// Using Jaccard
// Get the matchs for each title variation
compResults := lop.Map(titleVariations, func(title *string, _ int) *comparison.JaccardResult {
comps := make([]*comparison.JaccardResult, 0)
if len(m.MediaContainer.engTitles) > 0 {
if eng, found := comparison.FindBestMatchWithJaccard(title, m.MediaContainer.engTitles); found {
comps = append(comps, eng)
}
}
if len(m.MediaContainer.romTitles) > 0 {
if rom, found := comparison.FindBestMatchWithJaccard(title, m.MediaContainer.romTitles); found {
comps = append(comps, rom)
}
}
if len(m.MediaContainer.synonyms) > 0 {
if syn, found := comparison.FindBestMatchWithJaccard(title, m.MediaContainer.synonyms); found {
comps = append(comps, syn)
}
}
var res *comparison.JaccardResult
if len(comps) > 1 {
res = lo.Reduce(comps, func(prev *comparison.JaccardResult, curr *comparison.JaccardResult, _ int) *comparison.JaccardResult {
if prev.Rating > curr.Rating {
return prev
} else {
return curr
}
}, comps[0])
} else if len(comps) == 1 {
return comps[0]
}
return res
})
// Retrieve the match from all the title variations results
jaccardMatch = lo.Reduce(compResults, func(prev *comparison.JaccardResult, curr *comparison.JaccardResult, _ int) *comparison.JaccardResult {
if prev.Rating > curr.Rating {
return prev
} else {
return curr
}
}, compResults[0])
if m.ScanLogger != nil {
m.ScanLogger.LogMatcher(zerolog.DebugLevel).
Str("filename", lf.Name).
Interface("match", jaccardMatch).
Interface("results", compResults).
Msg("Jaccard match")
}
m.ScanSummaryLogger.LogComparison(lf, "Jaccard", *jaccardMatch.Value, "Rating", util.InlineSpewT(jaccardMatch.Rating))
} else if m.Algorithm == "sorensen-dice" {
// Using Sorensen-Dice
// Get the matchs for each title variation
compResults := lop.Map(titleVariations, func(title *string, _ int) *comparison.SorensenDiceResult {
comps := make([]*comparison.SorensenDiceResult, 0)
if len(m.MediaContainer.engTitles) > 0 {
if eng, found := comparison.FindBestMatchWithSorensenDice(title, m.MediaContainer.engTitles); found {
comps = append(comps, eng)
}
}
if len(m.MediaContainer.romTitles) > 0 {
if rom, found := comparison.FindBestMatchWithSorensenDice(title, m.MediaContainer.romTitles); found {
comps = append(comps, rom)
}
}
if len(m.MediaContainer.synonyms) > 0 {
if syn, found := comparison.FindBestMatchWithSorensenDice(title, m.MediaContainer.synonyms); found {
comps = append(comps, syn)
}
}
var res *comparison.SorensenDiceResult
if len(comps) > 1 {
res = lo.Reduce(comps, func(prev *comparison.SorensenDiceResult, curr *comparison.SorensenDiceResult, _ int) *comparison.SorensenDiceResult {
if prev.Rating > curr.Rating {
return prev
} else {
return curr
}
}, comps[0])
} else if len(comps) == 1 {
return comps[0]
}
return res
})
// Retrieve the match from all the title variations results
sdMatch = lo.Reduce(compResults, func(prev *comparison.SorensenDiceResult, curr *comparison.SorensenDiceResult, _ int) *comparison.SorensenDiceResult {
if prev.Rating > curr.Rating {
return prev
} else {
return curr
}
}, compResults[0])
//util.Spew(compResults)
if m.ScanLogger != nil {
m.ScanLogger.LogMatcher(zerolog.DebugLevel).
Str("filename", lf.Name).
Interface("match", sdMatch).
Interface("results", compResults).
Msg("Sorensen-Dice match")
}
m.ScanSummaryLogger.LogComparison(lf, "Sorensen-Dice", *sdMatch.Value, "Rating", util.InlineSpewT(sdMatch.Rating))
} else {
// Using Levenshtein
// Get the matches for each title variation
levCompResults := lop.Map(titleVariations, func(title *string, _ int) *comparison.LevenshteinResult {
comps := make([]*comparison.LevenshteinResult, 0)
if len(m.MediaContainer.engTitles) > 0 {
if eng, found := comparison.FindBestMatchWithLevenshtein(title, m.MediaContainer.engTitles); found {
comps = append(comps, eng)
}
}
if len(m.MediaContainer.romTitles) > 0 {
if rom, found := comparison.FindBestMatchWithLevenshtein(title, m.MediaContainer.romTitles); found {
comps = append(comps, rom)
}
}
if len(m.MediaContainer.synonyms) > 0 {
if syn, found := comparison.FindBestMatchWithLevenshtein(title, m.MediaContainer.synonyms); found {
comps = append(comps, syn)
}
}
var res *comparison.LevenshteinResult
if len(comps) > 1 {
res = lo.Reduce(comps, func(prev *comparison.LevenshteinResult, curr *comparison.LevenshteinResult, _ int) *comparison.LevenshteinResult {
if prev.Distance < curr.Distance {
return prev
} else {
return curr
}
}, comps[0])
} else if len(comps) == 1 {
return comps[0]
}
return res
})
levMatch = lo.Reduce(levCompResults, func(prev *comparison.LevenshteinResult, curr *comparison.LevenshteinResult, _ int) *comparison.LevenshteinResult {
if prev.Distance < curr.Distance {
return prev
} else {
return curr
}
}, levCompResults[0])
if m.ScanLogger != nil {
m.ScanLogger.LogMatcher(zerolog.DebugLevel).
Str("filename", lf.Name).
Interface("match", levMatch).
Interface("results", levCompResults).
Int("distance", levMatch.Distance).
Msg("Levenshtein match")
}
m.ScanSummaryLogger.LogComparison(lf, "Levenshtein", *levMatch.Value, "Distance", util.InlineSpewT(levMatch.Distance))
}
//------------------
var mediaMatch *anime.NormalizedMedia
var found bool
finalRating := 0.0
if sdMatch != nil {
finalRating = sdMatch.Rating
mediaMatch, found = m.MediaContainer.GetMediaFromTitleOrSynonym(sdMatch.Value)
} else if jaccardMatch != nil {
finalRating = jaccardMatch.Rating
mediaMatch, found = m.MediaContainer.GetMediaFromTitleOrSynonym(jaccardMatch.Value)
} else {
dice := metrics.NewSorensenDice()
dice.CaseSensitive = false
dice.NgramSize = 1
finalRating = dice.Compare(*levMatch.OriginalValue, *levMatch.Value)
m.ScanSummaryLogger.LogComparison(lf, "Sorensen-Dice", *levMatch.Value, "Final rating", util.InlineSpewT(finalRating))
mediaMatch, found = m.MediaContainer.GetMediaFromTitleOrSynonym(levMatch.Value)
}
// After setting the mediaId, add the hook invocation
// Invoke ScanLocalFileMatched hook
event := &ScanLocalFileMatchedEvent{
LocalFile: lf,
Score: finalRating,
Match: mediaMatch,
Found: found,
}
hook.GlobalHookManager.OnScanLocalFileMatched().Trigger(event)
lf = event.LocalFile
mediaMatch = event.Match
found = event.Found
finalRating = event.Score
// Check if the hook overrode the match
if event.DefaultPrevented {
if m.ScanLogger != nil {
if mediaMatch != nil {
m.ScanLogger.LogMatcher(zerolog.DebugLevel).
Str("filename", lf.Name).
Int("id", mediaMatch.ID).
Msg("Hook overrode match")
} else {
m.ScanLogger.LogMatcher(zerolog.DebugLevel).
Str("filename", lf.Name).
Msg("Hook overrode match, no match found")
}
}
if mediaMatch != nil {
lf.MediaId = mediaMatch.ID
} else {
lf.MediaId = 0
}
return
}
if !found {
if m.ScanLogger != nil {
m.ScanLogger.LogMatcher(zerolog.ErrorLevel).
Str("filename", lf.Name).
Msg("No media found from comparison result")
}
m.ScanSummaryLogger.LogFileNotMatched(lf, "No media found from comparison result")
return
}
if m.ScanLogger != nil {
m.ScanLogger.LogMatcher(zerolog.DebugLevel).
Str("filename", lf.Name).
Str("title", mediaMatch.GetTitleSafe()).
Int("id", mediaMatch.ID).
Msg("Best match found")
}
if finalRating < m.Threshold {
if m.ScanLogger != nil {
m.ScanLogger.LogMatcher(zerolog.DebugLevel).
Str("filename", lf.Name).
Float64("rating", finalRating).
Float64("threshold", m.Threshold).
Msg("Best match Sorensen-Dice rating too low, un-matching file")
}
m.ScanSummaryLogger.LogFailedMatch(lf, "Rating too low, threshold is "+fmt.Sprintf("%f", m.Threshold))
return
}
if m.ScanLogger != nil {
m.ScanLogger.LogMatcher(zerolog.DebugLevel).
Str("filename", lf.Name).
Float64("rating", finalRating).
Float64("threshold", m.Threshold).
Msg("Best match rating high enough, matching file")
}
m.ScanSummaryLogger.LogSuccessfullyMatched(lf, mediaMatch.ID)
lf.MediaId = mediaMatch.ID
}
//----------------------------------------------------------------------------------------------------------------------
// validateMatches compares groups of local files' titles with the media titles and
// un-matches the local files that have a lower rating than the highest rating within
// their group. Each media group is validated concurrently in its own goroutine.
func (m *Matcher) validateMatches() {
	if m.ScanLogger != nil {
		m.ScanLogger.LogMatcher(zerolog.InfoLevel).Msg("Validating matches")
	}

	// Group local files by the media ID they were matched to
	groups := lop.GroupBy(m.LocalFiles, func(localFile *anime.LocalFile) int {
		return localFile.MediaId
	})

	// Remove the group with unmatched media (ID 0); there is nothing to validate there
	delete(groups, 0)

	// Un-match files with lower ratings, one goroutine per media group
	p := pool.New()
	for mId, files := range groups {
		mId, files := mId, files // shadow loop variables: required for correct closure capture on Go < 1.22
		p.Go(func() {
			if len(files) > 0 {
				m.validateMatchGroup(mId, files)
			}
		})
	}
	p.Wait()
}
// validateMatchGroup compares the local files' titles under the same media
// with the media titles and un-matches the local files that have a lower rating.
// This is done to try and filter out wrong matches.
// Special and NC files are exempt from un-matching and contribute a 0 rating to the pool.
func (m *Matcher) validateMatchGroup(mediaId int, lfs []*anime.LocalFile) {
	media, found := m.MediaContainer.GetMediaFromId(mediaId)
	if !found {
		if m.ScanLogger != nil {
			m.ScanLogger.LogMatcher(zerolog.ErrorLevel).
				Int("mediaId", mediaId).
				Msg("Media not found in media container")
		}
		return
	}

	titles := media.GetAllTitles()

	// Compare all files' parsed title with the media title.
	// The highest rating found will be used as a ceiling to un-match lower rated files.
	p := pool.NewWithResults[float64]()
	for _, lf := range lfs {
		lf := lf // shadow loop variable: required for correct closure capture on Go < 1.22
		p.Go(func() float64 {
			t := lf.GetParsedTitle()
			// Specials and NC files do not contribute a real rating
			if comparison.ValueContainsSpecial(lf.Name) || comparison.ValueContainsNC(lf.Name) {
				return 0
			}
			compRes, ok := comparison.FindBestMatchWithSorensenDice(&t, titles)
			if ok {
				return compRes.Rating
			}
			return 0
		})
	}
	fileRatings := p.Wait()

	if m.ScanLogger != nil {
		m.ScanLogger.LogMatcher(zerolog.DebugLevel).
			Int("mediaId", mediaId).
			Any("fileRatings", fileRatings).
			Msg("File ratings")
	}

	// The ceiling used to un-match lower rated files
	highestRating := lo.Reduce(fileRatings, func(prev float64, curr float64, _ int) float64 {
		if prev > curr {
			return prev
		} else {
			return curr
		}
	}, 0.0)

	// Un-match files that have a lower rating than the ceiling
	// UNLESS they are Special or NC
	lop.ForEach(lfs, func(lf *anime.LocalFile, _ int) {
		if !comparison.ValueContainsSpecial(lf.Name) && !comparison.ValueContainsNC(lf.Name) {
			t := lf.GetParsedTitle()
			if compRes, ok := comparison.FindBestMatchWithSorensenDice(&t, titles); ok {
				// If the local file's rating is lower, un-match it
				// Unless the difference is less than 0.7 (very lax since a lot of anime have very long names that can be truncated)
				if compRes.Rating < highestRating && math.Abs(compRes.Rating-highestRating) > 0.7 {
					lf.MediaId = 0
					if m.ScanLogger != nil {
						m.ScanLogger.LogMatcher(zerolog.WarnLevel).
							Int("mediaId", mediaId).
							Str("filename", lf.Name).
							Float64("rating", compRes.Rating).
							Float64("highestRating", highestRating).
							Msg("Rating does not match parameters, un-matching file")
					}
					m.ScanSummaryLogger.LogUnmatched(lf, fmt.Sprintf("Rating does not match parameters. File rating: %f, highest rating: %f", compRes.Rating, highestRating))
				} else {
					if m.ScanLogger != nil {
						m.ScanLogger.LogMatcher(zerolog.DebugLevel).
							Int("mediaId", mediaId).
							Str("filename", lf.Name).
							Float64("rating", compRes.Rating).
							Float64("highestRating", highestRating).
							Msg("Rating matches parameters, keeping file matched")
					}
					m.ScanSummaryLogger.LogMatchValidated(lf, mediaId)
				}
			}
		}
	})
}

View File

@@ -0,0 +1,244 @@
package scanner
import (
"context"
"seanime/internal/api/anilist"
"seanime/internal/library/anime"
"seanime/internal/test_utils"
"seanime/internal/util"
"testing"
"github.com/samber/lo"
"github.com/stretchr/testify/assert"
)
// Add more media to this file if needed
// scanner_test_mock_data.json
// TestMatcher_MatchLocalFileWithMedia checks that the Matcher assigns the
// expected AniList media ID to every local file in each test case, using the
// mock AniList client (fixtures come from scanner_test_mock_data.json).
func TestMatcher_MatchLocalFileWithMedia(t *testing.T) {
	anilistClient := anilist.TestGetMockAnilistClient()
	animeCollection, err := anilistClient.AnimeCollectionWithRelations(context.Background(), nil)
	if err != nil {
		t.Fatal(err.Error())
	}
	allMedia := animeCollection.GetAllAnime()
	// Library root used to derive relative paths; no files are actually read from disk
	dir := "E:/Anime"
	tests := []struct {
		name            string
		paths           []string
		expectedMediaId int
	}{
		{
			// These local files are from "86 - Eighty Six Part 2" but should be matched with "86 - Eighty Six Part 1"
			// because there is no indication for the part. However, the FileHydrator will fix this issue.
			name: "should match with media id 116589",
			paths: []string{
				"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 20v2 (1080p) [30072859].mkv",
				"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 21v2 (1080p) [4B1616A5].mkv",
				"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 22v2 (1080p) [58BF43B4].mkv",
				"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 23v2 (1080p) [D94B4894].mkv",
			},
			expectedMediaId: 116589, // 86 - Eighty Six Part 1
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			scanLogger, err := NewConsoleScanLogger()
			if err != nil {
				t.Fatal("expected result, got error:", err.Error())
			}
			// +---------------------+
			// |     Local Files     |
			// +---------------------+
			var lfs []*anime.LocalFile
			for _, path := range tt.paths {
				lf := anime.NewLocalFile(path, dir)
				lfs = append(lfs, lf)
			}
			// +---------------------+
			// |   MediaContainer    |
			// +---------------------+
			mc := NewMediaContainer(&MediaContainerOptions{
				AllMedia:   allMedia,
				ScanLogger: scanLogger,
			})
			// +---------------------+
			// |       Matcher       |
			// +---------------------+
			matcher := &Matcher{
				LocalFiles:         lfs,
				MediaContainer:     mc,
				CompleteAnimeCache: nil,
				Logger:             util.NewLogger(),
				ScanLogger:         scanLogger,
				ScanSummaryLogger:  nil,
			}
			// Every local file must end up with the expected media ID
			err = matcher.MatchLocalFilesWithMedia()
			if assert.NoError(t, err, "Error while matching local files") {
				for _, lf := range lfs {
					if lf.MediaId != tt.expectedMediaId {
						t.Fatalf("expected media id %d, got %d", tt.expectedMediaId, lf.MediaId)
					}
					t.Logf("local file: %s,\nmedia id: %d\n", lf.Name, lf.MediaId)
				}
			}
		})
	}
}
// TestMatcher_MatchLocalFileWithMedia2 is an integration test that runs the
// Matcher against a real AniList collection (requires the Anilist test
// provider). Missing entries are added to the collection before matching.
func TestMatcher_MatchLocalFileWithMedia2(t *testing.T) {
	test_utils.InitTestProvider(t, test_utils.Anilist())
	anilistClient := anilist.NewAnilistClient(test_utils.ConfigData.Provider.AnilistJwt)
	animeCollection, err := anilistClient.AnimeCollectionWithRelations(context.Background(), &test_utils.ConfigData.Provider.AnilistUsername)
	if err != nil {
		t.Fatal(err.Error())
	}
	// Library root used to derive relative paths; no files are actually read from disk
	dir := "E:/Anime"
	tests := []struct {
		name            string
		paths           []string
		expectedMediaId int
		otherMediaIds   []int // extra IDs to make available in the collection before matching
	}{
		{
			name: "Kono Subarashii Sekai ni Shukufuku wo! - 21202",
			paths: []string{
				"E:/Anime/Kono Subarashii Sekai ni Shukufuku wo!/Kono Subarashii Sekai ni Shukufuku wo! (01-10) [1080p] (Batch)/[HorribleSubs] Kono Subarashii Sekai ni Shukufuku wo! - 01 [1080p].mkv",
				"E:/Anime/Kono Subarashii Sekai ni Shukufuku wo!/Kono Subarashii Sekai ni Shukufuku wo! (01-10) [1080p] (Batch)/[HorribleSubs] Kono Subarashii Sekai ni Shukufuku wo! - 02 [1080p].mkv",
				"E:/Anime/Kono Subarashii Sekai ni Shukufuku wo!/Kono Subarashii Sekai ni Shukufuku wo! (01-10) [1080p] (Batch)/[HorribleSubs] Kono Subarashii Sekai ni Shukufuku wo! - 03 [1080p].mkv",
			},
			expectedMediaId: 21202, // Kono Subarashii Sekai ni Shukufuku wo! (season 1)
		},
		{
			name: "Kono Subarashii Sekai ni Shukufuku wo! 2 - 21699",
			paths: []string{
				"E:/Anime/Kono Subarashii Sekai ni Shukufuku wo! 2/KonoSuba.God's.Blessing.On.This.Wonderful.World.S02.1080p.BluRay.10-Bit.Dual-Audio.FLAC2.0.x265-YURASUKA/KonoSuba.God's.Blessing.On.This.Wonderful.World.S02E01.1080p.BluRay.10-Bit.Dual-Audio.FLAC2.0.x265-YURASUKA.mkv",
				"E:/Anime/Kono Subarashii Sekai ni Shukufuku wo! 2/KonoSuba.God's.Blessing.On.This.Wonderful.World.S02.1080p.BluRay.10-Bit.Dual-Audio.FLAC2.0.x265-YURASUKA/KonoSuba.God's.Blessing.On.This.Wonderful.World.S02E02.1080p.BluRay.10-Bit.Dual-Audio.FLAC2.0.x265-YURASUKA.mkv",
				"E:/Anime/Kono Subarashii Sekai ni Shukufuku wo! 2/KonoSuba.God's.Blessing.On.This.Wonderful.World.S02.1080p.BluRay.10-Bit.Dual-Audio.FLAC2.0.x265-YURASUKA/KonoSuba.God's.Blessing.On.This.Wonderful.World.S02E03.1080p.BluRay.10-Bit.Dual-Audio.FLAC2.0.x265-YURASUKA.mkv",
			},
			expectedMediaId: 21699,
		},
		{
			name: "Demon Slayer: Kimetsu no Yaiba Entertainment District Arc - 142329",
			paths: []string{
				"E:/Anime/Kimetsu no Yaiba Yuukaku-hen/[Salieri] Demon Slayer - Kimetsu No Yaiba - S3 - Entertainment District - BD (1080P) (HDR) [Dual-Audio]/[Salieri] Demon Slayer S3 - Kimetsu No Yaiba- Entertainment District - 03 (1080P) (HDR) [Dual-Audio].mkv",
			},
			expectedMediaId: 142329,
		},
		{
			name: "KnY 145139",
			paths: []string{
				"E:/Anime/Kimetsu no Yaiba Katanakaji no Sato-hen/Demon Slayer S03 1080p Dual Audio BDRip 10 bits DD x265-EMBER/S03E07-Awful Villain [703A5C5B].mkv",
			},
			expectedMediaId: 145139,
		},
		{
			name: "MT 108465",
			paths: []string{
				"E:/Anime/Mushoku Tensei Isekai Ittara Honki Dasu/Mushoku Tensei S01+SP 1080p Dual Audio BDRip 10 bits DDP x265-EMBER/Mushoku Tensei S01P01 1080p Dual Audio BDRip 10 bits DD x265-EMBER/S01E01-Jobless Reincarnation V2 [911C3607].mkv",
			},
			expectedMediaId: 108465,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// Add media to collection if it doesn't exist
			allMedia := animeCollection.GetAllAnime()
			hasExpectedMediaId := false
			for _, media := range allMedia {
				if media.ID == tt.expectedMediaId {
					hasExpectedMediaId = true
					break
				}
			}
			if !hasExpectedMediaId {
				anilist.TestAddAnimeCollectionWithRelationsEntry(animeCollection, tt.expectedMediaId, anilist.TestModifyAnimeCollectionEntryInput{Status: lo.ToPtr(anilist.MediaListStatusCurrent)}, anilistClient)
				allMedia = animeCollection.GetAllAnime()
			}
			// Same for any additional IDs the test case depends on
			for _, otherMediaId := range tt.otherMediaIds {
				hasOtherMediaId := false
				for _, media := range allMedia {
					if media.ID == otherMediaId {
						hasOtherMediaId = true
						break
					}
				}
				if !hasOtherMediaId {
					anilist.TestAddAnimeCollectionWithRelationsEntry(animeCollection, otherMediaId, anilist.TestModifyAnimeCollectionEntryInput{Status: lo.ToPtr(anilist.MediaListStatusCurrent)}, anilistClient)
					allMedia = animeCollection.GetAllAnime()
				}
			}
			scanLogger, err := NewConsoleScanLogger()
			if err != nil {
				t.Fatal("expected result, got error:", err.Error())
			}
			// +---------------------+
			// |     Local Files     |
			// +---------------------+
			var lfs []*anime.LocalFile
			for _, path := range tt.paths {
				lf := anime.NewLocalFile(path, dir)
				lfs = append(lfs, lf)
			}
			// +---------------------+
			// |   MediaContainer    |
			// +---------------------+
			mc := NewMediaContainer(&MediaContainerOptions{
				AllMedia:   allMedia,
				ScanLogger: scanLogger,
			})
			// +---------------------+
			// |       Matcher       |
			// +---------------------+
			matcher := &Matcher{
				LocalFiles:         lfs,
				MediaContainer:     mc,
				CompleteAnimeCache: nil,
				Logger:             util.NewLogger(),
				ScanLogger:         scanLogger,
				ScanSummaryLogger:  nil,
			}
			// Every local file must end up with the expected media ID
			err = matcher.MatchLocalFilesWithMedia()
			if assert.NoError(t, err, "Error while matching local files") {
				for _, lf := range lfs {
					if lf.MediaId != tt.expectedMediaId {
						t.Fatalf("expected media id %d, got %d", tt.expectedMediaId, lf.MediaId)
					}
					t.Logf("local file: %s,\nmedia id: %d\n", lf.Name, lf.MediaId)
				}
			}
		})
	}
}

View File

@@ -0,0 +1,146 @@
package scanner
import (
"github.com/rs/zerolog"
"github.com/samber/lo"
lop "github.com/samber/lo/parallel"
"seanime/internal/api/anilist"
"seanime/internal/library/anime"
"seanime/internal/util/comparison"
"strings"
)
type (
	// MediaContainerOptions holds the inputs for NewMediaContainer.
	MediaContainerOptions struct {
		AllMedia []*anilist.CompleteAnime
		// ScanLogger is optional; when nil, scan logging is skipped
		ScanLogger *ScanLogger
	}

	// MediaContainer holds all media (and their eligible relations) in a
	// normalized form, along with pre-computed title lists used by the Matcher
	// for title comparison.
	MediaContainer struct {
		NormalizedMedia []*anime.NormalizedMedia
		ScanLogger      *ScanLogger
		engTitles       []*string // non-empty English titles collected from NormalizedMedia
		romTitles       []*string // non-empty Romaji titles collected from NormalizedMedia
		synonyms        []*string // synonyms kept only if they contain a season indicator
		allMedia        []*anilist.CompleteAnime
	}
)
// NewMediaContainer will create a list of all English titles, Romaji titles, and synonyms from all anilist.BaseAnime (used by Matcher).
//
// The list will include all anilist.BaseAnime and their relations (prequels, sequels, spin-offs, etc...) as NormalizedMedia.
//
// It also provides helper functions to get a NormalizedMedia from a title or synonym (used by FileHydrator).
func NewMediaContainer(opts *MediaContainerOptions) *MediaContainer {
	mc := new(MediaContainer)
	mc.ScanLogger = opts.ScanLogger

	mc.NormalizedMedia = make([]*anime.NormalizedMedia, 0)

	// De-duplicate media and their eligible relations by AniList ID
	normalizedMediaMap := make(map[int]*anime.NormalizedMedia)
	for _, m := range opts.AllMedia {
		normalizedMediaMap[m.ID] = anime.NewNormalizedMedia(m.ToBaseAnime())
		if m.Relations != nil && m.Relations.Edges != nil && len(m.Relations.Edges) > 0 {
			for _, edgeM := range m.Relations.Edges {
				if edgeM.Node == nil || edgeM.Node.Format == nil || edgeM.RelationType == nil {
					continue
				}
				// Only keep relations in a watchable format
				if *edgeM.Node.Format != anilist.MediaFormatMovie &&
					*edgeM.Node.Format != anilist.MediaFormatOva &&
					*edgeM.Node.Format != anilist.MediaFormatSpecial &&
					*edgeM.Node.Format != anilist.MediaFormatTv {
					continue
				}
				// Only keep direct story relations
				if *edgeM.RelationType != anilist.MediaRelationPrequel &&
					*edgeM.RelationType != anilist.MediaRelationSequel &&
					*edgeM.RelationType != anilist.MediaRelationSpinOff &&
					*edgeM.RelationType != anilist.MediaRelationAlternative &&
					*edgeM.RelationType != anilist.MediaRelationParent {
					continue
				}
				// DEVNOTE: Edges fetched from the AniList AnimeCollection query do not contain NextAiringEpisode
				// Make sure we don't overwrite the original media in the map that contains NextAiringEpisode
				if _, found := normalizedMediaMap[edgeM.Node.ID]; !found {
					normalizedMediaMap[edgeM.Node.ID] = anime.NewNormalizedMedia(edgeM.Node)
				}
			}
		}
	}
	for _, m := range normalizedMediaMap {
		mc.NormalizedMedia = append(mc.NormalizedMedia, m)
	}

	// Collect titles in parallel; media without a title contribute an empty
	// string that is filtered out below
	engTitles := lop.Map(mc.NormalizedMedia, func(m *anime.NormalizedMedia, index int) *string {
		if m.Title.English != nil {
			return m.Title.English
		}
		return new(string)
	})
	romTitles := lop.Map(mc.NormalizedMedia, func(m *anime.NormalizedMedia, index int) *string {
		if m.Title.Romaji != nil {
			return m.Title.Romaji
		}
		return new(string)
	})
	_synonymsArr := lop.Map(mc.NormalizedMedia, func(m *anime.NormalizedMedia, index int) []*string {
		if m.Synonyms != nil {
			return m.Synonyms
		}
		return make([]*string, 0)
	})
	synonyms := lo.Flatten(_synonymsArr)

	engTitles = lo.Filter(engTitles, func(s *string, i int) bool { return s != nil && len(*s) > 0 })
	romTitles = lo.Filter(romTitles, func(s *string, i int) bool { return s != nil && len(*s) > 0 })
	// Only keep synonyms that contain a season indicator.
	// The nil/empty guard mirrors the filters above: synonyms come straight from
	// the API payload, and a nil entry would panic on dereference.
	synonyms = lo.Filter(synonyms, func(s *string, i int) bool { return s != nil && len(*s) > 0 && comparison.ValueContainsSeason(*s) })

	mc.engTitles = engTitles
	mc.romTitles = romTitles
	mc.synonyms = synonyms
	mc.allMedia = opts.AllMedia

	if mc.ScanLogger != nil {
		mc.ScanLogger.LogMediaContainer(zerolog.InfoLevel).
			Any("inputCount", len(opts.AllMedia)).
			Any("mediaCount", len(mc.NormalizedMedia)).
			Any("titles", len(mc.engTitles)+len(mc.romTitles)+len(mc.synonyms)).
			Msg("Created media container")
	}

	return mc
}
// GetMediaFromTitleOrSynonym performs a case-insensitive lookup of the first
// NormalizedMedia whose English title, Romaji title, or one of its synonyms
// equals the given title. Returns (nil, false) when title is nil or no media matches.
func (mc *MediaContainer) GetMediaFromTitleOrSynonym(title *string) (*anime.NormalizedMedia, bool) {
	if title == nil {
		return nil, false
	}
	needle := strings.ToLower(*title)

	// Predicate checking all known names of a media against the needle
	matches := func(m *anime.NormalizedMedia) bool {
		if m.HasEnglishTitle() && strings.ToLower(*m.Title.English) == needle {
			return true
		}
		if m.HasRomajiTitle() && strings.ToLower(*m.Title.Romaji) == needle {
			return true
		}
		if m.HasSynonyms() {
			for _, syn := range m.Synonyms {
				if strings.ToLower(*syn) == needle {
					return true
				}
			}
		}
		return false
	}

	return lo.Find(mc.NormalizedMedia, matches)
}
// GetMediaFromId returns the NormalizedMedia with the given AniList ID,
// or (nil, false) when no media in the container has that ID.
func (mc *MediaContainer) GetMediaFromId(id int) (*anime.NormalizedMedia, bool) {
	for _, m := range mc.NormalizedMedia {
		if m.ID == id {
			return m, true
		}
	}
	return nil, false
}

View File

@@ -0,0 +1,352 @@
package scanner
import (
"context"
"errors"
"seanime/internal/api/anilist"
"seanime/internal/api/mal"
"seanime/internal/api/metadata"
"seanime/internal/hook"
"seanime/internal/library/anime"
"seanime/internal/platforms/platform"
"seanime/internal/util"
"seanime/internal/util/limiter"
"seanime/internal/util/parallel"
"time"
"github.com/davecgh/go-spew/spew"
"github.com/rs/zerolog"
"github.com/samber/lo"
lop "github.com/samber/lo/parallel"
)
// MediaFetcher holds all anilist.BaseAnime that will be used for the comparison process
type MediaFetcher struct {
	// AllMedia is the pool of media used for matching: the user's collection
	// entries, plus media fetched from local file titles when enhanced scanning is on
	AllMedia []*anilist.CompleteAnime
	// CollectionMediaIds are the IDs of the media gathered from the user's collection
	CollectionMediaIds []int
	UnknownMediaIds    []int // Media IDs that are not in the user's collection
	// AnimeCollectionWithRelations is the user's AniList collection fetched when the fetcher was created
	AnimeCollectionWithRelations *anilist.AnimeCollectionWithRelations
	// ScanLogger is optional; when nil, scan logging is skipped
	ScanLogger *ScanLogger
}
// MediaFetcherOptions holds the inputs for NewMediaFetcher.
// All fields except Enhanced, DisableAnimeCollection and ScanLogger are required.
type MediaFetcherOptions struct {
	// Enhanced enables fetching additional media based on local file titles
	Enhanced         bool
	Platform         platform.Platform
	MetadataProvider metadata.Provider
	LocalFiles       []*anime.LocalFile
	// CompleteAnimeCache is populated as a side effect of the fetch process
	CompleteAnimeCache *anilist.CompleteAnimeCache
	Logger             *zerolog.Logger
	AnilistRateLimiter *limiter.Limiter
	// DisableAnimeCollection skips adding the user's collection entries to AllMedia
	DisableAnimeCollection bool
	// ScanLogger is optional; when nil, scan logging is skipped
	ScanLogger *ScanLogger
}
// NewMediaFetcher
// Calling this method will kickstart the fetch process
// When enhancing is false, MediaFetcher.AllMedia will be all anilist.BaseAnime from the user's AniList collection.
// When enhancing is true, MediaFetcher.AllMedia will be anilist.BaseAnime for each unique, parsed anime title and their relations.
//
// Side effect: opts.CompleteAnimeCache is populated with every fetched media.
func NewMediaFetcher(ctx context.Context, opts *MediaFetcherOptions) (ret *MediaFetcher, retErr error) {
	// A panic anywhere below is converted into an error return
	defer util.HandlePanicInModuleWithError("library/scanner/NewMediaFetcher", &retErr)
	// Enhanced, DisableAnimeCollection and ScanLogger are the only optional fields
	if opts.Platform == nil ||
		opts.LocalFiles == nil ||
		opts.CompleteAnimeCache == nil ||
		opts.MetadataProvider == nil ||
		opts.Logger == nil ||
		opts.AnilistRateLimiter == nil {
		return nil, errors.New("missing options")
	}
	mf := new(MediaFetcher)
	mf.ScanLogger = opts.ScanLogger
	opts.Logger.Debug().
		Any("enhanced", opts.Enhanced).
		Msg("media fetcher: Creating media fetcher")
	if mf.ScanLogger != nil {
		mf.ScanLogger.LogMediaFetcher(zerolog.InfoLevel).
			Msg("Creating media fetcher")
	}
	// Invoke ScanMediaFetcherStarted hook
	// The hook can toggle enhanced scanning (Enhanced is read back from the event)
	event := &ScanMediaFetcherStartedEvent{
		Enhanced: opts.Enhanced,
	}
	hook.GlobalHookManager.OnScanMediaFetcherStarted().Trigger(event)
	opts.Enhanced = event.Enhanced
	// +---------------------+
	// |      All media      |
	// +---------------------+
	// Fetch latest user's AniList collection
	animeCollectionWithRelations, err := opts.Platform.GetAnimeCollectionWithRelations(ctx)
	if err != nil {
		return nil, err
	}
	mf.AnimeCollectionWithRelations = animeCollectionWithRelations
	mf.AllMedia = make([]*anilist.CompleteAnime, 0)
	if !opts.DisableAnimeCollection {
		// For each collection entry, append the media to AllMedia
		for _, list := range animeCollectionWithRelations.GetMediaListCollection().GetLists() {
			for _, entry := range list.GetEntries() {
				mf.AllMedia = append(mf.AllMedia, entry.GetMedia())
				// +---------------------+
				// |        Cache        |
				// +---------------------+
				// We assume the CompleteAnimeCache is empty. Add media to cache.
				opts.CompleteAnimeCache.Set(entry.GetMedia().ID, entry.GetMedia())
			}
		}
	}
	if mf.ScanLogger != nil {
		mf.ScanLogger.LogMediaFetcher(zerolog.DebugLevel).
			Int("count", len(mf.AllMedia)).
			Msg("Fetched media from AniList collection")
	}
	//--------------------------------------------
	// Get the media IDs from the collection
	mf.CollectionMediaIds = lop.Map(mf.AllMedia, func(m *anilist.CompleteAnime, index int) int {
		return m.ID
	})
	//--------------------------------------------
	// +---------------------+
	// |      Enhanced       |
	// +---------------------+
	// If enhancing is on, scan media from local files and get their relations
	if opts.Enhanced {
		_, ok := FetchMediaFromLocalFiles(
			ctx,
			opts.Platform,
			opts.LocalFiles,
			opts.CompleteAnimeCache, // CompleteAnimeCache will be populated on success
			opts.MetadataProvider,
			opts.AnilistRateLimiter,
			mf.ScanLogger,
		)
		if ok {
			// We assume the CompleteAnimeCache is populated. We overwrite AllMedia with the cache content.
			// This is because the cache will contain all media from the user's collection AND scanned ones
			mf.AllMedia = make([]*anilist.CompleteAnime, 0)
			opts.CompleteAnimeCache.Range(func(key int, value *anilist.CompleteAnime) bool {
				mf.AllMedia = append(mf.AllMedia, value)
				return true
			})
		}
	}
	// +---------------------+
	// |    Unknown media    |
	// +---------------------+
	// Media that are not in the user's collection
	// Get the media that are not in the user's collection
	unknownMedia := lo.Filter(mf.AllMedia, func(m *anilist.CompleteAnime, _ int) bool {
		return !lo.Contains(mf.CollectionMediaIds, m.ID)
	})
	// Get the media IDs that are not in the user's collection
	mf.UnknownMediaIds = lop.Map(unknownMedia, func(m *anilist.CompleteAnime, _ int) int {
		return m.ID
	})
	if mf.ScanLogger != nil {
		mf.ScanLogger.LogMediaFetcher(zerolog.DebugLevel).
			Int("unknownMediaCount", len(mf.UnknownMediaIds)).
			Int("allMediaCount", len(mf.AllMedia)).
			Msg("Finished creating media fetcher")
	}
	// Invoke ScanMediaFetcherCompleted hook
	// The hook may replace AllMedia / UnknownMediaIds before they are returned
	completedEvent := &ScanMediaFetcherCompletedEvent{
		AllMedia:        mf.AllMedia,
		UnknownMediaIds: mf.UnknownMediaIds,
	}
	_ = hook.GlobalHookManager.OnScanMediaFetcherCompleted().Trigger(completedEvent)
	mf.AllMedia = completedEvent.AllMedia
	mf.UnknownMediaIds = completedEvent.UnknownMediaIds
	return mf, nil
}
//----------------------------------------------------------------------------------------------------------------------
// FetchMediaFromLocalFiles gets media and their relations from local file titles.
// It retrieves unique titles from local files,
// fetches mal.SearchResultAnime from MAL,
// uses these search results to get AniList IDs using metadata.AnimeMetadata mappings,
// queries AniList to retrieve all anilist.BaseAnime using anilist.GetBaseAnimeById and their relations using anilist.FetchMediaTree.
// It does not return an error if one of the steps fails.
// It returns the scanned media and a boolean indicating whether the process was successful.
func FetchMediaFromLocalFiles(
	ctx context.Context,
	platform platform.Platform,
	localFiles []*anime.LocalFile,
	completeAnime *anilist.CompleteAnimeCache,
	metadataProvider metadata.Provider,
	anilistRateLimiter *limiter.Limiter,
	scanLogger *ScanLogger,
) (ret []*anilist.CompleteAnime, ok bool) {
	// A panic anywhere below is converted into a (nil, false) result
	defer util.HandlePanicInModuleThen("library/scanner/FetchMediaFromLocalFiles", func() {
		ok = false
	})

	if scanLogger != nil {
		scanLogger.LogMediaFetcher(zerolog.DebugLevel).
			Str("module", "Enhanced").
			Msg("Fetching media from local files")
	}

	// Rate limiters for the MAL search and Animap metadata requests
	rateLimiter := limiter.NewLimiter(time.Second, 20)
	rateLimiter2 := limiter.NewLimiter(time.Second, 20)

	// Get titles
	titles := anime.GetUniqueAnimeTitlesFromLocalFiles(localFiles)
	if scanLogger != nil {
		scanLogger.LogMediaFetcher(zerolog.DebugLevel).
			Str("module", "Enhanced").
			Str("context", spew.Sprint(titles)).
			Msg("Parsed titles from local files")
	}

	// +---------------------+
	// |     MyAnimeList     |
	// +---------------------+

	// Get MAL media from titles; abort the whole process if any search fails
	malSR := parallel.NewSettledResults[string, *mal.SearchResultAnime](titles)
	malSR.AllSettled(func(title string, index int) (*mal.SearchResultAnime, error) {
		rateLimiter.Wait()
		return mal.AdvancedSearchWithMAL(title)
	})
	malRes, ok := malSR.GetFulfilledResults()
	if !ok {
		return nil, false
	}

	// Get duplicate-free version of MAL media
	malMedia := lo.UniqBy(*malRes, func(res *mal.SearchResultAnime) int { return res.ID })
	// Get the MAL media IDs
	malIds := lop.Map(malMedia, func(n *mal.SearchResultAnime, index int) int { return n.ID })
	if scanLogger != nil {
		scanLogger.LogMediaFetcher(zerolog.DebugLevel).
			Str("module", "Enhanced").
			Str("context", spew.Sprint(lo.Map(malMedia, func(n *mal.SearchResultAnime, _ int) string {
				return n.Name
			}))).
			Msg("Fetched MAL media from titles")
	}

	// +---------------------+
	// |       Animap        |
	// +---------------------+

	// Get Animap mappings for each MAL ID and store them in `metadataProvider`
	// This step is necessary because MAL doesn't provide AniList IDs and some MAL media don't exist on AniList
	// NOTE(review): assumes the metadata cache is safe for concurrent use — it is written from parallel goroutines here
	lop.ForEach(malIds, func(id int, index int) {
		rateLimiter2.Wait()
		_, _ = metadataProvider.GetCache().GetOrSet(metadata.GetAnimeMetadataCacheKey(metadata.MalPlatform, id), func() (*metadata.AnimeMetadata, error) {
			res, err := metadataProvider.GetAnimeMetadata(metadata.MalPlatform, id)
			return res, err
		})
	})

	// +---------------------+
	// |       AniList       |
	// +---------------------+

	// Retrieve the AniList IDs from the Animap mappings stored in the cache
	anilistIds := make([]int, 0)
	metadataProvider.GetCache().Range(func(key string, value *metadata.AnimeMetadata) bool {
		if value != nil {
			anilistIds = append(anilistIds, value.GetMappings().AnilistId)
		}
		return true
	})

	// Fetch all media from the AniList IDs.
	// DEVNOTE: results are collected through lop.Map — each goroutine writes only
	// its own slot — instead of appending to a shared slice from lop.ForEach,
	// which is a data race.
	fetched := lop.Map(anilistIds, func(id int, index int) *anilist.CompleteAnime {
		anilistRateLimiter.Wait()
		media, err := platform.GetAnimeWithRelations(ctx, id)
		if err != nil {
			if scanLogger != nil {
				scanLogger.LogMediaFetcher(zerolog.WarnLevel).
					Str("module", "Enhanced").
					Int("id", id).
					Msg("Failed to fetch Anilist media from MAL id")
			}
			return nil
		}
		if scanLogger != nil {
			scanLogger.LogMediaFetcher(zerolog.DebugLevel).
				Str("module", "Enhanced").
				Str("title", media.GetTitleSafe()).
				Msg("Fetched Anilist media from MAL id")
		}
		return media
	})
	// Drop the failed fetches
	anilistMedia := lo.Filter(fetched, func(m *anilist.CompleteAnime, _ int) bool { return m != nil })

	if scanLogger != nil {
		scanLogger.LogMediaFetcher(zerolog.DebugLevel).
			Str("module", "Enhanced").
			Str("context", spew.Sprint(lo.Map(anilistMedia, func(n *anilist.CompleteAnime, _ int) string {
				return n.GetTitleSafe()
			}))).
			Msg("Fetched Anilist media from MAL ids")
	}

	// +---------------------+
	// |      MediaTree      |
	// +---------------------+

	// Create a new tree that will hold the fetched relations
	// /!\ This is redundant because we already have a cache, but `FetchMediaTree` needs it
	tree := anilist.NewCompleteAnimeRelationTree()
	start := time.Now()
	// For each media, fetch its relations
	// The relations are fetched in parallel and added to `completeAnime`
	lop.ForEach(anilistMedia, func(m *anilist.CompleteAnime, index int) {
		// We ignore errors because we want to continue even if one of the media fails
		_ = m.FetchMediaTree(anilist.FetchMediaTreeAll, platform.GetAnilistClient(), anilistRateLimiter, tree, completeAnime)
	})

	// +---------------------+
	// |        Cache        |
	// +---------------------+

	// Retrieve all media from the cache (collection entries + scanned media + relations)
	scanned := make([]*anilist.CompleteAnime, 0)
	completeAnime.Range(func(key int, value *anilist.CompleteAnime) bool {
		scanned = append(scanned, value)
		return true
	})

	if scanLogger != nil {
		scanLogger.LogMediaFetcher(zerolog.InfoLevel).
			Str("module", "Enhanced").
			Int("ms", int(time.Since(start).Milliseconds())).
			Int("count", len(scanned)).
			Str("context", spew.Sprint(lo.Map(scanned, func(n *anilist.CompleteAnime, _ int) string {
				return n.GetTitleSafe()
			}))).
			Msg("Finished fetching media from local files")
	}

	return scanned, true
}

View File

@@ -0,0 +1,273 @@
package scanner
import (
"seanime/internal/api/anilist"
"seanime/internal/api/metadata"
"seanime/internal/library/anime"
"seanime/internal/platforms/anilist_platform"
"seanime/internal/test_utils"
"seanime/internal/util"
"seanime/internal/util/limiter"
"testing"
"github.com/samber/lo"
"github.com/stretchr/testify/assert"
)
// TestNewMediaFetcher checks that the media fetcher can build a media container
// from parsed local files, both with and without enhanced scanning and with the
// user's anime collection disabled.
func TestNewMediaFetcher(t *testing.T) {
	test_utils.InitTestProvider(t, test_utils.Anilist())

	anilistClient := anilist.TestGetMockAnilistClient()
	logger := util.NewLogger()
	anilistPlatform := anilist_platform.NewAnilistPlatform(anilistClient, logger)
	metadataProvider := metadata.GetMockProvider(t)
	completeAnimeCache := anilist.NewCompleteAnimeCache()
	anilistRateLimiter := limiter.NewAnilistLimiter()

	const libraryDir = "E:/Anime"

	cases := []struct {
		name                   string
		paths                  []string
		enhanced               bool
		disableAnimeCollection bool
	}{
		{
			name: "86 - Eighty Six Part 1 & 2",
			paths: []string{
				"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 20v2 (1080p) [30072859].mkv",
				"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 21v2 (1080p) [4B1616A5].mkv",
				"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 22v2 (1080p) [58BF43B4].mkv",
				"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 23v2 (1080p) [D94B4894].mkv",
			},
			enhanced:               false,
			disableAnimeCollection: false,
		},
		{
			name: "86 - Eighty Six Part 1 & 2",
			paths: []string{
				"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 20v2 (1080p) [30072859].mkv",
				"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 21v2 (1080p) [4B1616A5].mkv",
				"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 22v2 (1080p) [58BF43B4].mkv",
				"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 23v2 (1080p) [D94B4894].mkv",
			},
			enhanced:               true,
			disableAnimeCollection: true,
		},
	}

	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			scanLogger, err := NewConsoleScanLogger()
			if err != nil {
				t.Fatal("expected result, got error:", err.Error())
			}

			// Build local files from the test paths.
			localFiles := make([]*anime.LocalFile, 0, len(tc.paths))
			for _, p := range tc.paths {
				localFiles = append(localFiles, anime.NewLocalFile(p, libraryDir))
			}

			// Run the media fetcher against the mock platform.
			mf, err := NewMediaFetcher(t.Context(), &MediaFetcherOptions{
				Enhanced:               tc.enhanced,
				Platform:               anilistPlatform,
				LocalFiles:             localFiles,
				CompleteAnimeCache:     completeAnimeCache,
				MetadataProvider:       metadataProvider,
				Logger:                 util.NewLogger(),
				AnilistRateLimiter:     anilistRateLimiter,
				ScanLogger:             scanLogger,
				DisableAnimeCollection: tc.disableAnimeCollection,
			})
			if err != nil {
				t.Fatal("expected result, got error:", err.Error())
			}

			// Wrap the fetched media in a container and log the normalized titles.
			mc := NewMediaContainer(&MediaContainerOptions{
				AllMedia:   mf.AllMedia,
				ScanLogger: scanLogger,
			})
			for _, m := range mc.NormalizedMedia {
				t.Log(m.GetTitleSafe())
			}
		})
	}
}
// TestNewEnhancedMediaFetcher checks that the media fetcher can build a media
// container while writing to a file-based scan logger.
// NOTE(review): despite the name, the only test case runs with enhanced: false —
// confirm whether an enhanced: true case was intended here.
func TestNewEnhancedMediaFetcher(t *testing.T) {
	anilistClient := anilist.TestGetMockAnilistClient()
	logger := util.NewLogger()
	anilistPlatform := anilist_platform.NewAnilistPlatform(anilistClient, logger)
	metaProvider := metadata.GetMockProvider(t)
	completeAnimeCache := anilist.NewCompleteAnimeCache()
	anilistRateLimiter := limiter.NewAnilistLimiter()
	dir := "E:/Anime"

	tests := []struct {
		name     string
		paths    []string
		enhanced bool
	}{
		{
			name: "86 - Eighty Six Part 1 & 2",
			paths: []string{
				"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 20v2 (1080p) [30072859].mkv",
				"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 21v2 (1080p) [4B1616A5].mkv",
				"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 22v2 (1080p) [58BF43B4].mkv",
				"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 23v2 (1080p) [D94B4894].mkv",
			},
			enhanced: false,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			scanLogger, err := NewScanLogger("./logs")
			if err != nil {
				t.Fatal("expected result, got error:", err.Error())
			}
			// Flush and close the log file when the subtest ends.
			// The handle was previously leaked because Done was never called.
			defer func() {
				_ = scanLogger.Done()
			}()

			// +---------------------+
			// |     Local Files     |
			// +---------------------+

			// Build local files from the test paths.
			var lfs []*anime.LocalFile
			for _, path := range tt.paths {
				lf := anime.NewLocalFile(path, dir)
				lfs = append(lfs, lf)
			}

			// +---------------------+
			// |    MediaFetcher     |
			// +---------------------+

			mf, err := NewMediaFetcher(t.Context(), &MediaFetcherOptions{
				Enhanced:           tt.enhanced,
				Platform:           anilistPlatform,
				LocalFiles:         lfs,
				CompleteAnimeCache: completeAnimeCache,
				MetadataProvider:   metaProvider,
				Logger:             util.NewLogger(),
				AnilistRateLimiter: anilistRateLimiter,
				ScanLogger:         scanLogger,
			})
			if err != nil {
				t.Fatal("expected result, got error:", err.Error())
			}

			// Wrap the fetched media in a container and log the normalized titles.
			mc := NewMediaContainer(&MediaContainerOptions{
				AllMedia:   mf.AllMedia,
				ScanLogger: scanLogger,
			})

			for _, m := range mc.NormalizedMedia {
				t.Log(m.GetTitleSafe())
			}
		})
	}
}
// TestFetchMediaFromLocalFiles checks that enhanced scanning can resolve the
// expected AniList media IDs from the titles parsed out of local file paths.
func TestFetchMediaFromLocalFiles(t *testing.T) {
	anilistClient := anilist.TestGetMockAnilistClient()
	logger := util.NewLogger()
	anilistPlatform := anilist_platform.NewAnilistPlatform(anilistClient, logger)
	metaProvider := metadata.GetMockProvider(t)
	completeAnimeCache := anilist.NewCompleteAnimeCache()
	anilistRateLimiter := limiter.NewAnilistLimiter()

	tests := []struct {
		name            string
		paths           []string
		expectedMediaId []int
	}{
		{
			name: "86 - Eighty Six Part 1 & 2",
			paths: []string{
				"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 20v2 (1080p) [30072859].mkv",
				"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 21v2 (1080p) [4B1616A5].mkv",
				"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 22v2 (1080p) [58BF43B4].mkv",
				"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 23v2 (1080p) [D94B4894].mkv",
			},
			expectedMediaId: []int{116589, 131586}, // 86 - Eighty Six Part 1 & 2
		},
	}

	dir := "E:/Anime"

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			scanLogger, err := NewScanLogger("./logs")
			if err != nil {
				t.Fatal("expected result, got error:", err.Error())
			}
			// Flush and close the log file when the subtest ends.
			// The handle was previously leaked because Done was never called.
			defer func() {
				_ = scanLogger.Done()
			}()

			// +---------------------+
			// |     Local Files     |
			// +---------------------+

			// Build local files from the test paths.
			var lfs []*anime.LocalFile
			for _, path := range tt.paths {
				lf := anime.NewLocalFile(path, dir)
				lfs = append(lfs, lf)
			}

			// +--------------------------+
			// | FetchMediaFromLocalFiles |
			// +--------------------------+

			media, ok := FetchMediaFromLocalFiles(
				t.Context(),
				anilistPlatform,
				lfs,
				completeAnimeCache,
				metaProvider,
				anilistRateLimiter,
				scanLogger,
			)
			if !ok {
				t.Fatal("could not fetch media from local files")
			}

			ids := lo.Map(media, func(k *anilist.CompleteAnime, _ int) int {
				return k.ID
			})

			// Test if all expected media IDs are present
			for _, id := range tt.expectedMediaId {
				assert.Contains(t, ids, id)
			}

			t.Log("Media IDs:")
			for _, m := range media {
				t.Log(m.GetTitleSafe())
			}
		})
	}
}

View File

@@ -0,0 +1,226 @@
package scanner
import (
"errors"
"fmt"
"seanime/internal/api/anilist"
"seanime/internal/api/metadata"
"seanime/internal/util/limiter"
"sort"
"time"
"github.com/samber/lo"
"github.com/sourcegraph/conc/pool"
)
type (
	// MediaTreeAnalysisOptions are the inputs for NewMediaTreeAnalysis.
	MediaTreeAnalysisOptions struct {
		tree             *anilist.CompleteAnimeRelationTree // Relation tree whose media are analyzed
		metadataProvider metadata.Provider                  // Used to fetch episode metadata per media
		rateLimiter      *limiter.Limiter                   // Limits metadata requests
	}

	// MediaTreeAnalysis holds one analyzed branch per media in the relation tree.
	MediaTreeAnalysis struct {
		branches []*MediaTreeAnalysisBranch
	}

	// MediaTreeAnalysisBranch stores the absolute episode ranges of a single media
	// in the tree, used to map absolute episode numbers to relative ones.
	MediaTreeAnalysisBranch struct {
		media         *anilist.CompleteAnime
		animeMetadata *metadata.AnimeMetadata
		// The second absolute episode number of the first episode
		// Sometimes, the metadata provider may have a 'true' absolute episode number and a 'part' absolute episode number
		// 'part' absolute episode numbers might be used for "Part 2s" of a season
		minPartAbsoluteEpisodeNumber int
		maxPartAbsoluteEpisodeNumber int
		minAbsoluteEpisode           int  // Absolute episode number of the media's first episode
		maxAbsoluteEpisode           int  // Absolute episode number of the media's last main episode
		totalEpisodeCount            int  // Main episode count of the media
		noAbsoluteEpisodesFound      bool // True when the first episode's absolute episode number is 0
	}
)
// NewMediaTreeAnalysis will analyze the media tree and create and store a MediaTreeAnalysisBranch for each media in the tree.
// Each MediaTreeAnalysisBranch will contain the min and max absolute episode number for the media.
// The min and max absolute episode numbers are used to get the relative episode number from an absolute episode number.
//
// Branches whose metadata cannot be fetched or that lack a first episode are dropped;
// an error is returned only when no branch could be built at all.
func NewMediaTreeAnalysis(opts *MediaTreeAnalysisOptions) (*MediaTreeAnalysis, error) {
	relations := make([]*anilist.CompleteAnime, 0)
	opts.tree.Range(func(key int, value *anilist.CompleteAnime) bool {
		relations = append(relations, value)
		return true
	})

	// Get Animap data for all related media in the tree, in parallel.
	// With each media's metadata, compute the min and max absolute episode numbers
	// and build one MediaTreeAnalysisBranch per media.
	p := pool.NewWithResults[*MediaTreeAnalysisBranch]().WithErrors()
	for _, rel := range relations {
		p.Go(func() (*MediaTreeAnalysisBranch, error) {
			opts.rateLimiter.Wait()
			animeMetadata, err := opts.metadataProvider.GetAnimeMetadata(metadata.AnilistPlatform, rel.ID)
			if err != nil {
				return nil, err
			}
			// The first episode anchors the absolute numbering of the whole branch.
			// Note: a successful lookup also guarantees the Episodes map is non-nil,
			// so no separate nil check is needed afterward.
			firstEp, ok := animeMetadata.Episodes["1"]
			if !ok {
				return nil, errors.New("no first episode")
			}

			// discrepancy: "seasonNumber":1,"episodeNumber":12,"absoluteEpisodeNumber":13,
			// this happens when the media has a separate entry but is technically the same season
			// when we detect this, we should use the "episodeNumber" as the absoluteEpisodeNumber
			// this is a hacky fix, but it works for the cases I've seen so far
			usePartEpisodeNumber := firstEp.EpisodeNumber > 1 && firstEp.AbsoluteEpisodeNumber-firstEp.EpisodeNumber > 1
			partAbsoluteEpisodeNumber := 0
			maxPartAbsoluteEpisodeNumber := 0
			if usePartEpisodeNumber {
				partAbsoluteEpisodeNumber = firstEp.EpisodeNumber
				maxPartAbsoluteEpisodeNumber = partAbsoluteEpisodeNumber + animeMetadata.GetMainEpisodeCount() - 1
			}

			return &MediaTreeAnalysisBranch{
				media:                        rel,
				animeMetadata:                animeMetadata,
				minPartAbsoluteEpisodeNumber: partAbsoluteEpisodeNumber,
				maxPartAbsoluteEpisodeNumber: maxPartAbsoluteEpisodeNumber,
				minAbsoluteEpisode:           firstEp.AbsoluteEpisodeNumber,
				// The max absolute episode number is the first episode's absolute episode number plus the total episode count minus 1
				// We subtract 1 because the first episode's absolute episode number is already included in the total episode count
				// e.g, if the first episode's absolute episode number is 13 and the total episode count is 12, the max absolute episode number is 24
				maxAbsoluteEpisode:      firstEp.AbsoluteEpisodeNumber + (animeMetadata.GetMainEpisodeCount() - 1),
				totalEpisodeCount:       animeMetadata.GetMainEpisodeCount(),
				noAbsoluteEpisodesFound: firstEp.AbsoluteEpisodeNumber == 0,
			}, nil
		})
	}
	// Per-branch errors are intentionally ignored; only a fully empty result is fatal.
	branches, _ := p.Wait()

	if len(branches) == 0 {
		return nil, errors.New("no branches found")
	}

	return &MediaTreeAnalysis{branches: branches}, nil
}
// getRelativeEpisodeNumber uses the MediaTreeAnalysis to get the relative episode number for an absolute episode number.
// It first searches for a branch whose stored absolute (or "part" absolute) range contains abs.
// If none matches, it falls back to re-deriving absolute ranges by ordering the branches by
// their first episode's air date and stacking their episode counts starting at 1.
// Returns the relative episode number, the matched media's ID, and whether a match was found.
func (o *MediaTreeAnalysis) getRelativeEpisodeNumber(abs int) (relativeEp int, mediaId int, ok bool) {
	isPartAbsolute := false
	// Find the MediaTreeAnalysisBranch that contains the absolute episode number
	branch, ok := lo.Find(o.branches, func(n *MediaTreeAnalysisBranch) bool {
		// First check if the partAbsoluteEpisodeNumber is set
		if n.minPartAbsoluteEpisodeNumber > 0 && n.maxPartAbsoluteEpisodeNumber > 0 {
			// If it is, check if the absolute episode number given is the same as the partAbsoluteEpisodeNumber
			// If it is, return true
			if n.minPartAbsoluteEpisodeNumber <= abs && n.maxPartAbsoluteEpisodeNumber >= abs {
				isPartAbsolute = true
				return true
			}
		}
		// Else, check if the absolute episode number given is within the min and max absolute episode numbers of the branch
		if n.minAbsoluteEpisode <= abs && n.maxAbsoluteEpisode >= abs {
			return true
		}
		return false
	})
	if !ok {
		// Fallback: no stored range contained abs (e.g. the metadata had no usable absolute
		// numbering), so rebuild absolute ranges from the branches' air-date order.
		// Sort branches manually
		type branchByFirstEpDate struct {
			branch             *MediaTreeAnalysisBranch
			firstEpDate        time.Time
			minAbsoluteEpisode int
			maxAbsoluteEpisode int
		}
		branches := make([]*branchByFirstEpDate, 0)
		for _, b := range o.branches {
			// Get the first episode date
			firstEp, ok := b.animeMetadata.Episodes["1"]
			if !ok {
				continue
			}
			// parse date; branches without a parsable air date are dropped from the fallback
			t, err := time.Parse(time.DateOnly, firstEp.AirDate)
			if err != nil {
				continue
			}
			branches = append(branches, &branchByFirstEpDate{
				branch:      b,
				firstEpDate: t,
			})
		}
		// Sort branches by first episode date
		// If the first episode date is not available, the branch will be placed at the end
		sort.Slice(branches, func(i, j int) bool {
			return branches[i].firstEpDate.Before(branches[j].firstEpDate)
		})
		// Hydrate branches with min and max absolute episode numbers:
		// each branch's range starts right after the previous branch's range,
		// and the first (earliest-airing) branch starts at 1.
		visited := make(map[int]*branchByFirstEpDate)
		for idx, b := range branches {
			visited[idx] = b
			if v, ok := visited[idx-1]; ok {
				b.minAbsoluteEpisode = v.maxAbsoluteEpisode + 1
				b.maxAbsoluteEpisode = b.minAbsoluteEpisode + b.branch.totalEpisodeCount - 1
				continue
			}
			b.minAbsoluteEpisode = 1
			b.maxAbsoluteEpisode = b.minAbsoluteEpisode + b.branch.totalEpisodeCount - 1
		}
		for _, b := range branches {
			if b.minAbsoluteEpisode <= abs && b.maxAbsoluteEpisode >= abs {
				// Backfill the recomputed range onto the real branch before returning
				b.branch.minAbsoluteEpisode = b.minAbsoluteEpisode
				b.branch.maxAbsoluteEpisode = b.maxAbsoluteEpisode
				branch = b.branch
				relativeEp = abs - (branch.minAbsoluteEpisode - 1)
				mediaId = branch.media.ID
				ok = true
				return
			}
		}
		return 0, 0, false
	}

	if isPartAbsolute {
		// Let's say the media has 12 episodes and the file is "episode 13"
		// If the [partAbsoluteEpisodeNumber] is 13, then the [relativeEp] will be 1, we can safely ignore the [absoluteEpisodeNumber]
		// e.g. 13 - (13-1) = 1
		relativeEp = abs - (branch.minPartAbsoluteEpisodeNumber - 1)
	} else {
		// Let's say the media has 12 episodes and the file is "episode 38"
		// The [minAbsoluteEpisode] will be 38 and the [relativeEp] will be 1
		// e.g. 38 - (38-1) = 1
		relativeEp = abs - (branch.minAbsoluteEpisode - 1)
	}
	mediaId = branch.media.ID

	return
}
// printBranches returns a single-line, human-readable representation of all
// analyzed branches, intended for debug logging.
func (o *MediaTreeAnalysis) printBranches() (str string) {
	out := "["
	for _, b := range o.branches {
		out += fmt.Sprintf("media: '%s', minAbsoluteEpisode: %d, maxAbsoluteEpisode: %d, totalEpisodeCount: %d; ", b.media.GetTitleSafe(), b.minAbsoluteEpisode, b.maxAbsoluteEpisode, b.totalEpisodeCount)
	}
	// Drop the trailing "; " left by the loop when at least one entry was written.
	if len(o.branches) > 0 {
		out = out[:len(out)-2]
	}
	return out + "]"
}

View File

@@ -0,0 +1,170 @@
package scanner
import (
"context"
"github.com/davecgh/go-spew/spew"
"github.com/stretchr/testify/assert"
"seanime/internal/api/anilist"
"seanime/internal/api/metadata"
"seanime/internal/test_utils"
"seanime/internal/util/limiter"
"testing"
"time"
)
// TestMediaTreeAnalysis verifies that getRelativeEpisodeNumber maps an absolute
// episode number to the expected relative episode number for several media trees.
func TestMediaTreeAnalysis(t *testing.T) {
	test_utils.InitTestProvider(t, test_utils.Anilist())

	anilistClient := anilist.TestGetMockAnilistClient()
	anilistRateLimiter := limiter.NewAnilistLimiter()
	tree := anilist.NewCompleteAnimeRelationTree()
	metadataProvider := metadata.GetMockProvider(t)

	tests := []struct {
		name                          string
		mediaId                       int
		absoluteEpisodeNumber         int
		expectedRelativeEpisodeNumber int
	}{
		{
			name:                          "Media Tree Analysis for 86 - Eighty Six Part 2",
			mediaId:                       131586, // 86 - Eighty Six Part 2
			absoluteEpisodeNumber:         23,
			expectedRelativeEpisodeNumber: 12,
		},
		{
			name:                          "Oshi no Ko Season 2",
			mediaId:                       150672, // [Oshi no Ko] 2nd Season (comment previously mislabeled this as 86 Part 2)
			absoluteEpisodeNumber:         12,
			expectedRelativeEpisodeNumber: 1,
		},
		{
			name:                          "Re:zero",
			mediaId:                       21355, // Re:Zero kara Hajimeru Isekai Seikatsu
			absoluteEpisodeNumber:         51,
			expectedRelativeEpisodeNumber: 1,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			mediaF, err := anilistClient.BaseAnimeByID(context.Background(), &tt.mediaId)
			if err != nil {
				t.Fatal("expected media, got not found")
			}
			media := mediaF.GetMedia()

			// +---------------------+
			// |      MediaTree      |
			// +---------------------+

			err = media.FetchMediaTree(
				anilist.FetchMediaTreeAll,
				anilistClient,
				anilistRateLimiter,
				tree,
				anilist.NewCompleteAnimeCache(),
			)
			if err != nil {
				t.Fatal("expected media tree, got error:", err.Error())
			}

			// +---------------------+
			// |  MediaTreeAnalysis  |
			// +---------------------+

			mta, err := NewMediaTreeAnalysis(&MediaTreeAnalysisOptions{
				tree:             tree,
				metadataProvider: metadataProvider,
				rateLimiter:      limiter.NewLimiter(time.Minute, 25),
			})
			if err != nil {
				t.Fatal("expected media tree analysis, got error:", err.Error())
			}

			// +---------------------+
			// |  Relative Episode   |
			// +---------------------+

			relEp, _, ok := mta.getRelativeEpisodeNumber(tt.absoluteEpisodeNumber)
			if assert.Truef(t, ok, "expected relative episode number %v for absolute episode number %v, nothing found", tt.expectedRelativeEpisodeNumber, tt.absoluteEpisodeNumber) {
				assert.Equal(t, tt.expectedRelativeEpisodeNumber, relEp)
			}
		})
	}
}
// TestMediaTreeAnalysis2 builds a media tree analysis for a single media and
// dumps the resulting branches for manual inspection.
func TestMediaTreeAnalysis2(t *testing.T) {
	anilistClient := anilist.TestGetMockAnilistClient()
	anilistRateLimiter := limiter.NewAnilistLimiter()
	tree := anilist.NewCompleteAnimeRelationTree()
	metadataProvider := metadata.GetMockProvider(t)

	cases := []struct {
		name    string
		mediaId int
	}{
		{
			name:    "Media Tree Analysis",
			mediaId: 375, // Soreyuke! Uchuu Senkan Yamamoto Yohko
		},
	}

	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			res, err := anilistClient.BaseAnimeByID(context.Background(), &tc.mediaId)
			if err != nil {
				t.Fatal("expected media, got error:", err.Error())
			}

			// Build the relation tree for the media.
			if err = res.GetMedia().FetchMediaTree(
				anilist.FetchMediaTreeAll,
				anilistClient,
				anilistRateLimiter,
				tree,
				anilist.NewCompleteAnimeCache(),
			); err != nil {
				t.Fatal("expected media tree, got error:", err.Error())
			}

			// Analyze the tree and dump the branches.
			mta, err := NewMediaTreeAnalysis(&MediaTreeAnalysisOptions{
				tree:             tree,
				metadataProvider: metadataProvider,
				rateLimiter:      limiter.NewLimiter(time.Minute, 25),
			})
			if err != nil {
				t.Fatal("expected media tree analysis, got error:", err.Error())
			}

			t.Log(spew.Sdump(mta))
		})
	}
}

View File

@@ -0,0 +1,421 @@
package scanner
import (
"context"
"errors"
"seanime/internal/api/anilist"
"seanime/internal/api/metadata"
"seanime/internal/events"
"seanime/internal/hook"
"seanime/internal/library/anime"
"seanime/internal/library/filesystem"
"seanime/internal/library/summary"
"seanime/internal/platforms/platform"
"seanime/internal/util"
"seanime/internal/util/limiter"
"sync"
"time"
"github.com/rs/zerolog"
"github.com/samber/lo"
lop "github.com/samber/lo/parallel"
)
// Scanner scans one or more library directories and turns the media files it
// finds into anime.LocalFile entries, matched and hydrated against AniList.
type Scanner struct {
	DirPath            string                         // Main library directory
	OtherDirPaths      []string                       // Additional library directories
	Enhanced           bool                           // Enhanced scanning: fetch media from file titles instead of the user's collection
	Platform           platform.Platform              // Platform used to fetch media and update the collection
	Logger             *zerolog.Logger                // Application logger
	WSEventManager     events.WSEventManagerInterface // Pushes scan progress/status events to the client
	ExistingLocalFiles []*anime.LocalFile             // Previously scanned local files (source of locked/ignored files)
	SkipLockedFiles    bool                           // Skip files marked as locked
	SkipIgnoredFiles   bool                           // Skip files marked as ignored
	ScanSummaryLogger  *summary.ScanSummaryLogger     // Collects a scan summary; created by Scan if nil
	ScanLogger         *ScanLogger                    // Optional detailed scan logger; may be nil
	MetadataProvider   metadata.Provider              // Provides episode/mapping metadata
	MatchingThreshold  float64                        // Threshold handed to the Matcher
	MatchingAlgorithm  string                         // Algorithm handed to the Matcher
}
// Scan will scan the directory and return a list of anime.LocalFile.
//
// High-level flow:
//  1. Trigger the ScanStarted hook (which may prevent the scan and return the existing files).
//  2. Retrieve media file paths from all library directories (deduplicated).
//  3. Parse the paths into local files, excluding locked/ignored files when requested.
//  4. Fetch the media needed for matching, match the local files, and hydrate their metadata.
//  5. Merge the skipped files back in and trigger the ScanCompleted hook.
//
// The returned slice contains both newly scanned and skipped files and is meant
// to be inserted as a new local file entry.
func (scn *Scanner) Scan(ctx context.Context) (lfs []*anime.LocalFile, err error) {
	defer util.HandlePanicWithError(&err)

	// The library is about to change; invalidate the episode collection cache in the background.
	go func() {
		anime.EpisodeCollectionFromLocalFilesCache.Clear()
	}()

	scn.WSEventManager.SendEvent(events.EventScanProgress, 0)
	scn.WSEventManager.SendEvent(events.EventScanStatus, "Retrieving local files...")

	completeAnimeCache := anilist.NewCompleteAnimeCache()
	// Create a new Anilist rate limiter, shared by all AniList requests made during the scan.
	anilistRateLimiter := limiter.NewAnilistLimiter()

	if scn.ScanSummaryLogger == nil {
		scn.ScanSummaryLogger = summary.NewScanSummaryLogger()
	}

	scn.Logger.Debug().Msg("scanner: Starting scan")
	scn.WSEventManager.SendEvent(events.EventScanProgress, 10)
	scn.WSEventManager.SendEvent(events.EventScanStatus, "Retrieving local files...")

	startTime := time.Now()

	// Invoke ScanStarted hook
	event := &ScanStartedEvent{
		LibraryPath:       scn.DirPath,
		OtherLibraryPaths: scn.OtherDirPaths,
		Enhanced:          scn.Enhanced,
		SkipLocked:        scn.SkipLockedFiles,
		SkipIgnored:       scn.SkipIgnoredFiles,
		LocalFiles:        scn.ExistingLocalFiles,
	}
	_ = hook.GlobalHookManager.OnScanStarted().Trigger(event)
	// The hook may have mutated the options; copy them back.
	scn.DirPath = event.LibraryPath
	scn.OtherDirPaths = event.OtherLibraryPaths
	scn.Enhanced = event.Enhanced
	scn.SkipLockedFiles = event.SkipLocked
	scn.SkipIgnoredFiles = event.SkipIgnored

	// Default prevented, return the local files
	if event.DefaultPrevented {
		// Invoke ScanCompleted hook
		completedEvent := &ScanCompletedEvent{
			LocalFiles: event.LocalFiles,
			Duration:   int(time.Since(startTime).Milliseconds()),
		}
		_ = hook.GlobalHookManager.OnScanCompleted().Trigger(completedEvent)
		return completedEvent.LocalFiles, nil
	}

	// +---------------------+
	// |     File paths      |
	// +---------------------+

	libraryPaths := append([]string{scn.DirPath}, scn.OtherDirPaths...)

	// Map of normalized paths already collected, used to avoid duplicates
	// when library directories overlap.
	retrievedPathMap := make(map[string]struct{})
	paths := make([]string, 0)
	mu := sync.Mutex{}
	logMu := sync.Mutex{}
	wg := sync.WaitGroup{}
	wg.Add(len(libraryPaths))
	// Get local files from all directories concurrently.
	for i, dirPath := range libraryPaths {
		go func(dirPath string, i int) {
			defer wg.Done()
			retrievedPaths, err := filesystem.GetMediaFilePathsFromDirS(dirPath)
			if err != nil {
				scn.Logger.Error().Msgf("scanner: An error occurred while retrieving local files from directory: %s", err)
				return
			}
			if scn.ScanLogger != nil {
				logMu.Lock()
				if i == 0 {
					scn.ScanLogger.logger.Info().
						// Log the count for THIS directory (was previously len(paths),
						// a racy read of the accumulated total).
						Any("count", len(retrievedPaths)).
						Msgf("Retrieved file paths from main directory: %s", dirPath)
				} else {
					scn.ScanLogger.logger.Info().
						Any("count", len(retrievedPaths)).
						Msgf("Retrieved file paths from other directory: %s", dirPath)
				}
				logMu.Unlock()
			}
			// Record each normalized path in the map so duplicates across directories
			// are actually filtered out (the map was previously only read, never written),
			// and guard both the map and the slice with the mutex.
			mu.Lock()
			for _, path := range retrievedPaths {
				normalized := util.NormalizePath(path)
				if _, ok := retrievedPathMap[normalized]; !ok {
					retrievedPathMap[normalized] = struct{}{}
					paths = append(paths, path)
				}
			}
			mu.Unlock()
		}(dirPath, i)
	}
	wg.Wait()

	if scn.ScanLogger != nil {
		scn.ScanLogger.logger.Info().
			Any("count", len(paths)).
			Msg("Retrieved file paths from all directories")
	}

	// Invoke ScanFilePathsRetrieved hook
	fpEvent := &ScanFilePathsRetrievedEvent{
		FilePaths: paths,
	}
	_ = hook.GlobalHookManager.OnScanFilePathsRetrieved().Trigger(fpEvent)
	paths = fpEvent.FilePaths

	// +---------------------+
	// |     Local files     |
	// +---------------------+

	localFiles := make([]*anime.LocalFile, 0)

	// Get skipped files depending on options
	skippedLfs := make(map[string]*anime.LocalFile)
	if (scn.SkipLockedFiles || scn.SkipIgnoredFiles) && scn.ExistingLocalFiles != nil {
		// Retrieve skipped files from existing local files
		for _, lf := range scn.ExistingLocalFiles {
			if scn.SkipLockedFiles && lf.IsLocked() {
				skippedLfs[lf.GetNormalizedPath()] = lf
			} else if scn.SkipIgnoredFiles && lf.IsIgnored() {
				skippedLfs[lf.GetNormalizedPath()] = lf
			}
		}
	}

	// Create local files from paths (skipping skipped files)
	localFiles = lop.Map(paths, func(path string, _ int) *anime.LocalFile {
		if _, ok := skippedLfs[util.NormalizePath(path)]; !ok {
			// Create a new local file
			return anime.NewLocalFileS(path, libraryPaths)
		} else {
			return nil
		}
	})
	// Remove nil values (placeholders for skipped files)
	localFiles = lo.Filter(localFiles, func(lf *anime.LocalFile, _ int) bool {
		return lf != nil
	})

	// Invoke ScanLocalFilesParsed hook
	parsedEvent := &ScanLocalFilesParsedEvent{
		LocalFiles: localFiles,
	}
	_ = hook.GlobalHookManager.OnScanLocalFilesParsed().Trigger(parsedEvent)
	localFiles = parsedEvent.LocalFiles

	if scn.ScanLogger != nil {
		scn.ScanLogger.logger.Debug().
			Any("count", len(localFiles)).
			Msg("Local files to be scanned")
		scn.ScanLogger.logger.Debug().
			Any("count", len(skippedLfs)).
			Msg("Skipped files")
		scn.ScanLogger.logger.Debug().
			Msg("===========================================================================================================")
	}
	for _, lf := range localFiles {
		if scn.ScanLogger != nil {
			scn.ScanLogger.logger.Trace().
				Str("path", lf.Path).
				Str("filename", lf.Name).
				Interface("parsedData", lf.ParsedData).
				Interface("parsedFolderData", lf.ParsedFolderData).
				Msg("Parsed local file")
		}
	}
	if scn.ScanLogger != nil {
		scn.ScanLogger.logger.Debug().
			Msg("===========================================================================================================")
	}

	// DEVNOTE: Removed library path checking because it causes some issues with symlinks

	// +---------------------+
	// |  No files to scan   |
	// +---------------------+

	// If there are no local files to scan (all files are skipped, or a file was deleted)
	if len(localFiles) == 0 {
		scn.WSEventManager.SendEvent(events.EventScanProgress, 90)
		scn.WSEventManager.SendEvent(events.EventScanStatus, "Verifying file integrity...")
		// Add skipped files
		if len(skippedLfs) > 0 {
			for _, sf := range skippedLfs {
				if filesystem.FileExists(sf.Path) { // Verify that the file still exists
					localFiles = append(localFiles, sf)
				}
			}
		}
		scn.Logger.Debug().Msg("scanner: Scan completed")
		scn.WSEventManager.SendEvent(events.EventScanProgress, 100)
		scn.WSEventManager.SendEvent(events.EventScanStatus, "Scan completed")

		// Invoke ScanCompleted hook
		completedEvent := &ScanCompletedEvent{
			LocalFiles: localFiles,
			Duration:   int(time.Since(startTime).Milliseconds()),
		}
		_ = hook.GlobalHookManager.OnScanCompleted().Trigger(completedEvent)
		localFiles = completedEvent.LocalFiles

		return localFiles, nil
	}

	scn.WSEventManager.SendEvent(events.EventScanProgress, 20)
	if scn.Enhanced {
		scn.WSEventManager.SendEvent(events.EventScanStatus, "Fetching media detected from file titles...")
	} else {
		scn.WSEventManager.SendEvent(events.EventScanStatus, "Fetching media...")
	}

	// +---------------------+
	// |    MediaFetcher     |
	// +---------------------+

	// Fetch media needed for matching
	mf, err := NewMediaFetcher(ctx, &MediaFetcherOptions{
		Enhanced:               scn.Enhanced,
		Platform:               scn.Platform,
		MetadataProvider:       scn.MetadataProvider,
		LocalFiles:             localFiles,
		CompleteAnimeCache:     completeAnimeCache,
		Logger:                 scn.Logger,
		AnilistRateLimiter:     anilistRateLimiter,
		DisableAnimeCollection: false,
		ScanLogger:             scn.ScanLogger,
	})
	if err != nil {
		return nil, err
	}

	scn.WSEventManager.SendEvent(events.EventScanProgress, 40)
	scn.WSEventManager.SendEvent(events.EventScanStatus, "Matching local files...")

	// +---------------------+
	// |   MediaContainer    |
	// +---------------------+

	// Create a new container for media
	mc := NewMediaContainer(&MediaContainerOptions{
		AllMedia:   mf.AllMedia,
		ScanLogger: scn.ScanLogger,
	})
	scn.Logger.Debug().
		Any("count", len(mc.NormalizedMedia)).
		Msg("media container: Media container created")

	// +---------------------+
	// |       Matcher       |
	// +---------------------+

	// Create a new matcher
	matcher := &Matcher{
		LocalFiles:         localFiles,
		MediaContainer:     mc,
		CompleteAnimeCache: completeAnimeCache,
		Logger:             scn.Logger,
		ScanLogger:         scn.ScanLogger,
		ScanSummaryLogger:  scn.ScanSummaryLogger,
		Algorithm:          scn.MatchingAlgorithm,
		Threshold:          scn.MatchingThreshold,
	}

	scn.WSEventManager.SendEvent(events.EventScanProgress, 60)

	err = matcher.MatchLocalFilesWithMedia()
	if err != nil {
		// If the matcher received no local files, return an error
		if errors.Is(err, ErrNoLocalFiles) {
			scn.Logger.Debug().Msg("scanner: Scan completed")
			scn.WSEventManager.SendEvent(events.EventScanProgress, 100)
			scn.WSEventManager.SendEvent(events.EventScanStatus, "Scan completed")
		}
		return nil, err
	}

	scn.WSEventManager.SendEvent(events.EventScanProgress, 70)
	scn.WSEventManager.SendEvent(events.EventScanStatus, "Hydrating metadata...")

	// +---------------------+
	// |    FileHydrator     |
	// +---------------------+

	// Create a new hydrator
	hydrator := &FileHydrator{
		AllMedia:           mc.NormalizedMedia,
		LocalFiles:         localFiles,
		MetadataProvider:   scn.MetadataProvider,
		Platform:           scn.Platform,
		CompleteAnimeCache: completeAnimeCache,
		AnilistRateLimiter: anilistRateLimiter,
		Logger:             scn.Logger,
		ScanLogger:         scn.ScanLogger,
		ScanSummaryLogger:  scn.ScanSummaryLogger,
	}
	hydrator.HydrateMetadata()

	scn.WSEventManager.SendEvent(events.EventScanProgress, 80)

	// +---------------------+
	// |  Add missing media  |
	// +---------------------+

	// Add non-added media entries to AniList collection
	// Max of 4 to avoid rate limit issues
	if len(mf.UnknownMediaIds) < 5 {
		scn.WSEventManager.SendEvent(events.EventScanStatus, "Adding missing media to AniList...")

		if err = scn.Platform.AddMediaToCollection(ctx, mf.UnknownMediaIds); err != nil {
			scn.Logger.Warn().Msg("scanner: An error occurred while adding media to planning list: " + err.Error())
		}
	}

	scn.WSEventManager.SendEvent(events.EventScanProgress, 90)
	scn.WSEventManager.SendEvent(events.EventScanStatus, "Verifying file integrity...")

	// Hydrate the summary logger before merging files
	scn.ScanSummaryLogger.HydrateData(localFiles, mc.NormalizedMedia, mf.AnimeCollectionWithRelations)

	// +---------------------+
	// |     Merge files     |
	// +---------------------+

	// Merge skipped files with scanned files
	// Only files that exist (this removes deleted/moved files)
	if len(skippedLfs) > 0 {
		wg := sync.WaitGroup{}
		mu := sync.Mutex{}
		wg.Add(len(skippedLfs))
		for _, skippedLf := range skippedLfs {
			go func(skippedLf *anime.LocalFile) {
				defer wg.Done()
				if filesystem.FileExists(skippedLf.Path) {
					mu.Lock()
					localFiles = append(localFiles, skippedLf)
					mu.Unlock()
				}
			}(skippedLf)
		}
		wg.Wait()
	}

	scn.Logger.Info().Msg("scanner: Scan completed")
	scn.WSEventManager.SendEvent(events.EventScanProgress, 100)
	scn.WSEventManager.SendEvent(events.EventScanStatus, "Scan completed")

	if scn.ScanLogger != nil {
		scn.ScanLogger.logger.Info().
			Int("count", len(localFiles)).
			Int("unknownMediaCount", len(mf.UnknownMediaIds)).
			Msg("Scan completed")
	}

	// Invoke ScanCompleted hook
	completedEvent := &ScanCompletedEvent{
		LocalFiles: localFiles,
		Duration:   int(time.Since(startTime).Milliseconds()),
	}
	_ = hook.GlobalHookManager.OnScanCompleted().Trigger(completedEvent)
	localFiles = completedEvent.LocalFiles

	return localFiles, nil
}

View File

@@ -0,0 +1,107 @@
package scanner
import (
"bytes"
"fmt"
"os"
"path/filepath"
"time"
"github.com/rs/zerolog"
)
// ScanLogger is a custom logger struct for scanning operations.
// In file mode (NewScanLogger) entries accumulate in an in-memory buffer and are
// flushed to logFile by Done; in console mode (NewConsoleScanLogger) logFile and
// buffer are nil and entries go straight to stdout.
type ScanLogger struct {
	logger  *zerolog.Logger // Structured logger writing into buffer (file mode) or stdout (console mode)
	logFile *os.File        // Destination log file; nil for console loggers
	buffer  *bytes.Buffer   // Accumulates entries until Done flushes them; nil for console loggers
}
// NewScanLogger creates a new ScanLogger with a log file named based on the current datetime.
//   - outputDir: The directory to save the log file in. This should come from the config.
//
// Log entries are buffered in memory and only written to disk when Done is called.
func NewScanLogger(outputDir string) (*ScanLogger, error) {
	// Generate a log file name with the current datetime
	logFileName := fmt.Sprintf("%s-scan.log", time.Now().Format("2006-01-02_15-04-05"))

	// Create the logs directory (including any missing parents) if it doesn't exist.
	// MkdirAll is a no-op on an existing directory, which also removes the
	// stat-then-mkdir race the previous implementation had.
	if err := os.MkdirAll(outputDir, 0755); err != nil {
		return nil, err
	}

	// Open the log file for writing
	logFile, err := os.OpenFile(filepath.Join(outputDir, logFileName), os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
	if err != nil {
		return nil, err
	}

	// Create a buffer for storing log entries until Done flushes them
	buffer := new(bytes.Buffer)

	// Logger writes into the buffer, not directly into the file
	logger := zerolog.New(buffer).With().Logger()

	return &ScanLogger{&logger, logFile, buffer}, nil
}
// NewConsoleScanLogger creates a new mock ScanLogger that writes
// human-readable output to stdout instead of buffering entries for a file.
func NewConsoleScanLogger() (*ScanLogger, error) {
	consoleWriter := zerolog.ConsoleWriter{
		Out:        os.Stdout,
		TimeFormat: time.DateTime,
	}
	l := zerolog.New(consoleWriter).With().Logger()
	// No file and no buffer: Done/Close become no-ops for this logger.
	return &ScanLogger{logger: &l, logFile: nil, buffer: nil}, nil
}
// LogMediaContainer returns a log event at the given level tagged with the
// "MediaContainer" context.
func (sl *ScanLogger) LogMediaContainer(level zerolog.Level) *zerolog.Event {
	return sl.logger.WithLevel(level).Str("context", "MediaContainer")
}

// LogMatcher returns a log event at the given level tagged with the
// "Matcher" context.
func (sl *ScanLogger) LogMatcher(level zerolog.Level) *zerolog.Event {
	return sl.logger.WithLevel(level).Str("context", "Matcher")
}

// LogFileHydrator returns a log event at the given level tagged with the
// "FileHydrator" context.
func (sl *ScanLogger) LogFileHydrator(level zerolog.Level) *zerolog.Event {
	return sl.logger.WithLevel(level).Str("context", "FileHydrator")
}

// LogMediaFetcher returns a log event at the given level tagged with the
// "MediaFetcher" context.
func (sl *ScanLogger) LogMediaFetcher(level zerolog.Level) *zerolog.Event {
	return sl.logger.WithLevel(level).Str("context", "MediaFetcher")
}
// Done flushes the buffer to the log file, syncs it to disk and closes it.
// It is a no-op for console-only loggers (nil logFile).
// The file is closed on every path, including Write/Sync failures, so the
// descriptor is never leaked.
func (sl *ScanLogger) Done() error {
	if sl.logFile == nil {
		return nil
	}
	// Write buffer contents to the log file
	if _, err := sl.logFile.Write(sl.buffer.Bytes()); err != nil {
		_ = sl.logFile.Close() // best-effort close; the write error is the primary failure
		return err
	}
	// Flush OS buffers to disk before closing
	if err := sl.logFile.Sync(); err != nil {
		_ = sl.logFile.Close()
		return err
	}
	return sl.logFile.Close()
}
// Close releases the underlying log file without flushing the in-memory
// buffer (use Done for that). It is a no-op for console-only loggers.
// This is a best-effort cleanup path, so Sync/Close errors are deliberately
// ignored; previously the file was only synced and never closed, which
// leaked the file descriptor.
func (sl *ScanLogger) Close() {
	if sl.logFile == nil {
		return
	}
	_ = sl.logFile.Sync()
	_ = sl.logFile.Close()
}

View File

@@ -0,0 +1,124 @@
package scanner
import (
"seanime/internal/api/anilist"
"seanime/internal/api/metadata"
"seanime/internal/library/anime"
"seanime/internal/platforms/anilist_platform"
"seanime/internal/util"
"seanime/internal/util/limiter"
"testing"
)
// TestScanLogger exercises the scan pipeline (MediaContainer -> Matcher ->
// FileHydrator) with a real ScanLogger writing to ./logs, and verifies that
// the local files are hydrated with the expected media id.
func TestScanLogger(t *testing.T) {

	anilistClient := anilist.TestGetMockAnilistClient()
	logger := util.NewLogger()
	anilistPlatform := anilist_platform.NewAnilistPlatform(anilistClient, logger)
	animeCollection, err := anilistPlatform.GetAnimeCollectionWithRelations(t.Context())
	if err != nil {
		t.Fatal(err.Error())
	}
	allMedia := animeCollection.GetAllAnime()
	metadataProvider := metadata.GetMockProvider(t)
	completeAnimeCache := anilist.NewCompleteAnimeCache()
	anilistRateLimiter := limiter.NewAnilistLimiter()

	tests := []struct {
		name            string
		paths           []string
		expectedMediaId int
	}{
		{
			name: "should be hydrated with id 131586",
			paths: []string{
				"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 20v2 (1080p) [30072859].mkv",
				"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 21v2 (1080p) [4B1616A5].mkv",
				"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 22v2 (1080p) [58BF43B4].mkv",
				"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 23v2 (1080p) [D94B4894].mkv",
			},
			expectedMediaId: 131586, // 86 - Eighty Six Part 2
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {

			scanLogger, err := NewScanLogger("./logs")
			if err != nil {
				t.Fatal("expected result, got error:", err.Error())
			}
			// Flush the buffered scan log to disk and close the file when the
			// subtest finishes; without this the log file stays empty and the
			// descriptor leaks.
			defer func() {
				if derr := scanLogger.Done(); derr != nil {
					t.Logf("failed to flush scan log: %v", derr)
				}
			}()

			// +---------------------+
			// |     Local Files     |
			// +---------------------+

			var lfs []*anime.LocalFile
			for _, path := range tt.paths {
				lf := anime.NewLocalFile(path, "E:/Anime")
				lfs = append(lfs, lf)
			}

			// +---------------------+
			// |   MediaContainer    |
			// +---------------------+

			mc := NewMediaContainer(&MediaContainerOptions{
				AllMedia:   allMedia,
				ScanLogger: scanLogger,
			})

			for _, nm := range mc.NormalizedMedia {
				t.Logf("media id: %d, title: %s", nm.ID, nm.GetTitleSafe())
			}

			// +---------------------+
			// |       Matcher       |
			// +---------------------+

			matcher := &Matcher{
				LocalFiles:         lfs,
				MediaContainer:     mc,
				CompleteAnimeCache: completeAnimeCache,
				Logger:             util.NewLogger(),
				ScanLogger:         scanLogger,
				ScanSummaryLogger:  nil,
			}

			err = matcher.MatchLocalFilesWithMedia()
			if err != nil {
				t.Fatal("expected result, got error:", err.Error())
			}

			// +---------------------+
			// |    FileHydrator     |
			// +---------------------+

			fh := FileHydrator{
				LocalFiles:         lfs,
				AllMedia:           mc.NormalizedMedia,
				CompleteAnimeCache: completeAnimeCache,
				Platform:           anilistPlatform,
				MetadataProvider:   metadataProvider,
				AnilistRateLimiter: anilistRateLimiter,
				Logger:             logger,
				ScanLogger:         scanLogger,
				ScanSummaryLogger:  nil,
				ForceMediaId:       0,
			}

			fh.HydrateMetadata()

			// Every local file must have been matched to the expected media.
			for _, lf := range fh.LocalFiles {
				if lf.MediaId != tt.expectedMediaId {
					t.Fatalf("expected media id %d, got %d", tt.expectedMediaId, lf.MediaId)
				}
				t.Logf("local file: %s,\nmedia id: %d\n", lf.Name, lf.MediaId)
			}
		})
	}
}

View File

@@ -0,0 +1,79 @@
package scanner
import (
"seanime/internal/api/anilist"
"seanime/internal/events"
"seanime/internal/library/anime"
"seanime/internal/platforms/anilist_platform"
"seanime/internal/test_utils"
"seanime/internal/util"
"testing"
)
//----------------------------------------------------------------------------------------------------------------------
// TestScanner_Scan runs the scanner end-to-end against the mock AniList
// client and logs the resulting local files.
func TestScanner_Scan(t *testing.T) {
	test_utils.InitTestProvider(t, test_utils.Anilist())

	anilistClient := anilist.TestGetMockAnilistClient()
	logger := util.NewLogger()
	anilistPlatform := anilist_platform.NewAnilistPlatform(anilistClient, logger)
	wsEventManager := events.NewMockWSEventManager(util.NewLogger())

	dir := "E:/Anime"

	tests := []struct {
		name  string
		paths []string
	}{
		{
			name: "Scan",
			paths: []string{
				"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 20v2 (1080p) [30072859].mkv",
				"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 21v2 (1080p) [4B1616A5].mkv",
				"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 22v2 (1080p) [58BF43B4].mkv",
				"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 23v2 (1080p) [D94B4894].mkv",
			},
		},
	}

	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {

			// Seed the scanner with pre-existing local files built from the test paths.
			seedLfs := make([]*anime.LocalFile, 0, len(tc.paths))
			for _, p := range tc.paths {
				seedLfs = append(seedLfs, anime.NewLocalFile(p, dir))
			}

			// +---------------------+
			// |        Scan         |
			// +---------------------+

			s := &Scanner{
				DirPath:            dir,
				Enhanced:           false,
				Platform:           anilistPlatform,
				Logger:             util.NewLogger(),
				WSEventManager:     wsEventManager,
				ExistingLocalFiles: seedLfs,
				SkipLockedFiles:    false,
				SkipIgnoredFiles:   false,
				ScanLogger:         nil,
				ScanSummaryLogger:  nil,
			}

			scannedLfs, err := s.Scan(t.Context())
			if err != nil {
				t.Fatal("expected result, got error:", err.Error())
			}

			for _, lf := range scannedLfs {
				t.Log(lf.Name)
			}
		})
	}
}

View File

@@ -0,0 +1,116 @@
package scanner
import (
"os"
"path/filepath"
"seanime/internal/events"
"strings"
"github.com/fsnotify/fsnotify"
"github.com/rs/zerolog"
)
// Watcher is a custom file system event watcher.
// It wraps an fsnotify.Watcher and forwards library file events to the
// websocket event manager.
type Watcher struct {
	Watcher        *fsnotify.Watcher              // underlying fsnotify watcher
	Logger         *zerolog.Logger                // application logger
	WSEventManager events.WSEventManagerInterface // notifies clients of file events
	TotalSize      string                         // NOTE(review): never read or written in this file — confirm it is used elsewhere
}

// NewWatcherOptions holds the dependencies needed to construct a Watcher.
type NewWatcherOptions struct {
	Logger         *zerolog.Logger
	WSEventManager events.WSEventManagerInterface
}
// NewWatcher creates a new Watcher instance for monitoring a directory and its subdirectories.
func NewWatcher(opts *NewWatcherOptions) (*Watcher, error) {
	fsWatcher, err := fsnotify.NewWatcher()
	if err != nil {
		return nil, err
	}
	w := &Watcher{
		Watcher:        fsWatcher,
		Logger:         opts.Logger,
		WSEventManager: opts.WSEventManager,
	}
	return w, nil
}
//----------------------------------------------------------------------------------------------------------------------
// WatchLibraryFilesOptions holds the directories passed to InitLibraryFileWatcher.
type WatchLibraryFilesOptions struct {
	LibraryPaths []string // root library directories; each is watched recursively
}
// InitLibraryFileWatcher starts watching the specified directories and their
// subdirectories for file system events.
// Walk errors on individual entries are ignored so one unreadable path does
// not abort registration of the rest.
func (w *Watcher) InitLibraryFileWatcher(opts *WatchLibraryFilesOptions) error {
	// Register every directory under root (including root itself) with the watcher.
	addTree := func(root string) error {
		return filepath.Walk(root, func(path string, info os.FileInfo, walkErr error) error {
			if walkErr != nil {
				// Skip entries we cannot stat/read instead of failing the whole walk.
				return nil
			}
			if !info.IsDir() {
				return nil
			}
			return w.Watcher.Add(path)
		})
	}

	for _, libraryPath := range opts.LibraryPaths {
		if err := addTree(libraryPath); err != nil {
			return err
		}
	}

	w.Logger.Info().Msgf("watcher: Watching directories: %+v", opts.LibraryPaths)
	return nil
}
// StartWatching launches a goroutine that consumes file system events from
// the underlying watcher and invokes onFileAction whenever a library file is
// created or removed. Paths containing ".part" or ".tmp" (partial downloads)
// are ignored. The goroutine exits when the watcher's channels are closed
// (see StopWatching).
func (w *Watcher) StartWatching(
	onFileAction func(),
) {
	// Consume file system events until the watcher is closed.
	go func() {
		for {
			select {
			case ev, open := <-w.Watcher.Events:
				if !open {
					return
				}
				// Skip temporary/partial files.
				if strings.Contains(ev.Name, ".part") || strings.Contains(ev.Name, ".tmp") {
					continue
				}
				// Both checks run independently so a combined-op event still
				// triggers each matching branch.
				if ev.Op&fsnotify.Create != 0 {
					w.Logger.Debug().Msgf("watcher: File created: %s", ev.Name)
					w.WSEventManager.SendEvent(events.LibraryWatcherFileAdded, ev.Name)
					onFileAction()
				}
				if ev.Op&fsnotify.Remove != 0 {
					w.Logger.Debug().Msgf("watcher: File removed: %s", ev.Name)
					w.WSEventManager.SendEvent(events.LibraryWatcherFileRemoved, ev.Name)
					onFileAction()
				}
			case watchErr, open := <-w.Watcher.Errors:
				if !open {
					return
				}
				w.Logger.Warn().Err(watchErr).Msgf("watcher: Error while watching directory")
			}
		}
	}()
}
// StopWatching closes the underlying fsnotify watcher, which also terminates
// the goroutine started by StartWatching (its channels are closed).
// Previously the success message was logged only when err == nil while
// attaching that nil error, and real close errors were silently dropped.
func (w *Watcher) StopWatching() {
	if err := w.Watcher.Close(); err != nil {
		w.Logger.Warn().Err(err).Msgf("watcher: Failed to stop watcher")
		return
	}
	w.Logger.Trace().Msgf("watcher: Watcher stopped")
}