node build fixed

This commit is contained in:
ra_ma
2025-09-20 14:08:38 +01:00
parent c6ebbe069d
commit 3d298fa434
1516 changed files with 535727 additions and 2 deletions

View File

@@ -0,0 +1,8 @@
# anime
This package contains structs that represent the main data structures of the local anime library, such as `LocalFile` and `LibraryEntry`.
### 🚫 Do not
- Do not import **database**.
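
As a rough illustration of the intended dependency direction (the `db` package clause and `saveRule` function below are hypothetical, following the DEVNOTE in this commit), the database layer imports this package, never the reverse:

```go
// Hypothetical sketch: the db package may depend on anime, keeping the dependency one-way.
package db

import "seanime/internal/library/anime"

// The database layer persists anime structs; anime never imports the database package.
func saveRule(r *anime.AutoDownloaderRule) error {
	_ = r // stored by the database layer (sketch only)
	return nil
}
```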

View File

@@ -0,0 +1,35 @@
package anime
// DEVNOTE: The structs are defined in this file because they are imported by both the autodownloader package and the db package.
// Defining them in the autodownloader package would create a circular dependency because the db package imports these structs.
const (
AutoDownloaderRuleTitleComparisonContains AutoDownloaderRuleTitleComparisonType = "contains"
AutoDownloaderRuleTitleComparisonLikely AutoDownloaderRuleTitleComparisonType = "likely"
)
const (
AutoDownloaderRuleEpisodeRecent AutoDownloaderRuleEpisodeType = "recent"
AutoDownloaderRuleEpisodeSelected AutoDownloaderRuleEpisodeType = "selected"
)
type (
AutoDownloaderRuleTitleComparisonType string
AutoDownloaderRuleEpisodeType string
// AutoDownloaderRule is a rule used to automatically download media.
// The struct is sent to the client, so `dbId` is included to facilitate mutations.
AutoDownloaderRule struct {
DbID uint `json:"dbId"` // Will be set when fetched from the database
Enabled bool `json:"enabled"`
MediaId int `json:"mediaId"`
ReleaseGroups []string `json:"releaseGroups"`
Resolutions []string `json:"resolutions"`
ComparisonTitle string `json:"comparisonTitle"`
TitleComparisonType AutoDownloaderRuleTitleComparisonType `json:"titleComparisonType"`
EpisodeType AutoDownloaderRuleEpisodeType `json:"episodeType"`
EpisodeNumbers []int `json:"episodeNumbers,omitempty"`
Destination string `json:"destination"`
AdditionalTerms []string `json:"additionalTerms"`
}
)
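// Illustrative sketch (not part of this commit's API surface): a rule that would
// auto-download recently released 1080p episodes from a single release group.
// All values below are hypothetical.
var exampleAutoDownloaderRule = &AutoDownloaderRule{
	Enabled:             true,
	MediaId:             154587, // hypothetical AniList media ID
	ReleaseGroups:       []string{"SubsPlease"},
	Resolutions:         []string{"1080p"},
	ComparisonTitle:     "Sousou no Frieren",
	TitleComparisonType: AutoDownloaderRuleTitleComparisonLikely,
	EpisodeType:         AutoDownloaderRuleEpisodeRecent,
	Destination:         "E:/Anime/Sousou no Frieren",
}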

View File

@@ -0,0 +1,467 @@
package anime
import (
"cmp"
"context"
"path/filepath"
"seanime/internal/api/anilist"
"seanime/internal/api/metadata"
"seanime/internal/hook"
"seanime/internal/platforms/platform"
"seanime/internal/util"
"slices"
"sort"
"github.com/samber/lo"
lop "github.com/samber/lo/parallel"
"github.com/sourcegraph/conc/pool"
)
type (
// LibraryCollection holds the main data for the library collection.
// It consists of:
// - ContinueWatchingList: a list of Episode for the "continue watching" feature.
// - Lists: a list of LibraryCollectionList (one for each status).
// - UnmatchedLocalFiles: a list of unmatched local files (media id == 0), used by the "Resolve unmatched" feature.
// - UnmatchedGroups: a list of UnmatchedGroup instances; like UnmatchedLocalFiles, but grouped by directory. Used by the "Resolve unmatched" feature.
// - IgnoredLocalFiles: a list of ignored local files. (DEVNOTE: Unused for now)
// - UnknownGroups: a list of UnknownGroup instances; groups of files whose media is not in the user's AniList. Used by the "Resolve unknown media" feature.
LibraryCollection struct {
ContinueWatchingList []*Episode `json:"continueWatchingList"`
Lists []*LibraryCollectionList `json:"lists"`
UnmatchedLocalFiles []*LocalFile `json:"unmatchedLocalFiles"`
UnmatchedGroups []*UnmatchedGroup `json:"unmatchedGroups"`
IgnoredLocalFiles []*LocalFile `json:"ignoredLocalFiles"`
UnknownGroups []*UnknownGroup `json:"unknownGroups"`
Stats *LibraryCollectionStats `json:"stats"`
Stream *StreamCollection `json:"stream,omitempty"` // Hydrated by the route handler
}
StreamCollection struct {
ContinueWatchingList []*Episode `json:"continueWatchingList"`
Anime []*anilist.BaseAnime `json:"anime"`
ListData map[int]*EntryListData `json:"listData"`
}
LibraryCollectionListType string
LibraryCollectionStats struct {
TotalEntries int `json:"totalEntries"`
TotalFiles int `json:"totalFiles"`
TotalShows int `json:"totalShows"`
TotalMovies int `json:"totalMovies"`
TotalSpecials int `json:"totalSpecials"`
TotalSize string `json:"totalSize"`
}
LibraryCollectionList struct {
Type anilist.MediaListStatus `json:"type"`
Status anilist.MediaListStatus `json:"status"`
Entries []*LibraryCollectionEntry `json:"entries"`
}
// LibraryCollectionEntry holds the data for a single entry in a LibraryCollectionList.
// It is a slimmed-down version of Entry that holds the media, media id, library data, and list data.
LibraryCollectionEntry struct {
Media *anilist.BaseAnime `json:"media"`
MediaId int `json:"mediaId"`
EntryLibraryData *EntryLibraryData `json:"libraryData"` // Library data
NakamaEntryLibraryData *NakamaEntryLibraryData `json:"nakamaLibraryData,omitempty"` // Library data from Nakama
EntryListData *EntryListData `json:"listData"` // AniList list data
}
// UnmatchedGroup holds the data for a group of unmatched local files.
UnmatchedGroup struct {
Dir string `json:"dir"`
LocalFiles []*LocalFile `json:"localFiles"`
Suggestions []*anilist.BaseAnime `json:"suggestions"`
}
// UnknownGroup holds the data for a group of local files whose media is not in the user's AniList.
// The client will use this data to suggest media to the user, so they can add it to their AniList.
UnknownGroup struct {
MediaId int `json:"mediaId"`
LocalFiles []*LocalFile `json:"localFiles"`
}
)
type (
// NewLibraryCollectionOptions is a struct that holds the data needed for creating a new LibraryCollection.
NewLibraryCollectionOptions struct {
AnimeCollection *anilist.AnimeCollection
LocalFiles []*LocalFile
Platform platform.Platform
MetadataProvider metadata.Provider
}
)
// NewLibraryCollection creates a new LibraryCollection.
func NewLibraryCollection(ctx context.Context, opts *NewLibraryCollectionOptions) (lc *LibraryCollection, err error) {
defer util.HandlePanicInModuleWithError("entities/collection/NewLibraryCollection", &err)
lc = new(LibraryCollection)
reqEvent := &AnimeLibraryCollectionRequestedEvent{
AnimeCollection: opts.AnimeCollection,
LocalFiles: opts.LocalFiles,
LibraryCollection: lc,
}
err = hook.GlobalHookManager.OnAnimeLibraryCollectionRequested().Trigger(reqEvent)
if err != nil {
return nil, err
}
opts.AnimeCollection = reqEvent.AnimeCollection // Override the anime collection
opts.LocalFiles = reqEvent.LocalFiles // Override the local files
lc = reqEvent.LibraryCollection // Override the library collection
if reqEvent.DefaultPrevented {
event := &AnimeLibraryCollectionEvent{
LibraryCollection: lc,
}
err = hook.GlobalHookManager.OnAnimeLibraryCollection().Trigger(event)
if err != nil {
return nil, err
}
return event.LibraryCollection, nil
}
// Get lists from collection
aniLists := opts.AnimeCollection.GetMediaListCollection().GetLists()
// Create lists
lc.hydrateCollectionLists(
opts.LocalFiles,
aniLists,
)
lc.hydrateStats(opts.LocalFiles)
// Add Continue Watching list
lc.hydrateContinueWatchingList(
ctx,
opts.LocalFiles,
opts.AnimeCollection,
opts.Platform,
opts.MetadataProvider,
)
lc.UnmatchedLocalFiles = lo.Filter(opts.LocalFiles, func(lf *LocalFile, index int) bool {
return lf.MediaId == 0 && !lf.Ignored
})
lc.IgnoredLocalFiles = lo.Filter(opts.LocalFiles, func(lf *LocalFile, index int) bool {
return lf.Ignored
})
slices.SortStableFunc(lc.IgnoredLocalFiles, func(i, j *LocalFile) int {
return cmp.Compare(i.GetPath(), j.GetPath())
})
lc.hydrateUnmatchedGroups()
// Event
event := &AnimeLibraryCollectionEvent{
LibraryCollection: lc,
}
hook.GlobalHookManager.OnAnimeLibraryCollection().Trigger(event)
lc = event.LibraryCollection
return
}
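// newLibraryCollectionSketch is an illustrative usage sketch (hypothetical, not
// used by the package): it shows how the options above are typically wired
// together and how the resulting lists are consumed.
func newLibraryCollectionSketch(ctx context.Context, ac *anilist.AnimeCollection, lfs []*LocalFile, pl platform.Platform, mp metadata.Provider) ([]*LibraryCollectionList, error) {
	lc, err := NewLibraryCollection(ctx, &NewLibraryCollectionOptions{
		AnimeCollection:  ac,
		LocalFiles:       lfs,
		Platform:         pl,
		MetadataProvider: mp,
	})
	if err != nil {
		return nil, err
	}
	// One LibraryCollectionList per AniList status; "repeating" is merged into "current".
	return lc.Lists, nil
}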
//----------------------------------------------------------------------------------------------------------------------
func (lc *LibraryCollection) hydrateCollectionLists(
localFiles []*LocalFile,
aniLists []*anilist.AnimeCollection_MediaListCollection_Lists,
) {
// Group local files by media id
groupedLfs := GroupLocalFilesByMediaID(localFiles)
// Get slice of media ids from local files
mIds := GetMediaIdsFromLocalFiles(localFiles)
foundIds := make([]int, 0)
for _, list := range aniLists {
entries := list.GetEntries()
for _, entry := range entries {
foundIds = append(foundIds, entry.Media.ID)
}
}
// Create a new LibraryCollectionList for each list
// This is done in parallel
p := pool.NewWithResults[*LibraryCollectionList]()
for _, list := range aniLists {
p.Go(func() *LibraryCollectionList {
// If the list has no status, return nil
// This occurs when there are custom lists (DEVNOTE: This shouldn't occur because we remove custom lists when the collection is fetched)
if list.Status == nil {
return nil
}
// For each list, get the entries
entries := list.GetEntries()
// For each entry, check if the media id is in the local files
// If it is, create a new LibraryCollectionEntry with the associated local files
p2 := pool.NewWithResults[*LibraryCollectionEntry]()
for _, entry := range entries {
p2.Go(func() *LibraryCollectionEntry {
if slices.Contains(mIds, entry.Media.ID) {
entryLfs := groupedLfs[entry.Media.ID]
libraryData, _ := NewEntryLibraryData(&NewEntryLibraryDataOptions{
EntryLocalFiles: entryLfs,
MediaId: entry.Media.ID,
CurrentProgress: entry.GetProgressSafe(),
})
return &LibraryCollectionEntry{
MediaId: entry.Media.ID,
Media: entry.Media,
EntryLibraryData: libraryData,
EntryListData: &EntryListData{
Progress: entry.GetProgressSafe(),
Score: entry.GetScoreSafe(),
Status: entry.Status,
Repeat: entry.GetRepeatSafe(),
StartedAt: anilist.ToEntryStartDate(entry.StartedAt),
CompletedAt: anilist.ToEntryCompletionDate(entry.CompletedAt),
},
}
} else {
return nil
}
})
}
r := p2.Wait()
// Filter out nil entries
r = lo.Filter(r, func(item *LibraryCollectionEntry, index int) bool {
return item != nil
})
// Sort by title
sort.Slice(r, func(i, j int) bool {
return r[i].Media.GetTitleSafe() < r[j].Media.GetTitleSafe()
})
// Return a new LibraryCollectionList
return &LibraryCollectionList{
Type: getLibraryCollectionEntryFromListStatus(*list.Status),
Status: *list.Status,
Entries: r,
}
})
}
// Get the lists from the pool
lists := p.Wait()
// Filter out nil entries
lists = lo.Filter(lists, func(item *LibraryCollectionList, index int) bool {
return item != nil
})
// Merge repeating into current (no need to show repeating as a separate list)
repeatingList, ok := lo.Find(lists, func(item *LibraryCollectionList) bool {
return item.Status == anilist.MediaListStatusRepeating
})
if ok {
currentList, ok := lo.Find(lists, func(item *LibraryCollectionList) bool {
return item.Status == anilist.MediaListStatusCurrent
})
if len(repeatingList.Entries) > 0 && ok {
currentList.Entries = append(currentList.Entries, repeatingList.Entries...)
} else if len(repeatingList.Entries) > 0 {
newCurrentList := repeatingList
newCurrentList.Type = anilist.MediaListStatusCurrent
lists = append(lists, newCurrentList)
}
// Remove repeating from lists
lists = lo.Filter(lists, func(item *LibraryCollectionList, index int) bool {
return item.Status != anilist.MediaListStatusRepeating
})
}
// Lists
lc.Lists = lists
if lc.Lists == nil {
lc.Lists = make([]*LibraryCollectionList, 0)
}
// +---------------------+
// | Unknown media ids |
// +---------------------+
unknownIds := make([]int, 0)
for _, id := range mIds {
if id != 0 && !slices.Contains(foundIds, id) {
unknownIds = append(unknownIds, id)
}
}
lc.UnknownGroups = make([]*UnknownGroup, 0)
for _, id := range unknownIds {
lc.UnknownGroups = append(lc.UnknownGroups, &UnknownGroup{
MediaId: id,
LocalFiles: groupedLfs[id],
})
}
return
}
//----------------------------------------------------------------------------------------------------------------------
func (lc *LibraryCollection) hydrateStats(lfs []*LocalFile) {
stats := &LibraryCollectionStats{
TotalFiles: len(lfs),
TotalEntries: 0,
TotalShows: 0,
TotalMovies: 0,
TotalSpecials: 0,
TotalSize: "", // Will be set by the route handler
}
for _, list := range lc.Lists {
for _, entry := range list.Entries {
stats.TotalEntries++
if entry.Media.Format != nil {
if *entry.Media.Format == anilist.MediaFormatMovie {
stats.TotalMovies++
} else if *entry.Media.Format == anilist.MediaFormatSpecial || *entry.Media.Format == anilist.MediaFormatOva {
stats.TotalSpecials++
} else {
stats.TotalShows++
}
}
}
}
lc.Stats = stats
}
//----------------------------------------------------------------------------------------------------------------------
// hydrateContinueWatchingList creates a list of Episode for the "continue watching" feature.
// This should be called after the LibraryCollectionList's have been created.
func (lc *LibraryCollection) hydrateContinueWatchingList(
ctx context.Context,
localFiles []*LocalFile,
animeCollection *anilist.AnimeCollection,
platform platform.Platform,
metadataProvider metadata.Provider,
) {
// Get currently watching list
current, found := lo.Find(lc.Lists, func(item *LibraryCollectionList) bool {
return item.Status == anilist.MediaListStatusCurrent
})
// If no currently watching list is found, return an empty slice
if !found {
lc.ContinueWatchingList = make([]*Episode, 0) // Set empty slice
return
}
// Get media ids from current list
mIds := make([]int, len(current.Entries))
for i, entry := range current.Entries {
mIds[i] = entry.MediaId
}
// Create a new Entry for each media id
mEntryPool := pool.NewWithResults[*Entry]()
for _, mId := range mIds {
mEntryPool.Go(func() *Entry {
me, _ := NewEntry(ctx, &NewEntryOptions{
MediaId: mId,
LocalFiles: localFiles,
AnimeCollection: animeCollection,
Platform: platform,
MetadataProvider: metadataProvider,
})
return me
})
}
mEntries := mEntryPool.Wait()
mEntries = lo.Filter(mEntries, func(item *Entry, index int) bool {
return item != nil
}) // Filter out nil entries
// If there are no entries, return an empty slice
if len(mEntries) == 0 {
lc.ContinueWatchingList = make([]*Episode, 0) // Return empty slice
return
}
// Sort by progress
sort.Slice(mEntries, func(i, j int) bool {
return mEntries[i].EntryListData.Progress > mEntries[j].EntryListData.Progress
})
// Remove entries the user has watched all episodes of
mEntries = lop.Map(mEntries, func(mEntry *Entry, index int) *Entry {
if !mEntry.HasWatchedAll() {
return mEntry
}
return nil
})
mEntries = lo.Filter(mEntries, func(item *Entry, index int) bool {
return item != nil
})
// Get the next episode for each media entry
mEpisodes := lop.Map(mEntries, func(mEntry *Entry, index int) *Episode {
ep, ok := mEntry.FindNextEpisode()
if ok {
return ep
}
return nil
})
mEpisodes = lo.Filter(mEpisodes, func(item *Episode, index int) bool {
return item != nil
})
lc.ContinueWatchingList = mEpisodes
}
//----------------------------------------------------------------------------------------------------------------------
// hydrateUnmatchedGroups groups unmatched local files by their directory and
// creates an UnmatchedGroup instance for each directory.
func (lc *LibraryCollection) hydrateUnmatchedGroups() {
groups := make([]*UnmatchedGroup, 0)
// Group by directory
groupedLfs := lop.GroupBy(lc.UnmatchedLocalFiles, func(lf *LocalFile) string {
return filepath.Dir(lf.GetPath())
})
for key, value := range groupedLfs {
groups = append(groups, &UnmatchedGroup{
Dir: key,
LocalFiles: value,
Suggestions: make([]*anilist.BaseAnime, 0),
})
}
slices.SortStableFunc(groups, func(i, j *UnmatchedGroup) int {
return cmp.Compare(i.Dir, j.Dir)
})
// Assign the created groups
lc.UnmatchedGroups = groups
}
//----------------------------------------------------------------------------------------------------------------------
// getLibraryCollectionEntryFromListStatus normalizes an anilist.MediaListStatus for list display, mapping "repeating" to "current".
func getLibraryCollectionEntryFromListStatus(st anilist.MediaListStatus) anilist.MediaListStatus {
if st == anilist.MediaListStatusRepeating {
return anilist.MediaListStatusCurrent
}
return st
}

View File

@@ -0,0 +1,95 @@
package anime_test
import (
"seanime/internal/api/anilist"
"seanime/internal/api/metadata"
"seanime/internal/library/anime"
"seanime/internal/platforms/anilist_platform"
"seanime/internal/test_utils"
"seanime/internal/util"
"testing"
"github.com/samber/lo"
"github.com/stretchr/testify/assert"
)
func TestNewLibraryCollection(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.Anilist())
logger := util.NewLogger()
metadataProvider := metadata.GetMockProvider(t)
anilistClient := anilist.TestGetMockAnilistClient()
anilistPlatform := anilist_platform.NewAnilistPlatform(anilistClient, logger)
animeCollection, err := anilistPlatform.GetAnimeCollection(t.Context(), false)
if assert.NoError(t, err) {
// Mock Anilist collection and local files
// User is currently watching Sousou no Frieren and One Piece
lfs := make([]*anime.LocalFile, 0)
// Sousou no Frieren
// 7 episodes downloaded, 4 watched
mediaId := 154587
lfs = append(lfs, anime.MockHydratedLocalFiles(
anime.MockGenerateHydratedLocalFileGroupOptions("E:/Anime", "E:\\Anime\\Sousou no Frieren\\[SubsPlease] Sousou no Frieren - %ep (1080p) [F02B9CEE].mkv", mediaId, []anime.MockHydratedLocalFileWrapperOptionsMetadata{
{MetadataEpisode: 1, MetadataAniDbEpisode: "1", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 2, MetadataAniDbEpisode: "2", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 3, MetadataAniDbEpisode: "3", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 4, MetadataAniDbEpisode: "4", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 5, MetadataAniDbEpisode: "5", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 6, MetadataAniDbEpisode: "6", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 7, MetadataAniDbEpisode: "7", MetadataType: anime.LocalFileTypeMain},
}),
)...)
anilist.TestModifyAnimeCollectionEntry(animeCollection, mediaId, anilist.TestModifyAnimeCollectionEntryInput{
Status: lo.ToPtr(anilist.MediaListStatusCurrent),
Progress: lo.ToPtr(4), // Mock progress
})
// One Piece
// Downloaded 1070-1075 but only watched up until 1060
mediaId = 21
lfs = append(lfs, anime.MockHydratedLocalFiles(
anime.MockGenerateHydratedLocalFileGroupOptions("E:/Anime", "E:\\Anime\\One Piece\\[SubsPlease] One Piece - %ep (1080p) [F02B9CEE].mkv", mediaId, []anime.MockHydratedLocalFileWrapperOptionsMetadata{
{MetadataEpisode: 1070, MetadataAniDbEpisode: "1070", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 1071, MetadataAniDbEpisode: "1071", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 1072, MetadataAniDbEpisode: "1072", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 1073, MetadataAniDbEpisode: "1073", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 1074, MetadataAniDbEpisode: "1074", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 1075, MetadataAniDbEpisode: "1075", MetadataType: anime.LocalFileTypeMain},
}),
)...)
anilist.TestModifyAnimeCollectionEntry(animeCollection, mediaId, anilist.TestModifyAnimeCollectionEntryInput{
Status: lo.ToPtr(anilist.MediaListStatusCurrent),
Progress: lo.ToPtr(1060), // Mock progress
})
// Add unmatched local files
mediaId = 0
lfs = append(lfs, anime.MockHydratedLocalFiles(
anime.MockGenerateHydratedLocalFileGroupOptions("E:/Anime", "E:\\Anime\\Unmatched\\[SubsPlease] Unmatched - %ep (1080p) [F02B9CEE].mkv", mediaId, []anime.MockHydratedLocalFileWrapperOptionsMetadata{
{MetadataEpisode: 1, MetadataAniDbEpisode: "1", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 2, MetadataAniDbEpisode: "2", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 3, MetadataAniDbEpisode: "3", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 4, MetadataAniDbEpisode: "4", MetadataType: anime.LocalFileTypeMain},
}),
)...)
libraryCollection, err := anime.NewLibraryCollection(t.Context(), &anime.NewLibraryCollectionOptions{
AnimeCollection: animeCollection,
LocalFiles: lfs,
Platform: anilistPlatform,
MetadataProvider: metadataProvider,
})
if assert.NoError(t, err) {
assert.Equal(t, 1, len(libraryCollection.ContinueWatchingList)) // Only Sousou no Frieren is in the continue watching list
assert.Equal(t, 4, len(libraryCollection.UnmatchedLocalFiles)) // 4 unmatched local files
}
}
}

View File

@@ -0,0 +1,377 @@
package anime
import (
"context"
"errors"
"seanime/internal/api/anilist"
"seanime/internal/api/metadata"
"seanime/internal/hook"
"seanime/internal/platforms/anilist_platform"
"seanime/internal/platforms/platform"
"sort"
"github.com/samber/lo"
"github.com/sourcegraph/conc/pool"
)
type (
// Entry is a container for all data related to a media.
// It is the primary data structure used by the frontend.
Entry struct {
MediaId int `json:"mediaId"`
Media *anilist.BaseAnime `json:"media"`
EntryListData *EntryListData `json:"listData"`
EntryLibraryData *EntryLibraryData `json:"libraryData"`
EntryDownloadInfo *EntryDownloadInfo `json:"downloadInfo,omitempty"`
Episodes []*Episode `json:"episodes"`
NextEpisode *Episode `json:"nextEpisode"`
LocalFiles []*LocalFile `json:"localFiles"`
AnidbId int `json:"anidbId"`
CurrentEpisodeCount int `json:"currentEpisodeCount"`
IsNakamaEntry bool `json:"_isNakamaEntry"`
NakamaLibraryData *NakamaEntryLibraryData `json:"nakamaLibraryData,omitempty"`
}
// EntryListData holds the details of the AniList entry.
EntryListData struct {
Progress int `json:"progress,omitempty"`
Score float64 `json:"score,omitempty"`
Status *anilist.MediaListStatus `json:"status,omitempty"`
Repeat int `json:"repeat,omitempty"`
StartedAt string `json:"startedAt,omitempty"`
CompletedAt string `json:"completedAt,omitempty"`
}
)
type (
// NewEntryOptions holds the options used to create a new Entry.
NewEntryOptions struct {
MediaId int
LocalFiles []*LocalFile // All local files
AnimeCollection *anilist.AnimeCollection
Platform platform.Platform
MetadataProvider metadata.Provider
IsSimulated bool // If the account is simulated
}
)
// NewEntry creates a new Entry based on the media id and a list of local files.
// An Entry is a container for all data related to a media.
// It is the primary data structure used by the frontend.
//
// It has the following properties:
// - EntryListData: Details of the AniList entry (if any)
// - EntryLibraryData: Details of the local files (if any)
// - EntryDownloadInfo: Details of the download status
// - Episodes: List of episodes (if any)
// - NextEpisode: Next episode to watch (if any)
// - LocalFiles: List of local files (if any)
// - AnidbId: AniDB id
// - CurrentEpisodeCount: Current episode count
func NewEntry(ctx context.Context, opts *NewEntryOptions) (*Entry, error) {
// Create new Entry
entry := new(Entry)
entry.MediaId = opts.MediaId
reqEvent := new(AnimeEntryRequestedEvent)
reqEvent.MediaId = opts.MediaId
reqEvent.LocalFiles = opts.LocalFiles
reqEvent.AnimeCollection = opts.AnimeCollection
reqEvent.Entry = entry
err := hook.GlobalHookManager.OnAnimeEntryRequested().Trigger(reqEvent)
if err != nil {
return nil, err
}
opts.MediaId = reqEvent.MediaId // Override the media ID
opts.LocalFiles = reqEvent.LocalFiles // Override the local files
opts.AnimeCollection = reqEvent.AnimeCollection // Override the anime collection
entry = reqEvent.Entry // Override the entry
// Default prevented, return the modified entry
if reqEvent.DefaultPrevented {
event := new(AnimeEntryEvent)
event.Entry = reqEvent.Entry
err = hook.GlobalHookManager.OnAnimeEntry().Trigger(event)
if err != nil {
return nil, err
}
if event.Entry == nil {
return nil, errors.New("no entry was returned")
}
return event.Entry, nil
}
if opts.AnimeCollection == nil ||
opts.Platform == nil {
return nil, errors.New("missing arguments when creating media entry")
}
// +---------------------+
// | AniList entry |
// +---------------------+
// Get the Anilist List entry
anilistEntry, found := opts.AnimeCollection.GetListEntryFromAnimeId(opts.MediaId)
// Set the media
// If the Anilist List entry does not exist, fetch the media from AniList
if !found {
// If the Anilist entry does not exist, instantiate one with zero values
anilistEntry = &anilist.AnimeListEntry{}
// Fetch the media
fetchedMedia, err := opts.Platform.GetAnime(ctx, opts.MediaId) // DEVNOTE: Maybe cache it?
if err != nil {
return nil, err
}
entry.Media = fetchedMedia
} else {
animeEvent := new(anilist_platform.GetAnimeEvent)
animeEvent.Anime = anilistEntry.Media
err := hook.GlobalHookManager.OnGetAnime().Trigger(animeEvent)
if err != nil {
return nil, err
}
entry.Media = animeEvent.Anime
}
// If the account is simulated and the media was in the library, we will still fetch
// the media from AniList to ensure we have the latest data
if opts.IsSimulated && found {
// Fetch the media
fetchedMedia, err := opts.Platform.GetAnime(ctx, opts.MediaId) // DEVNOTE: Maybe cache it?
if err != nil {
return nil, err
}
entry.Media = fetchedMedia
}
entry.CurrentEpisodeCount = entry.Media.GetCurrentEpisodeCount()
// +---------------------+
// | Local files |
// +---------------------+
// Get the entry's local files
lfs := GetLocalFilesFromMediaId(opts.LocalFiles, opts.MediaId)
entry.LocalFiles = lfs // Returns empty slice if no local files are found
libraryData, _ := NewEntryLibraryData(&NewEntryLibraryDataOptions{
EntryLocalFiles: lfs,
MediaId: entry.Media.ID,
CurrentProgress: anilistEntry.GetProgressSafe(),
})
entry.EntryLibraryData = libraryData
// +---------------------+
// | Animap |
// +---------------------+
// Fetch AniDB data and cache it for 30 minutes
animeMetadata, err := opts.MetadataProvider.GetAnimeMetadata(metadata.AnilistPlatform, opts.MediaId)
if err != nil {
// +---------------- Start
// +---------------------+
// | Without Animap |
// +---------------------+
// If Animap data is not found, we will still create the Entry without it
simpleAnimeEntry, err := NewSimpleEntry(ctx, &NewSimpleAnimeEntryOptions{
MediaId: opts.MediaId,
LocalFiles: opts.LocalFiles,
AnimeCollection: opts.AnimeCollection,
Platform: opts.Platform,
})
if err != nil {
return nil, err
}
event := &AnimeEntryEvent{
Entry: &Entry{
MediaId: simpleAnimeEntry.MediaId,
Media: simpleAnimeEntry.Media,
EntryListData: simpleAnimeEntry.EntryListData,
EntryLibraryData: simpleAnimeEntry.EntryLibraryData,
EntryDownloadInfo: nil,
Episodes: simpleAnimeEntry.Episodes,
NextEpisode: simpleAnimeEntry.NextEpisode,
LocalFiles: simpleAnimeEntry.LocalFiles,
AnidbId: 0,
CurrentEpisodeCount: simpleAnimeEntry.CurrentEpisodeCount,
},
}
err = hook.GlobalHookManager.OnAnimeEntry().Trigger(event)
if err != nil {
return nil, err
}
return event.Entry, nil
// +--------------- End
}
entry.AnidbId = animeMetadata.GetMappings().AnidbId
// Instantiate EntryListData
// If the media exists in the user's anime list, add the details
if found {
entry.EntryListData = NewEntryListData(anilistEntry)
}
// +---------------------+
// | Episodes |
// +---------------------+
// Create episode entities
entry.hydrateEntryEpisodeData(anilistEntry, animeMetadata, opts.MetadataProvider)
event := &AnimeEntryEvent{
Entry: entry,
}
err = hook.GlobalHookManager.OnAnimeEntry().Trigger(event)
if err != nil {
return nil, err
}
return event.Entry, nil
}
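// newEntrySketch is an illustrative usage sketch (hypothetical, not used by the
// package): it creates an Entry for a media id and returns the next unwatched
// episode, which NewEntry pre-computes when episode data is available.
func newEntrySketch(ctx context.Context, opts *NewEntryOptions) (*Episode, bool) {
	entry, err := NewEntry(ctx, opts)
	if err != nil {
		return nil, false
	}
	return entry.NextEpisode, entry.NextEpisode != nil
}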
//----------------------------------------------------------------------------------------------------------------------
// hydrateEntryEpisodeData creates the episode entities for the entry.
// animeMetadata, Media and LocalFiles should be defined
func (e *Entry) hydrateEntryEpisodeData(
anilistEntry *anilist.AnimeListEntry,
animeMetadata *metadata.AnimeMetadata,
metadataProvider metadata.Provider,
) {
if animeMetadata.Episodes == nil || len(animeMetadata.Episodes) == 0 {
return
}
// +---------------------+
// | Discrepancy |
// +---------------------+
// We offset the progress number by 1 if there is a discrepancy
progressOffset := 0
if FindDiscrepancy(e.Media, animeMetadata) == DiscrepancyAniListCountsEpisodeZero {
progressOffset = 1
_, ok := lo.Find(e.LocalFiles, func(lf *LocalFile) bool {
return lf.Metadata.Episode == 0
})
// Remove the offset if episode 0 is not found
if !ok {
progressOffset = 0
}
}
// +---------------------+
// | Episodes |
// +---------------------+
p := pool.NewWithResults[*Episode]()
for _, lf := range e.LocalFiles {
p.Go(func() *Episode {
return NewEpisode(&NewEpisodeOptions{
LocalFile: lf,
OptionalAniDBEpisode: "",
AnimeMetadata: animeMetadata,
Media: e.Media,
ProgressOffset: progressOffset,
IsDownloaded: true,
MetadataProvider: metadataProvider,
})
})
}
episodes := p.Wait()
// Sort by progress number
sort.Slice(episodes, func(i, j int) bool {
return episodes[i].EpisodeNumber < episodes[j].EpisodeNumber
})
e.Episodes = episodes
// +---------------------+
// | Download Info |
// +---------------------+
info, err := NewEntryDownloadInfo(&NewEntryDownloadInfoOptions{
LocalFiles: e.LocalFiles,
AnimeMetadata: animeMetadata,
Progress: anilistEntry.Progress,
Status: anilistEntry.Status,
Media: e.Media,
MetadataProvider: metadataProvider,
})
if err == nil {
e.EntryDownloadInfo = info
}
nextEp, found := e.FindNextEpisode()
if found {
e.NextEpisode = nextEp
}
}
func NewEntryListData(anilistEntry *anilist.AnimeListEntry) *EntryListData {
return &EntryListData{
Progress: anilistEntry.GetProgressSafe(),
Score: anilistEntry.GetScoreSafe(),
Status: anilistEntry.Status,
Repeat: anilistEntry.GetRepeatSafe(),
StartedAt: anilist.FuzzyDateToString(anilistEntry.StartedAt),
CompletedAt: anilist.FuzzyDateToString(anilistEntry.CompletedAt),
}
}
//----------------------------------------------------------------------------------------------------------------------
type Discrepancy int
const (
DiscrepancyAniListCountsEpisodeZero Discrepancy = iota
DiscrepancyAniListCountsSpecials
DiscrepancyAniDBHasMore
DiscrepancyNone
)
// FindDiscrepancy returns the discrepancy between the AniList and AniDB episode counts.
// It returns DiscrepancyAniListCountsEpisodeZero if AniList most likely has episode 0 as part of the main count.
// It returns DiscrepancyAniListCountsSpecials if there is a discrepancy between the AniList and AniDB episode counts and specials are included in the AniList count.
// It returns DiscrepancyAniDBHasMore if the AniDB episode count is greater than the AniList episode count.
// It returns DiscrepancyNone if there is no discrepancy.
func FindDiscrepancy(media *anilist.BaseAnime, animeMetadata *metadata.AnimeMetadata) Discrepancy {
if media == nil || animeMetadata == nil || animeMetadata.Episodes == nil {
return DiscrepancyNone
}
_, aniDBHasS1 := animeMetadata.Episodes["S1"]
_, aniDBHasS2 := animeMetadata.Episodes["S2"]
difference := media.GetCurrentEpisodeCount() - animeMetadata.GetMainEpisodeCount()
if difference == 0 {
return DiscrepancyNone
}
if difference < 0 {
return DiscrepancyAniDBHasMore
}
if difference == 1 && aniDBHasS1 {
return DiscrepancyAniListCountsEpisodeZero
}
if difference > 1 && aniDBHasS1 && aniDBHasS2 {
return DiscrepancyAniListCountsSpecials
}
return DiscrepancyNone
}
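// discrepancySketch is a hypothetical restatement of the decision table
// implemented by FindDiscrepancy above, written against plain counts instead of
// the API types. It is illustrative only and not used by the package.
func discrepancySketch(anilistCount, anidbMainCount int, anidbHasS1, anidbHasS2 bool) Discrepancy {
	switch diff := anilistCount - anidbMainCount; {
	case diff == 0:
		return DiscrepancyNone
	case diff < 0:
		return DiscrepancyAniDBHasMore
	case diff == 1 && anidbHasS1:
		// e.g. AniList counts episode 0 as a main episode, AniDB lists it as "S1"
		return DiscrepancyAniListCountsEpisodeZero
	case diff > 1 && anidbHasS1 && anidbHasS2:
		return DiscrepancyAniListCountsSpecials
	default:
		return DiscrepancyNone
	}
}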

View File

@@ -0,0 +1,350 @@
package anime
import (
"errors"
"fmt"
"seanime/internal/api/anilist"
"seanime/internal/api/metadata"
"seanime/internal/hook"
"strconv"
"github.com/samber/lo"
"github.com/sourcegraph/conc/pool"
)
type (
// EntryDownloadInfo is instantiated by the Entry
EntryDownloadInfo struct {
EpisodesToDownload []*EntryDownloadEpisode `json:"episodesToDownload"`
CanBatch bool `json:"canBatch"`
BatchAll bool `json:"batchAll"`
HasInaccurateSchedule bool `json:"hasInaccurateSchedule"`
Rewatch bool `json:"rewatch"`
AbsoluteOffset int `json:"absoluteOffset"`
}
EntryDownloadEpisode struct {
EpisodeNumber int `json:"episodeNumber"`
AniDBEpisode string `json:"aniDBEpisode"`
Episode *Episode `json:"episode"`
}
)
type (
NewEntryDownloadInfoOptions struct {
// Media's local files
LocalFiles []*LocalFile
AnimeMetadata *metadata.AnimeMetadata
Media *anilist.BaseAnime
Progress *int
Status *anilist.MediaListStatus
MetadataProvider metadata.Provider
}
)
// NewEntryDownloadInfo returns a list of episodes to download or episodes for the torrent/debrid streaming views
// based on the options provided.
func NewEntryDownloadInfo(opts *NewEntryDownloadInfoOptions) (*EntryDownloadInfo, error) {
reqEvent := &AnimeEntryDownloadInfoRequestedEvent{
LocalFiles: opts.LocalFiles,
AnimeMetadata: opts.AnimeMetadata,
Media: opts.Media,
Progress: opts.Progress,
Status: opts.Status,
EntryDownloadInfo: &EntryDownloadInfo{},
}
err := hook.GlobalHookManager.OnAnimeEntryDownloadInfoRequested().Trigger(reqEvent)
if err != nil {
return nil, err
}
if reqEvent.DefaultPrevented {
return reqEvent.EntryDownloadInfo, nil
}
opts.LocalFiles = reqEvent.LocalFiles
opts.AnimeMetadata = reqEvent.AnimeMetadata
opts.Media = reqEvent.Media
opts.Progress = reqEvent.Progress
opts.Status = reqEvent.Status
if *opts.Media.Status == anilist.MediaStatusNotYetReleased {
return &EntryDownloadInfo{}, nil
}
if opts.AnimeMetadata == nil {
return nil, errors.New("could not get anime metadata")
}
currentEpisodeCount := opts.Media.GetCurrentEpisodeCount()
if currentEpisodeCount == -1 && opts.AnimeMetadata != nil {
currentEpisodeCount = opts.AnimeMetadata.GetCurrentEpisodeCount()
}
if currentEpisodeCount == -1 {
return nil, errors.New("could not get current media episode count")
}
// +---------------------+
// | Discrepancy |
// +---------------------+
// e.g., AniList counts episode 0 as a main episode while AniDB lists it as the special "S1"
discrepancy := FindDiscrepancy(opts.Media, opts.AnimeMetadata)
// AniList is the source of truth for episode numbers
epSlice := newEpisodeSlice(currentEpisodeCount)
// Handle discrepancies
if discrepancy != DiscrepancyNone {
// AniList counts episode 0 as a main episode, while AniDB lists it as the special "S1"
if discrepancy == DiscrepancyAniListCountsEpisodeZero {
// Add "S1" to the beginning of the episode slice
epSlice.trimEnd(1)
epSlice.prepend(0, "S1")
}
// If AniList includes specials, but AniDB does not
if discrepancy == DiscrepancyAniListCountsSpecials {
diff := currentEpisodeCount - opts.AnimeMetadata.GetMainEpisodeCount()
epSlice.trimEnd(diff)
for i := 0; i < diff; i++ {
epSlice.add(currentEpisodeCount-i, "S"+strconv.Itoa(i+1))
}
}
// If AniDB has more episodes than AniList
if discrepancy == DiscrepancyAniDBHasMore {
// Do nothing
}
}
// Filter out episodes not aired
if opts.Media.NextAiringEpisode != nil {
epSlice.filter(func(item *episodeSliceItem, index int) bool {
// e.g. if the next airing episode is 13, keep only episodes 1-12 (filter out 13 and above)
return index+1 < opts.Media.NextAiringEpisode.Episode
})
}
// Get the progress; if the media isn't in the user's list, progress is 0
// If the media is completed, reset progress to 0
progress := 0
if opts.Progress != nil {
progress = *opts.Progress
}
if opts.Status != nil {
if *opts.Status == anilist.MediaListStatusCompleted {
progress = 0
}
}
hasInaccurateSchedule := false
if opts.Media.NextAiringEpisode == nil && *opts.Media.Status == anilist.MediaStatusReleasing {
hasInaccurateSchedule = true
}
// Filter out episodes already watched (index+1 is the progress number)
toDownloadSlice := epSlice.filterNew(func(item *episodeSliceItem, index int) bool {
return index+1 > progress
})
// This slice contains the episode numbers of the downloaded main local files
// The source of truth is AniDB, but we will handle discrepancies
lfsEpSlice := newEpisodeSlice(0)
if opts.LocalFiles != nil {
// Get all episode numbers of main local files
for _, lf := range opts.LocalFiles {
if lf.Metadata.Type == LocalFileTypeMain {
lfsEpSlice.add(lf.Metadata.Episode, lf.Metadata.AniDBEpisode)
}
}
}
// Filter out downloaded episodes
toDownloadSlice.filter(func(item *episodeSliceItem, index int) bool {
isDownloaded := false
for _, lf := range opts.LocalFiles {
if lf.Metadata.Type != LocalFileTypeMain {
continue
}
// If the file episode number matches that of the episode slice item
if lf.Metadata.Episode == item.episodeNumber {
isDownloaded = true
}
// If the slice episode number is 0 and the file is a main S1
if discrepancy == DiscrepancyAniListCountsEpisodeZero && item.episodeNumber == 0 && lf.Metadata.AniDBEpisode == "S1" {
isDownloaded = true
}
}
return !isDownloaded
})
// +---------------------+
// | EntryEpisode |
// +---------------------+
// Generate `episodesToDownload` based on `toDownloadSlice`
// DEVNOTE: The EntryEpisode generated has inaccurate progress numbers since no local files are passed in
progressOffset := 0
if discrepancy == DiscrepancyAniListCountsEpisodeZero {
progressOffset = 1
}
p := pool.NewWithResults[*EntryDownloadEpisode]()
for _, ep := range toDownloadSlice.getSlice() {
p.Go(func() *EntryDownloadEpisode {
str := new(EntryDownloadEpisode)
str.EpisodeNumber = ep.episodeNumber
str.AniDBEpisode = ep.aniDBEpisode
// Create a new episode with a placeholder local file
// We pass that placeholder local file so that all episodes are hydrated as main episodes for consistency
str.Episode = NewEpisode(&NewEpisodeOptions{
LocalFile: &LocalFile{
ParsedData: &LocalFileParsedData{},
ParsedFolderData: []*LocalFileParsedData{},
Metadata: &LocalFileMetadata{
Episode: ep.episodeNumber,
Type: LocalFileTypeMain,
AniDBEpisode: ep.aniDBEpisode,
},
},
OptionalAniDBEpisode: str.AniDBEpisode,
AnimeMetadata: opts.AnimeMetadata,
Media: opts.Media,
ProgressOffset: progressOffset,
IsDownloaded: false,
MetadataProvider: opts.MetadataProvider,
})
str.Episode.AniDBEpisode = ep.aniDBEpisode
// Reset the local file to nil, since it's a placeholder
str.Episode.LocalFile = nil
return str
})
}
episodesToDownload := p.Wait()
//--------------
canBatch := false
if *opts.Media.GetStatus() == anilist.MediaStatusFinished && opts.Media.GetTotalEpisodeCount() > 0 {
canBatch = true
}
batchAll := false
if canBatch && lfsEpSlice.len() == 0 && progress == 0 {
batchAll = true
}
rewatch := false
if opts.Status != nil && *opts.Status == anilist.MediaListStatusCompleted {
rewatch = true
}
downloadInfo := &EntryDownloadInfo{
EpisodesToDownload: episodesToDownload,
CanBatch: canBatch,
BatchAll: batchAll,
Rewatch: rewatch,
HasInaccurateSchedule: hasInaccurateSchedule,
AbsoluteOffset: opts.AnimeMetadata.GetOffset(),
}
event := &AnimeEntryDownloadInfoEvent{
EntryDownloadInfo: downloadInfo,
}
err = hook.GlobalHookManager.OnAnimeEntryDownloadInfo().Trigger(event)
if err != nil {
return nil, err
}
return event.EntryDownloadInfo, nil
}
type episodeSliceItem struct {
episodeNumber int
aniDBEpisode string
}
type episodeSlice []*episodeSliceItem
func newEpisodeSlice(episodeCount int) *episodeSlice {
s := make([]*episodeSliceItem, 0)
for i := 0; i < episodeCount; i++ {
s = append(s, &episodeSliceItem{episodeNumber: i + 1, aniDBEpisode: strconv.Itoa(i + 1)})
}
ret := &episodeSlice{}
ret.set(s)
return ret
}
func (s *episodeSlice) set(eps []*episodeSliceItem) {
*s = eps
}
func (s *episodeSlice) add(episodeNumber int, aniDBEpisode string) {
*s = append(*s, &episodeSliceItem{episodeNumber: episodeNumber, aniDBEpisode: aniDBEpisode})
}
func (s *episodeSlice) prepend(episodeNumber int, aniDBEpisode string) {
*s = append([]*episodeSliceItem{{episodeNumber: episodeNumber, aniDBEpisode: aniDBEpisode}}, *s...)
}
func (s *episodeSlice) trimEnd(n int) {
*s = (*s)[:len(*s)-n]
}
func (s *episodeSlice) trimStart(n int) {
*s = (*s)[n:]
}
func (s *episodeSlice) len() int {
return len(*s)
}
func (s *episodeSlice) get(index int) *episodeSliceItem {
return (*s)[index]
}
func (s *episodeSlice) getEpisodeNumber(episodeNumber int) *episodeSliceItem {
for _, item := range *s {
if item.episodeNumber == episodeNumber {
return item
}
}
return nil
}
func (s *episodeSlice) filter(filter func(*episodeSliceItem, int) bool) {
*s = lo.Filter(*s, filter)
}
func (s *episodeSlice) filterNew(filter func(*episodeSliceItem, int) bool) *episodeSlice {
s2 := make(episodeSlice, 0)
for i, item := range *s {
if filter(item, i) {
s2 = append(s2, item)
}
}
return &s2
}
func (s *episodeSlice) copy() *episodeSlice {
s2 := make(episodeSlice, len(*s), cap(*s))
for i, item := range *s {
s2[i] = item
}
return &s2
}
func (s *episodeSlice) getSlice() []*episodeSliceItem {
return *s
}
func (s *episodeSlice) print() {
for i, item := range *s {
fmt.Printf("(%d) %d -> %s\n", i, item.episodeNumber, item.aniDBEpisode)
}
}
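// episodeZeroSliceSketch is an illustrative sketch (hypothetical, not used by
// the package) of how the helpers above rebuild the slice when AniList counts
// episode 0 as a main episode (DiscrepancyAniListCountsEpisodeZero).
func episodeZeroSliceSketch() *episodeSlice {
	s := newEpisodeSlice(12) // episodes 1..12 mapped to AniDB "1".."12"
	s.trimEnd(1)             // drop the last slot, since one slot now belongs to episode 0
	s.prepend(0, "S1")       // episode 0 maps to the AniDB special "S1"
	return s                 // yields 0->"S1", 1->"1", ..., 11->"11"
}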

View File

@@ -0,0 +1,168 @@
package anime_test
import (
"context"
"seanime/internal/api/anilist"
"seanime/internal/api/metadata"
"seanime/internal/library/anime"
"seanime/internal/test_utils"
"testing"
"github.com/samber/lo"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestNewEntryDownloadInfo(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.Anilist())
metadataProvider := metadata.GetMockProvider(t)
anilistClient := anilist.TestGetMockAnilistClient()
animeCollection, err := anilistClient.AnimeCollection(context.Background(), nil)
if err != nil {
t.Fatal(err)
}
tests := []struct {
name string
localFiles []*anime.LocalFile
mediaId int
currentProgress int
status anilist.MediaListStatus
expectedEpisodeNumbersToDownload []struct {
episodeNumber int
aniDbEpisode string
}
}{
{
// AniList includes episode 0 as a main episode but AniDB lists it as a special S1
// So we should expect to see episode 0 (S1) in the list of episodes to download
name: "Mushoku Tensei: Jobless Reincarnation Season 2",
localFiles: nil,
mediaId: 146065,
currentProgress: 0,
status: anilist.MediaListStatusCurrent,
expectedEpisodeNumbersToDownload: []struct {
episodeNumber int
aniDbEpisode string
}{
{episodeNumber: 0, aniDbEpisode: "S1"},
{episodeNumber: 1, aniDbEpisode: "1"},
{episodeNumber: 2, aniDbEpisode: "2"},
{episodeNumber: 3, aniDbEpisode: "3"},
{episodeNumber: 4, aniDbEpisode: "4"},
{episodeNumber: 5, aniDbEpisode: "5"},
{episodeNumber: 6, aniDbEpisode: "6"},
{episodeNumber: 7, aniDbEpisode: "7"},
{episodeNumber: 8, aniDbEpisode: "8"},
{episodeNumber: 9, aniDbEpisode: "9"},
{episodeNumber: 10, aniDbEpisode: "10"},
{episodeNumber: 11, aniDbEpisode: "11"},
{episodeNumber: 12, aniDbEpisode: "12"},
},
},
{
// Same as above but progress of 1 should just eliminate episode 0 from the list and not episode 1
name: "Mushoku Tensei: Jobless Reincarnation Season 2 - 2",
localFiles: nil,
mediaId: 146065,
currentProgress: 1,
status: anilist.MediaListStatusCurrent,
expectedEpisodeNumbersToDownload: []struct {
episodeNumber int
aniDbEpisode string
}{
{episodeNumber: 1, aniDbEpisode: "1"},
{episodeNumber: 2, aniDbEpisode: "2"},
{episodeNumber: 3, aniDbEpisode: "3"},
{episodeNumber: 4, aniDbEpisode: "4"},
{episodeNumber: 5, aniDbEpisode: "5"},
{episodeNumber: 6, aniDbEpisode: "6"},
{episodeNumber: 7, aniDbEpisode: "7"},
{episodeNumber: 8, aniDbEpisode: "8"},
{episodeNumber: 9, aniDbEpisode: "9"},
{episodeNumber: 10, aniDbEpisode: "10"},
{episodeNumber: 11, aniDbEpisode: "11"},
{episodeNumber: 12, aniDbEpisode: "12"},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
anilistEntry, _ := animeCollection.GetListEntryFromAnimeId(tt.mediaId)
animeMetadata, err := metadataProvider.GetAnimeMetadata(metadata.AnilistPlatform, tt.mediaId)
require.NoError(t, err)
info, err := anime.NewEntryDownloadInfo(&anime.NewEntryDownloadInfoOptions{
LocalFiles: tt.localFiles,
Progress: &tt.currentProgress,
Status: &tt.status,
Media: anilistEntry.Media,
MetadataProvider: metadataProvider,
AnimeMetadata: animeMetadata,
})
if assert.NoError(t, err) && assert.NotNil(t, info) {
foundEpToDownload := make([]struct {
episodeNumber int
aniDbEpisode string
}, 0)
for _, ep := range info.EpisodesToDownload {
foundEpToDownload = append(foundEpToDownload, struct {
episodeNumber int
aniDbEpisode string
}{
episodeNumber: ep.EpisodeNumber,
aniDbEpisode: ep.AniDBEpisode,
})
}
assert.ElementsMatch(t, tt.expectedEpisodeNumbersToDownload, foundEpToDownload)
}
})
}
}
func TestNewEntryDownloadInfo2(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.Anilist())
mediaId := 21
metadataProvider := metadata.GetMockProvider(t)
anilistClient := anilist.TestGetMockAnilistClient()
animeCollection, err := anilistClient.AnimeCollection(context.Background(), nil)
if err != nil {
t.Fatal(err)
}
anilistEntry, _ := animeCollection.GetListEntryFromAnimeId(mediaId)
animeMetadata, err := metadataProvider.GetAnimeMetadata(metadata.AnilistPlatform, mediaId)
require.NoError(t, err)
info, err := anime.NewEntryDownloadInfo(&anime.NewEntryDownloadInfoOptions{
LocalFiles: nil,
Progress: lo.ToPtr(0),
Status: lo.ToPtr(anilist.MediaListStatusCurrent),
Media: anilistEntry.Media,
MetadataProvider: metadataProvider,
AnimeMetadata: animeMetadata,
})
require.NoError(t, err)
require.NotNil(t, info)
t.Log(len(info.EpisodesToDownload))
assert.GreaterOrEqual(t, len(info.EpisodesToDownload), 1096)
}

View File

@@ -0,0 +1,251 @@
package anime
import "github.com/samber/lo"
// HasWatchedAll returns true if all episodes have been watched.
// Returns false if there are no downloaded episodes.
func (e *Entry) HasWatchedAll() bool {
// If there are no episodes, return false
latestEp, ok := e.FindLatestEpisode()
if !ok {
return false
}
return e.GetCurrentProgress() >= latestEp.GetProgressNumber()
}
// FindNextEpisode returns the episode whose episode number is the same as the progress number + 1.
// Returns false if there are no episodes or if there is no next episode.
func (e *Entry) FindNextEpisode() (*Episode, bool) {
eps, ok := e.FindMainEpisodes()
if !ok {
return nil, false
}
ep, ok := lo.Find(eps, func(ep *Episode) bool {
return ep.GetProgressNumber() == e.GetCurrentProgress()+1
})
if !ok {
return nil, false
}
return ep, true
}
// FindLatestEpisode returns the *main* episode with the highest episode number.
// Returns false if there are no episodes.
func (e *Entry) FindLatestEpisode() (*Episode, bool) {
// If there are no episodes, return nil
eps, ok := e.FindMainEpisodes()
if !ok {
return nil, false
}
// Get the episode with the highest progress number
latest := eps[0]
for _, ep := range eps {
if ep.GetProgressNumber() > latest.GetProgressNumber() {
latest = ep
}
}
return latest, true
}
// FindLatestLocalFile returns the *main* local file with the highest episode number.
// Returns false if there are no local files.
func (e *Entry) FindLatestLocalFile() (*LocalFile, bool) {
lfs, ok := e.FindMainLocalFiles()
// If there are no local files, return nil
if !ok {
return nil, false
}
// Get the local file with the highest episode number
latest := lfs[0]
for _, lf := range lfs {
if lf.GetEpisodeNumber() > latest.GetEpisodeNumber() {
latest = lf
}
}
return latest, true
}
//----------------------------------------------------------------------------------------------------------------------
// GetCurrentProgress returns the progress number.
// If the media entry is not in any AniList list, returns 0.
func (e *Entry) GetCurrentProgress() int {
listData, ok := e.FindListData()
if !ok {
return 0
}
return listData.Progress
}
// FindEpisodes returns the episodes.
// Returns false if there are no episodes.
func (e *Entry) FindEpisodes() ([]*Episode, bool) {
if e.Episodes == nil {
return nil, false
}
return e.Episodes, true
}
// FindMainEpisodes returns the main episodes.
// Returns false if there are no main episodes.
func (e *Entry) FindMainEpisodes() ([]*Episode, bool) {
if e.Episodes == nil {
return nil, false
}
eps := make([]*Episode, 0)
for _, ep := range e.Episodes {
if ep.IsMain() {
eps = append(eps, ep)
}
}
if len(eps) == 0 {
return nil, false
}
return eps, true
}
// FindLocalFiles returns the local files.
// Returns false if there are no local files.
func (e *Entry) FindLocalFiles() ([]*LocalFile, bool) {
if !e.IsDownloaded() {
return nil, false
}
return e.LocalFiles, true
}
// FindMainLocalFiles returns *main* local files.
// Returns false if there are no local files.
func (e *Entry) FindMainLocalFiles() ([]*LocalFile, bool) {
if !e.IsDownloaded() {
return nil, false
}
lfs := make([]*LocalFile, 0)
for _, lf := range e.LocalFiles {
if lf.IsMain() {
lfs = append(lfs, lf)
}
}
if len(lfs) == 0 {
return nil, false
}
return lfs, true
}
// IsDownloaded returns true if there are local files.
func (e *Entry) IsDownloaded() bool {
if e.LocalFiles == nil {
return false
}
return len(e.LocalFiles) > 0
}
func (e *Entry) FindListData() (*EntryListData, bool) {
if e.EntryListData == nil {
return nil, false
}
return e.EntryListData, true
}
func (e *Entry) IsInAnimeCollection() bool {
_, ok := e.FindListData()
return ok
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
func (e *SimpleEntry) GetCurrentProgress() int {
listData, ok := e.FindListData()
if !ok {
return 0
}
return listData.Progress
}
func (e *SimpleEntry) FindMainEpisodes() ([]*Episode, bool) {
if e.Episodes == nil {
return nil, false
}
eps := make([]*Episode, 0)
for _, ep := range e.Episodes {
if ep.IsMain() {
eps = append(eps, ep)
}
}
if len(eps) == 0 {
return nil, false
}
return eps, true
}
func (e *SimpleEntry) FindNextEpisode() (*Episode, bool) {
eps, ok := e.FindMainEpisodes()
if !ok {
return nil, false
}
ep, ok := lo.Find(eps, func(ep *Episode) bool {
return ep.GetProgressNumber() == e.GetCurrentProgress()+1
})
if !ok {
return nil, false
}
return ep, true
}
func (e *SimpleEntry) FindLatestEpisode() (*Episode, bool) {
// If there are no episodes, return nil
eps, ok := e.FindMainEpisodes()
if !ok {
return nil, false
}
// Get the episode with the highest progress number
latest := eps[0]
for _, ep := range eps {
if ep.GetProgressNumber() > latest.GetProgressNumber() {
latest = ep
}
}
return latest, true
}
func (e *SimpleEntry) FindLatestLocalFile() (*LocalFile, bool) {
lfs, ok := e.FindMainLocalFiles()
// If there are no local files, return nil
if !ok {
return nil, false
}
// Get the local file with the highest episode number
latest := lfs[0]
for _, lf := range lfs {
if lf.GetEpisodeNumber() > latest.GetEpisodeNumber() {
latest = lf
}
}
return latest, true
}
func (e *SimpleEntry) FindMainLocalFiles() ([]*LocalFile, bool) {
if e.LocalFiles == nil {
return nil, false
}
if len(e.LocalFiles) == 0 {
return nil, false
}
lfs := make([]*LocalFile, 0)
for _, lf := range e.LocalFiles {
if lf.IsMain() {
lfs = append(lfs, lf)
}
}
if len(lfs) == 0 {
return nil, false
}
return lfs, true
}
func (e *SimpleEntry) FindListData() (*EntryListData, bool) {
if e.EntryListData == nil {
return nil, false
}
return e.EntryListData, true
}
func (e *SimpleEntry) IsInAnimeCollection() bool {
_, ok := e.FindListData()
return ok
}
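// nextToWatchSketch is an illustrative sketch (hypothetical, not used by the
// package) showing how the helpers above combine: an entry has something left
// to watch only when it is downloaded and not fully watched, in which case the
// episode at current progress + 1 is the one to play next.
func nextToWatchSketch(e *Entry) (*Episode, bool) {
	if !e.IsDownloaded() || e.HasWatchedAll() {
		return nil, false
	}
	return e.FindNextEpisode()
}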

View File

@@ -0,0 +1,77 @@
package anime
import (
"seanime/internal/hook"
"strings"
"github.com/samber/lo"
)
type (
EntryLibraryData struct {
AllFilesLocked bool `json:"allFilesLocked"`
SharedPath string `json:"sharedPath"`
UnwatchedCount int `json:"unwatchedCount"`
MainFileCount int `json:"mainFileCount"`
}
NakamaEntryLibraryData struct {
UnwatchedCount int `json:"unwatchedCount"`
MainFileCount int `json:"mainFileCount"`
}
NewEntryLibraryDataOptions struct {
EntryLocalFiles []*LocalFile
MediaId int
CurrentProgress int
}
)
// NewEntryLibraryData creates a new EntryLibraryData based on the media id and a list of local files related to the media.
// It will return false if the list of local files is empty.
func NewEntryLibraryData(opts *NewEntryLibraryDataOptions) (ret *EntryLibraryData, ok bool) {
reqEvent := new(AnimeEntryLibraryDataRequestedEvent)
reqEvent.EntryLocalFiles = opts.EntryLocalFiles
reqEvent.MediaId = opts.MediaId
reqEvent.CurrentProgress = opts.CurrentProgress
err := hook.GlobalHookManager.OnAnimeEntryLibraryDataRequested().Trigger(reqEvent)
if err != nil {
return nil, false
}
if reqEvent.EntryLocalFiles == nil || len(reqEvent.EntryLocalFiles) == 0 {
return nil, false
}
sharedPath := strings.Replace(reqEvent.EntryLocalFiles[0].Path, reqEvent.EntryLocalFiles[0].Name, "", 1)
sharedPath = strings.TrimSuffix(strings.TrimSuffix(sharedPath, "\\"), "/")
ret = &EntryLibraryData{
AllFilesLocked: lo.EveryBy(reqEvent.EntryLocalFiles, func(item *LocalFile) bool { return item.Locked }),
SharedPath: sharedPath,
}
ok = true
lfw := NewLocalFileWrapper(reqEvent.EntryLocalFiles)
lfwe, ok := lfw.GetLocalEntryById(reqEvent.MediaId)
if !ok {
return ret, true
}
ret.UnwatchedCount = len(lfwe.GetUnwatchedLocalFiles(reqEvent.CurrentProgress))
mainLfs, ok := lfwe.GetMainLocalFiles()
if !ok {
return ret, true
}
ret.MainFileCount = len(mainLfs)
event := new(AnimeEntryLibraryDataEvent)
event.EntryLibraryData = ret
err = hook.GlobalHookManager.OnAnimeEntryLibraryData().Trigger(event)
if err != nil {
return nil, false
}
return event.EntryLibraryData, true
}
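// sharedPathSketch is a hypothetical restatement of how the shared directory is
// derived above: strip the file name from the full path, then trim the trailing
// path separator. It is illustrative only and not used by the package.
func sharedPathSketch(path, name string) string {
	shared := strings.Replace(path, name, "", 1)
	return strings.TrimSuffix(strings.TrimSuffix(shared, "\\"), "/")
}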

View File

@@ -0,0 +1,148 @@
package anime
import (
"context"
"errors"
"seanime/internal/api/anilist"
"seanime/internal/platforms/platform"
"sort"
"github.com/sourcegraph/conc/pool"
)
type (
SimpleEntry struct {
MediaId int `json:"mediaId"`
Media *anilist.BaseAnime `json:"media"`
EntryListData *EntryListData `json:"listData"`
EntryLibraryData *EntryLibraryData `json:"libraryData"`
Episodes []*Episode `json:"episodes"`
NextEpisode *Episode `json:"nextEpisode"`
LocalFiles []*LocalFile `json:"localFiles"`
CurrentEpisodeCount int `json:"currentEpisodeCount"`
}
SimpleEntryListData struct {
Progress int `json:"progress,omitempty"`
Score float64 `json:"score,omitempty"`
Status *anilist.MediaListStatus `json:"status,omitempty"`
StartedAt string `json:"startedAt,omitempty"`
CompletedAt string `json:"completedAt,omitempty"`
}
NewSimpleAnimeEntryOptions struct {
MediaId int
LocalFiles []*LocalFile // All local files
AnimeCollection *anilist.AnimeCollection
Platform platform.Platform
}
)
func NewSimpleEntry(ctx context.Context, opts *NewSimpleAnimeEntryOptions) (*SimpleEntry, error) {
if opts.AnimeCollection == nil ||
opts.Platform == nil {
return nil, errors.New("missing arguments when creating simple media entry")
}
// Create new Entry
entry := new(SimpleEntry)
entry.MediaId = opts.MediaId
// +---------------------+
// | AniList entry |
// +---------------------+
// Get the Anilist List entry
anilistEntry, found := opts.AnimeCollection.GetListEntryFromAnimeId(opts.MediaId)
// Set the media
// If the Anilist List entry does not exist, fetch the media from AniList
if !found {
// If the Anilist entry does not exist, instantiate one with zero values
anilistEntry = &anilist.AnimeListEntry{}
// Fetch the media
fetchedMedia, err := opts.Platform.GetAnime(ctx, opts.MediaId) // DEVNOTE: Maybe cache it?
if err != nil {
return nil, err
}
entry.Media = fetchedMedia
} else {
entry.Media = anilistEntry.Media
}
entry.CurrentEpisodeCount = entry.Media.GetCurrentEpisodeCount()
// +---------------------+
// | Local files |
// +---------------------+
// Get the entry's local files
lfs := GetLocalFilesFromMediaId(opts.LocalFiles, opts.MediaId)
entry.LocalFiles = lfs // Returns empty slice if no local files are found
libraryData, _ := NewEntryLibraryData(&NewEntryLibraryDataOptions{
EntryLocalFiles: lfs,
MediaId: entry.Media.ID,
CurrentProgress: anilistEntry.GetProgressSafe(),
})
entry.EntryLibraryData = libraryData
// Instantiate EntryListData
// If the media exists in the user's anime list, add the details
if found {
entry.EntryListData = &EntryListData{
Progress: anilistEntry.GetProgressSafe(),
Score: anilistEntry.GetScoreSafe(),
Status: anilistEntry.Status,
Repeat: anilistEntry.GetRepeatSafe(),
StartedAt: anilist.ToEntryStartDate(anilistEntry.StartedAt),
CompletedAt: anilist.ToEntryCompletionDate(anilistEntry.CompletedAt),
}
}
// +---------------------+
// | Episodes |
// +---------------------+
// Create episode entities
entry.hydrateEntryEpisodeData()
return entry, nil
}
//----------------------------------------------------------------------------------------------------------------------
// hydrateEntryEpisodeData creates the episode entities for the entry.
// Media and LocalFiles should be defined
func (e *SimpleEntry) hydrateEntryEpisodeData() {
// +---------------------+
// | Episodes |
// +---------------------+
p := pool.NewWithResults[*Episode]()
for _, lf := range e.LocalFiles {
lf := lf
p.Go(func() *Episode {
return NewSimpleEpisode(&NewSimpleEpisodeOptions{
LocalFile: lf,
Media: e.Media,
IsDownloaded: true,
})
})
}
episodes := p.Wait()
// Sort by progress number
sort.Slice(episodes, func(i, j int) bool {
return episodes[i].EpisodeNumber < episodes[j].EpisodeNumber
})
e.Episodes = episodes
nextEp, found := e.FindNextEpisode()
if found {
e.NextEpisode = nextEp
}
}

View File

@@ -0,0 +1,116 @@
package anime_test
import (
"seanime/internal/api/anilist"
"seanime/internal/api/metadata"
"seanime/internal/library/anime"
"seanime/internal/platforms/anilist_platform"
"seanime/internal/test_utils"
"seanime/internal/util"
"testing"
"github.com/samber/lo"
"github.com/stretchr/testify/assert"
)
// TestNewAnimeEntry tests the /library/entry endpoint.
// /!\ MAKE SURE THE MEDIA IS ADDED TO YOUR TEST ACCOUNT'S LISTS
func TestNewAnimeEntry(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.Anilist())
logger := util.NewLogger()
metadataProvider := metadata.GetMockProvider(t)
tests := []struct {
name string
mediaId int
localFiles []*anime.LocalFile
currentProgress int
expectedNextEpisodeNumber int
expectedNextEpisodeProgressNumber int
}{
{
name: "Sousou no Frieren",
mediaId: 154587,
localFiles: anime.MockHydratedLocalFiles(
anime.MockGenerateHydratedLocalFileGroupOptions("E:/Anime", "E:\\Anime\\Sousou no Frieren\\[SubsPlease] Sousou no Frieren - %ep (1080p) [F02B9CEE].mkv", 154587, []anime.MockHydratedLocalFileWrapperOptionsMetadata{
{MetadataEpisode: 1, MetadataAniDbEpisode: "1", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 2, MetadataAniDbEpisode: "2", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 3, MetadataAniDbEpisode: "3", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 4, MetadataAniDbEpisode: "4", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 5, MetadataAniDbEpisode: "5", MetadataType: anime.LocalFileTypeMain},
}),
),
currentProgress: 4,
expectedNextEpisodeNumber: 5,
expectedNextEpisodeProgressNumber: 5,
},
{
name: "Mushoku Tensei II Isekai Ittara Honki Dasu",
mediaId: 146065,
localFiles: anime.MockHydratedLocalFiles(
anime.MockGenerateHydratedLocalFileGroupOptions("E:/Anime", "E:/Anime/Mushoku Tensei II Isekai Ittara Honki Dasu/[SubsPlease] Mushoku Tensei S2 - 00 (1080p) [9C362DC3].mkv", 146065, []anime.MockHydratedLocalFileWrapperOptionsMetadata{
{MetadataEpisode: 0, MetadataAniDbEpisode: "S1", MetadataType: anime.LocalFileTypeMain}, // Special episode
{MetadataEpisode: 1, MetadataAniDbEpisode: "1", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 2, MetadataAniDbEpisode: "2", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 3, MetadataAniDbEpisode: "3", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 4, MetadataAniDbEpisode: "4", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 5, MetadataAniDbEpisode: "5", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 6, MetadataAniDbEpisode: "6", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 7, MetadataAniDbEpisode: "7", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 8, MetadataAniDbEpisode: "8", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 9, MetadataAniDbEpisode: "9", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 10, MetadataAniDbEpisode: "10", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 11, MetadataAniDbEpisode: "11", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 12, MetadataAniDbEpisode: "12", MetadataType: anime.LocalFileTypeMain},
}),
),
currentProgress: 0,
expectedNextEpisodeNumber: 0,
expectedNextEpisodeProgressNumber: 1,
},
}
anilistClient := anilist.TestGetMockAnilistClient()
anilistPlatform := anilist_platform.NewAnilistPlatform(anilistClient, logger)
animeCollection, err := anilistPlatform.GetAnimeCollection(t.Context(), false)
if err != nil {
t.Fatal(err)
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
anilist.TestModifyAnimeCollectionEntry(animeCollection, tt.mediaId, anilist.TestModifyAnimeCollectionEntryInput{
Progress: lo.ToPtr(tt.currentProgress), // Mock progress
})
entry, err := anime.NewEntry(t.Context(), &anime.NewEntryOptions{
MediaId: tt.mediaId,
LocalFiles: tt.localFiles,
AnimeCollection: animeCollection,
Platform: anilistPlatform,
MetadataProvider: metadataProvider,
})
if assert.NoErrorf(t, err, "Failed to get mock data") {
// Use the mocked progress set for this test case
nextEp, found := entry.FindNextEpisode()
if assert.True(t, found, "did not find next episode") {
assert.Equal(t, tt.expectedNextEpisodeNumber, nextEp.EpisodeNumber, "next episode number mismatch")
assert.Equal(t, tt.expectedNextEpisodeProgressNumber, nextEp.ProgressNumber, "next episode progress number mismatch")
}
t.Logf("Found %v episodes", len(entry.Episodes))
}
})
}
}

View File

@@ -0,0 +1,361 @@
package anime
import (
"seanime/internal/api/anilist"
"seanime/internal/api/metadata"
"strconv"
"strings"
)
type (
// Episode represents a single episode of a media entry.
Episode struct {
Type LocalFileType `json:"type"`
DisplayTitle string `json:"displayTitle"` // e.g, Show: "Episode 1", Movie: "Violet Evergarden The Movie"
EpisodeTitle string `json:"episodeTitle"` // e.g, "Shibuya Incident - Gate, Open"
EpisodeNumber int `json:"episodeNumber"`
AniDBEpisode string `json:"aniDBEpisode,omitempty"` // AniDB episode number
AbsoluteEpisodeNumber int `json:"absoluteEpisodeNumber"`
ProgressNumber int `json:"progressNumber"` // Usually the same as EpisodeNumber, unless there is a discrepancy between AniList and AniDB
LocalFile *LocalFile `json:"localFile"`
IsDownloaded bool `json:"isDownloaded"` // Is in the local files
EpisodeMetadata *EpisodeMetadata `json:"episodeMetadata"` // (image, airDate, length, summary, overview)
FileMetadata *LocalFileMetadata `json:"fileMetadata"` // (episode, aniDBEpisode, type...)
IsInvalid bool `json:"isInvalid"` // No AniDB data
MetadataIssue string `json:"metadataIssue,omitempty"` // Alerts the user that there is a discrepancy between AniList and AniDB
BaseAnime *anilist.BaseAnime `json:"baseAnime,omitempty"`
// IsNakamaEpisode indicates that this episode is from the Nakama host's anime library.
IsNakamaEpisode bool `json:"_isNakamaEpisode"`
}
// EpisodeMetadata represents the metadata of an Episode.
// Metadata is fetched from Animap (AniDB) and, optionally, AniList (if Animap is not available).
EpisodeMetadata struct {
AnidbId int `json:"anidbId,omitempty"`
Image string `json:"image,omitempty"`
AirDate string `json:"airDate,omitempty"`
Length int `json:"length,omitempty"`
Summary string `json:"summary,omitempty"`
Overview string `json:"overview,omitempty"`
IsFiller bool `json:"isFiller,omitempty"`
HasImage bool `json:"hasImage,omitempty"` // Indicates if the episode has a real image
}
)
type (
// NewEpisodeOptions hold data used to create a new Episode.
NewEpisodeOptions struct {
LocalFile *LocalFile
AnimeMetadata *metadata.AnimeMetadata // optional
Media *anilist.BaseAnime
OptionalAniDBEpisode string
// ProgressOffset will offset the ProgressNumber for a specific MAIN file
// This is used when there is a discrepancy between AniList and AniDB
// When this is -1, it means that a re-mapping of AniDB Episode is needed
ProgressOffset int
IsDownloaded bool
MetadataProvider metadata.Provider // optional
}
// NewSimpleEpisodeOptions hold data used to create a new Episode.
// Unlike NewEpisodeOptions, this struct does not require Animap data. It is used to list episodes without AniDB metadata.
NewSimpleEpisodeOptions struct {
LocalFile *LocalFile
Media *anilist.BaseAnime
IsDownloaded bool
}
)
// NewEpisode creates a new episode entity.
//
// It is used to list existing local files as episodes
// OR list non-downloaded episodes by passing the `OptionalAniDBEpisode` parameter.
//
// `AnimeMetadata` should be defined, but this is not always the case.
// `LocalFile` is optional.
func NewEpisode(opts *NewEpisodeOptions) *Episode {
entryEp := new(Episode)
entryEp.BaseAnime = opts.Media
entryEp.DisplayTitle = ""
entryEp.EpisodeTitle = ""
hydrated := false
// LocalFile exists
if opts.LocalFile != nil {
aniDBEp := opts.LocalFile.Metadata.AniDBEpisode
// ProgressOffset is -1, meaning the hydrator mistakenly set AniDB episode to "S1" (due to torrent name) because the episode number is 0
// The hydrator ASSUMES that AniDB will not include episode 0 as part of main episodes.
// We will remap "S1" to "1" and offset other AniDB episodes by 1
// e.g, ["S1", "1", "2", "3",...,"12"] -> ["1", "2", "3", "4",...,"13"]
if opts.ProgressOffset == -1 && opts.LocalFile.GetType() == LocalFileTypeMain {
if aniDBEp == "S1" {
aniDBEp = "1"
opts.ProgressOffset = 0
} else {
// e.g, "1" -> "2" etc...
aniDBEp = metadata.OffsetAnidbEpisode(aniDBEp, opts.ProgressOffset)
}
entryEp.MetadataIssue = "forced_remapping"
}
// Get the Animap episode
foundAnimapEpisode := false
var episodeMetadata *metadata.EpisodeMetadata
if opts.AnimeMetadata != nil {
episodeMetadata, foundAnimapEpisode = opts.AnimeMetadata.FindEpisode(aniDBEp)
}
entryEp.IsDownloaded = true
entryEp.FileMetadata = opts.LocalFile.GetMetadata()
entryEp.Type = opts.LocalFile.GetType()
entryEp.LocalFile = opts.LocalFile
// Set episode number and progress number
switch opts.LocalFile.Metadata.Type {
case LocalFileTypeMain:
entryEp.EpisodeNumber = opts.LocalFile.GetEpisodeNumber()
entryEp.ProgressNumber = opts.LocalFile.GetEpisodeNumber() + opts.ProgressOffset
if foundAnimapEpisode {
entryEp.AniDBEpisode = aniDBEp
entryEp.AbsoluteEpisodeNumber = entryEp.EpisodeNumber + opts.AnimeMetadata.GetOffset()
}
case LocalFileTypeSpecial:
entryEp.EpisodeNumber = opts.LocalFile.GetEpisodeNumber()
entryEp.ProgressNumber = 0
case LocalFileTypeNC:
entryEp.EpisodeNumber = 0
entryEp.ProgressNumber = 0
}
// Set titles
if len(entryEp.DisplayTitle) == 0 {
switch opts.LocalFile.Metadata.Type {
case LocalFileTypeMain:
if foundAnimapEpisode {
entryEp.AniDBEpisode = aniDBEp
if *opts.Media.GetFormat() == anilist.MediaFormatMovie {
entryEp.DisplayTitle = opts.Media.GetPreferredTitle()
entryEp.EpisodeTitle = "Complete Movie"
} else {
entryEp.DisplayTitle = "Episode " + strconv.Itoa(opts.LocalFile.GetEpisodeNumber())
entryEp.EpisodeTitle = episodeMetadata.GetTitle()
}
} else {
if *opts.Media.GetFormat() == anilist.MediaFormatMovie {
entryEp.DisplayTitle = opts.Media.GetPreferredTitle()
entryEp.EpisodeTitle = "Complete Movie"
} else {
entryEp.DisplayTitle = "Episode " + strconv.Itoa(opts.LocalFile.GetEpisodeNumber())
entryEp.EpisodeTitle = opts.LocalFile.GetParsedEpisodeTitle()
}
}
hydrated = true // Hydrated
case LocalFileTypeSpecial:
if foundAnimapEpisode {
entryEp.AniDBEpisode = aniDBEp
episodeInt, found := metadata.ExtractEpisodeInteger(aniDBEp)
if found {
entryEp.DisplayTitle = "Special " + strconv.Itoa(episodeInt)
} else {
entryEp.DisplayTitle = "Special " + aniDBEp
}
entryEp.EpisodeTitle = episodeMetadata.GetTitle()
} else {
entryEp.DisplayTitle = "Special " + strconv.Itoa(opts.LocalFile.GetEpisodeNumber())
}
hydrated = true // Hydrated
case LocalFileTypeNC:
if foundAnimapEpisode {
entryEp.AniDBEpisode = aniDBEp
entryEp.DisplayTitle = episodeMetadata.GetTitle()
entryEp.EpisodeTitle = ""
} else {
entryEp.DisplayTitle = opts.LocalFile.GetParsedTitle()
entryEp.EpisodeTitle = ""
}
hydrated = true // Hydrated
}
} else {
hydrated = true // Hydrated
}
// Set episode metadata
entryEp.EpisodeMetadata = NewEpisodeMetadata(opts.AnimeMetadata, episodeMetadata, opts.Media, opts.MetadataProvider)
} else if len(opts.OptionalAniDBEpisode) > 0 && opts.AnimeMetadata != nil {
// No LocalFile, but AniDB episode is provided
// Get the Animap episode
if episodeMetadata, foundAnimapEpisode := opts.AnimeMetadata.FindEpisode(opts.OptionalAniDBEpisode); foundAnimapEpisode {
entryEp.IsDownloaded = false
entryEp.Type = LocalFileTypeMain
if strings.HasPrefix(opts.OptionalAniDBEpisode, "S") {
entryEp.Type = LocalFileTypeSpecial
} else if strings.HasPrefix(opts.OptionalAniDBEpisode, "OP") || strings.HasPrefix(opts.OptionalAniDBEpisode, "ED") {
entryEp.Type = LocalFileTypeNC
}
entryEp.EpisodeNumber = 0
entryEp.ProgressNumber = 0
if episodeInt, ok := metadata.ExtractEpisodeInteger(opts.OptionalAniDBEpisode); ok {
entryEp.EpisodeNumber = episodeInt
entryEp.ProgressNumber = episodeInt + opts.ProgressOffset
entryEp.AniDBEpisode = opts.OptionalAniDBEpisode
entryEp.AbsoluteEpisodeNumber = entryEp.EpisodeNumber + opts.AnimeMetadata.GetOffset()
switch entryEp.Type {
case LocalFileTypeMain:
if *opts.Media.GetFormat() == anilist.MediaFormatMovie {
entryEp.DisplayTitle = opts.Media.GetPreferredTitle()
entryEp.EpisodeTitle = "Complete Movie"
} else {
entryEp.DisplayTitle = "Episode " + strconv.Itoa(episodeInt)
entryEp.EpisodeTitle = episodeMetadata.GetTitle()
}
case LocalFileTypeSpecial:
entryEp.DisplayTitle = "Special " + strconv.Itoa(episodeInt)
entryEp.EpisodeTitle = episodeMetadata.GetTitle()
case LocalFileTypeNC:
entryEp.DisplayTitle = opts.OptionalAniDBEpisode
entryEp.EpisodeTitle = ""
}
hydrated = true
}
// Set episode metadata
entryEp.EpisodeMetadata = NewEpisodeMetadata(opts.AnimeMetadata, episodeMetadata, opts.Media, opts.MetadataProvider)
} else {
// No Local file, no Animap data
// DEVNOTE: Non-downloaded, without any AniDB data. Don't handle this case.
// Non-downloaded episodes are determined from AniDB data either way.
}
}
// If for some reason the episode is not hydrated, set it as invalid
if !hydrated {
if opts.LocalFile != nil {
entryEp.DisplayTitle = opts.LocalFile.GetParsedTitle()
}
entryEp.EpisodeTitle = ""
entryEp.IsInvalid = true
return entryEp
}
return entryEp
}
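// Illustrative sketch (not part of the package API): the forced "S1" remapping
// described above, shown as a standalone transformation. It assumes the group
// starts with "S1" followed by consecutive main episode numbers, mirroring the
// hydrator scenario; it is not a general-purpose helper.
//
//	remapForcedSpecialSketch([]string{"S1", "1", "2", "3"}) // -> ["1", "2", "3", "4"]
func remapForcedSpecialSketch(aniDBEpisodes []string) []string {
	out := make([]string, 0, len(aniDBEpisodes))
	for _, ep := range aniDBEpisodes {
		if ep == "S1" {
			out = append(out, "1") // "S1" is treated as episode 1
			continue
		}
		if n, err := strconv.Atoi(ep); err == nil {
			out = append(out, strconv.Itoa(n+1)) // offset the remaining main episodes by 1
			continue
		}
		out = append(out, ep) // leave non-numeric entries untouched
	}
	return out
}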
// NewEpisodeMetadata creates a new EpisodeMetadata from an Animap episode and AniList media.
// If the Animap episode is nil, it will just set the image from the media.
func NewEpisodeMetadata(
animeMetadata *metadata.AnimeMetadata,
episode *metadata.EpisodeMetadata,
media *anilist.BaseAnime,
metadataProvider metadata.Provider,
) *EpisodeMetadata {
md := new(EpisodeMetadata)
// No Animap data
if episode == nil {
md.Image = media.GetCoverImageSafe()
return md
}
epInt, err := strconv.Atoi(episode.Episode)
if err == nil {
aw := metadataProvider.GetAnimeMetadataWrapper(media, animeMetadata)
epMetadata := aw.GetEpisodeMetadata(epInt)
md.AnidbId = epMetadata.AnidbId
md.Image = epMetadata.Image
md.AirDate = epMetadata.AirDate
md.Length = epMetadata.Length
md.Summary = epMetadata.Summary
md.Overview = epMetadata.Overview
md.HasImage = epMetadata.HasImage
md.IsFiller = false
} else {
md.Image = media.GetBannerImageSafe()
}
return md
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// NewSimpleEpisode creates an Episode without AniDB metadata.
func NewSimpleEpisode(opts *NewSimpleEpisodeOptions) *Episode {
entryEp := new(Episode)
entryEp.BaseAnime = opts.Media
entryEp.DisplayTitle = ""
entryEp.EpisodeTitle = ""
entryEp.EpisodeMetadata = new(EpisodeMetadata)
hydrated := false
// LocalFile exists
if opts.LocalFile != nil {
entryEp.IsDownloaded = true
entryEp.FileMetadata = opts.LocalFile.GetMetadata()
entryEp.Type = opts.LocalFile.GetType()
entryEp.LocalFile = opts.LocalFile
// Set episode number and progress number
switch opts.LocalFile.Metadata.Type {
case LocalFileTypeMain:
entryEp.EpisodeNumber = opts.LocalFile.GetEpisodeNumber()
entryEp.ProgressNumber = opts.LocalFile.GetEpisodeNumber()
hydrated = true // Hydrated
case LocalFileTypeSpecial:
entryEp.EpisodeNumber = opts.LocalFile.GetEpisodeNumber()
entryEp.ProgressNumber = 0
hydrated = true // Hydrated
case LocalFileTypeNC:
entryEp.EpisodeNumber = 0
entryEp.ProgressNumber = 0
hydrated = true // Hydrated
}
// Set titles
if len(entryEp.DisplayTitle) == 0 {
switch opts.LocalFile.Metadata.Type {
case LocalFileTypeMain:
if *opts.Media.GetFormat() == anilist.MediaFormatMovie {
entryEp.DisplayTitle = opts.Media.GetPreferredTitle()
entryEp.EpisodeTitle = "Complete Movie"
} else {
entryEp.DisplayTitle = "Episode " + strconv.Itoa(opts.LocalFile.GetEpisodeNumber())
entryEp.EpisodeTitle = opts.LocalFile.GetParsedEpisodeTitle()
}
hydrated = true // Hydrated
case LocalFileTypeSpecial:
entryEp.DisplayTitle = "Special " + strconv.Itoa(opts.LocalFile.GetEpisodeNumber())
hydrated = true // Hydrated
case LocalFileTypeNC:
entryEp.DisplayTitle = opts.LocalFile.GetParsedTitle()
entryEp.EpisodeTitle = ""
hydrated = true // Hydrated
}
}
entryEp.EpisodeMetadata.Image = opts.Media.GetCoverImageSafe()
}
if !hydrated {
if opts.LocalFile != nil {
entryEp.DisplayTitle = opts.LocalFile.GetParsedTitle()
}
entryEp.EpisodeTitle = ""
entryEp.IsInvalid = true
entryEp.MetadataIssue = "no_anidb_data"
return entryEp
}
entryEp.MetadataIssue = "no_anidb_data"
return entryEp
}

View File

@@ -0,0 +1,309 @@
package anime
import (
"cmp"
"context"
"fmt"
"seanime/internal/api/anilist"
"seanime/internal/api/metadata"
"seanime/internal/hook"
"seanime/internal/platforms/platform"
"seanime/internal/util/result"
"slices"
"time"
"github.com/rs/zerolog"
"github.com/samber/lo"
)
var episodeCollectionCache = result.NewBoundedCache[int, *EpisodeCollection](10)
var EpisodeCollectionFromLocalFilesCache = result.NewBoundedCache[int, *EpisodeCollection](10)
type (
// EpisodeCollection represents a collection of episodes.
EpisodeCollection struct {
HasMappingError bool `json:"hasMappingError"`
Episodes []*Episode `json:"episodes"`
Metadata *metadata.AnimeMetadata `json:"metadata"`
}
)
type NewEpisodeCollectionOptions struct {
// AnimeMetadata can be nil, if not provided, it will be fetched from the metadata provider.
AnimeMetadata *metadata.AnimeMetadata
Media *anilist.BaseAnime
MetadataProvider metadata.Provider
Logger *zerolog.Logger
}
// NewEpisodeCollection creates a new episode collection by leveraging EntryDownloadInfo.
// The returned EpisodeCollection is cached for 10 minutes.
//
// AnimeMetadata is optional, if not provided, it will be fetched from the metadata provider.
//
// Note: This is used by Torrent and Debrid streaming
func NewEpisodeCollection(opts NewEpisodeCollectionOptions) (ec *EpisodeCollection, err error) {
if opts.Logger == nil {
opts.Logger = lo.ToPtr(zerolog.Nop())
}
if opts.Media == nil {
return nil, fmt.Errorf("cannot create episode collection, media is nil")
}
if opts.MetadataProvider == nil {
return nil, fmt.Errorf("cannot create episode collection, metadata provider is nil")
}
if ec, ok := episodeCollectionCache.Get(opts.Media.ID); ok {
opts.Logger.Debug().Msg("torrentstream: Using cached episode collection")
return ec, nil
}
if opts.AnimeMetadata == nil {
// Fetch the metadata
opts.AnimeMetadata, err = opts.MetadataProvider.GetAnimeMetadata(metadata.AnilistPlatform, opts.Media.ID)
if err != nil {
opts.AnimeMetadata = &metadata.AnimeMetadata{
Titles: make(map[string]string),
Episodes: make(map[string]*metadata.EpisodeMetadata),
EpisodeCount: 0,
SpecialCount: 0,
Mappings: &metadata.AnimeMappings{
AnilistId: opts.Media.GetID(),
},
}
opts.AnimeMetadata.Titles["en"] = opts.Media.GetTitleSafe()
opts.AnimeMetadata.Titles["x-jat"] = opts.Media.GetRomajiTitleSafe()
err = nil
}
}
reqEvent := &AnimeEpisodeCollectionRequestedEvent{
Media: opts.Media,
Metadata: opts.AnimeMetadata,
EpisodeCollection: &EpisodeCollection{},
}
err = hook.GlobalHookManager.OnAnimEpisodeCollectionRequested().Trigger(reqEvent)
if err != nil {
return nil, err
}
opts.Media = reqEvent.Media
opts.AnimeMetadata = reqEvent.Metadata
if reqEvent.DefaultPrevented {
return reqEvent.EpisodeCollection, nil
}
ec = &EpisodeCollection{
HasMappingError: false,
Episodes: make([]*Episode, 0),
Metadata: opts.AnimeMetadata,
}
// +---------------------+
// | Download Info |
// +---------------------+
info, err := NewEntryDownloadInfo(&NewEntryDownloadInfoOptions{
LocalFiles: nil,
AnimeMetadata: opts.AnimeMetadata,
Progress: lo.ToPtr(0), // Progress is 0 because we want the entire list
Status: lo.ToPtr(anilist.MediaListStatusCurrent),
Media: opts.Media,
MetadataProvider: opts.MetadataProvider,
})
if err != nil {
opts.Logger.Error().Err(err).Msg("torrentstream: could not get media entry info")
return nil, err
}
// As of v2.8.0 this should never happen: getMediaInfo always returns an anime metadata struct (even when nothing is found),
// so NewEntryDownloadInfo returns a valid list of episodes to download
if info == nil || info.EpisodesToDownload == nil {
opts.Logger.Debug().Msg("torrentstream: no episodes found from AniDB, using AniList")
for epIdx := range opts.Media.GetCurrentEpisodeCount() {
episodeNumber := epIdx + 1
mediaWrapper := opts.MetadataProvider.GetAnimeMetadataWrapper(opts.Media, nil)
episodeMetadata := mediaWrapper.GetEpisodeMetadata(episodeNumber)
episode := &Episode{
Type: LocalFileTypeMain,
DisplayTitle: fmt.Sprintf("Episode %d", episodeNumber),
EpisodeTitle: opts.Media.GetPreferredTitle(),
EpisodeNumber: episodeNumber,
AniDBEpisode: fmt.Sprintf("%d", episodeNumber),
AbsoluteEpisodeNumber: episodeNumber,
ProgressNumber: episodeNumber,
LocalFile: nil,
IsDownloaded: false,
EpisodeMetadata: &EpisodeMetadata{
AnidbId: 0,
Image: episodeMetadata.Image,
AirDate: "",
Length: 0,
Summary: "",
Overview: "",
IsFiller: false,
},
FileMetadata: nil,
IsInvalid: false,
MetadataIssue: "",
BaseAnime: opts.Media,
}
ec.Episodes = append(ec.Episodes, episode)
}
ec.HasMappingError = true
return
}
if len(info.EpisodesToDownload) == 0 {
opts.Logger.Error().Msg("torrentstream: no episodes found")
return nil, fmt.Errorf("no episodes found")
}
ec.Episodes = lo.Map(info.EpisodesToDownload, func(episode *EntryDownloadEpisode, i int) *Episode {
return episode.Episode
})
slices.SortStableFunc(ec.Episodes, func(i, j *Episode) int {
return cmp.Compare(i.EpisodeNumber, j.EpisodeNumber)
})
event := &AnimeEpisodeCollectionEvent{
EpisodeCollection: ec,
}
err = hook.GlobalHookManager.OnAnimeEpisodeCollection().Trigger(event)
if err != nil {
return nil, err
}
ec = event.EpisodeCollection
episodeCollectionCache.SetT(opts.Media.ID, ec, time.Minute*10)
return
}
func ClearEpisodeCollectionCache() {
episodeCollectionCache.Clear()
}
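// Illustrative usage sketch (not part of the package API): building an episode
// collection for streaming. The caller supplies the media and metadata provider;
// AnimeMetadata and Logger are left nil since NewEpisodeCollection fetches the
// metadata itself and falls back to a no-op logger.
func exampleNewEpisodeCollection(media *anilist.BaseAnime, provider metadata.Provider) ([]*Episode, error) {
	ec, err := NewEpisodeCollection(NewEpisodeCollectionOptions{
		Media:            media,
		MetadataProvider: provider,
	})
	if err != nil {
		return nil, err
	}
	// HasMappingError is true when AniDB data was unavailable and the episodes
	// were derived from AniList only.
	_ = ec.HasMappingError
	return ec.Episodes, nil
}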
/////////
type NewEpisodeCollectionFromLocalFilesOptions struct {
LocalFiles []*LocalFile
Media *anilist.BaseAnime
AnimeCollection *anilist.AnimeCollection
Platform platform.Platform
MetadataProvider metadata.Provider
Logger *zerolog.Logger
}
func NewEpisodeCollectionFromLocalFiles(ctx context.Context, opts NewEpisodeCollectionFromLocalFilesOptions) (*EpisodeCollection, error) {
if opts.Logger == nil {
opts.Logger = lo.ToPtr(zerolog.Nop())
}
if ec, ok := EpisodeCollectionFromLocalFilesCache.Get(opts.Media.GetID()); ok {
return ec, nil
}
// Keep only the local files that belong to this media
opts.LocalFiles = lo.Filter(opts.LocalFiles, func(lf *LocalFile, i int) bool {
return lf.MediaId == opts.Media.GetID()
})
// Create a new media entry
entry, err := NewEntry(ctx, &NewEntryOptions{
MediaId: opts.Media.GetID(),
LocalFiles: opts.LocalFiles,
AnimeCollection: opts.AnimeCollection,
Platform: opts.Platform,
MetadataProvider: opts.MetadataProvider,
})
if err != nil {
return nil, fmt.Errorf("cannot play local file, could not create entry: %w", err)
}
// Should be cached if it exists
animeMetadata, err := opts.MetadataProvider.GetAnimeMetadata(metadata.AnilistPlatform, opts.Media.ID)
if err != nil {
animeMetadata = &metadata.AnimeMetadata{
Titles: make(map[string]string),
Episodes: make(map[string]*metadata.EpisodeMetadata),
EpisodeCount: 0,
SpecialCount: 0,
Mappings: &metadata.AnimeMappings{
AnilistId: opts.Media.GetID(),
},
}
animeMetadata.Titles["en"] = opts.Media.GetTitleSafe()
animeMetadata.Titles["x-jat"] = opts.Media.GetRomajiTitleSafe()
err = nil
}
ec := &EpisodeCollection{
HasMappingError: false,
Episodes: entry.Episodes,
Metadata: animeMetadata,
}
EpisodeCollectionFromLocalFilesCache.SetT(opts.Media.GetID(), ec, time.Hour*6)
return ec, nil
}
/////////
func (ec *EpisodeCollection) FindEpisodeByNumber(episodeNumber int) (*Episode, bool) {
for _, episode := range ec.Episodes {
if episode.EpisodeNumber == episodeNumber {
return episode, true
}
}
return nil, false
}
func (ec *EpisodeCollection) FindEpisodeByAniDB(anidbEpisode string) (*Episode, bool) {
for _, episode := range ec.Episodes {
if episode.AniDBEpisode == anidbEpisode {
return episode, true
}
}
return nil, false
}
// GetMainLocalFiles returns the *main* episodes. Episodes without a local file are treated as main.
func (ec *EpisodeCollection) GetMainLocalFiles() ([]*Episode, bool) {
ret := make([]*Episode, 0)
for _, episode := range ec.Episodes {
if episode.LocalFile == nil || episode.LocalFile.IsMain() {
ret = append(ret, episode)
}
}
if len(ret) == 0 {
return nil, false
}
return ret, true
}
// FindNextEpisode returns the *main* episode whose episode number immediately follows the given episode.
func (ec *EpisodeCollection) FindNextEpisode(current *Episode) (*Episode, bool) {
episodes, ok := ec.GetMainLocalFiles()
if !ok {
return nil, false
}
// Get the local file whose episode number is after the given local file
var next *Episode
for _, e := range episodes {
if e.GetEpisodeNumber() == current.GetEpisodeNumber()+1 {
next = e
break
}
}
if next == nil {
return nil, false
}
return next, true
}
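// Illustrative sketch (not part of the package API): resolving the episode to
// stream and the one that follows it, using the lookup helpers above. The
// episode number comes from a hypothetical caller.
func exampleResolveStreamEpisodes(ec *EpisodeCollection, episodeNumber int) (current *Episode, next *Episode, ok bool) {
	current, ok = ec.FindEpisodeByNumber(episodeNumber)
	if !ok {
		return nil, nil, false
	}
	// next stays nil when the current episode is the last main episode
	next, _ = ec.FindNextEpisode(current)
	return current, next, true
}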

View File

@@ -0,0 +1,28 @@
package anime
func (e *Episode) GetEpisodeNumber() int {
if e == nil {
return -1
}
return e.EpisodeNumber
}
func (e *Episode) GetProgressNumber() int {
if e == nil {
return -1
}
return e.ProgressNumber
}
func (e *Episode) IsMain() bool {
if e == nil || e.LocalFile == nil {
return false
}
return e.LocalFile.IsMain()
}
func (e *Episode) GetLocalFile() *LocalFile {
if e == nil {
return nil
}
return e.LocalFile
}

View File

@@ -0,0 +1,167 @@
package anime
import (
"seanime/internal/api/anilist"
"seanime/internal/api/metadata"
"seanime/internal/hook_resolver"
)
/////////////////////////////
// Anime Library Events
/////////////////////////////
// AnimeEntryRequestedEvent is triggered when an anime entry is requested.
// Prevent default to skip the default behavior and return the modified entry.
// This event is triggered before [AnimeEntryEvent].
// If the modified entry is nil, an error will be returned.
type AnimeEntryRequestedEvent struct {
hook_resolver.Event
MediaId int `json:"mediaId"`
LocalFiles []*LocalFile `json:"localFiles"`
AnimeCollection *anilist.AnimeCollection `json:"animeCollection"`
// Empty entry object, will be used if the hook prevents the default behavior
Entry *Entry `json:"entry"`
}
// AnimeEntryEvent is triggered when the media entry is being returned.
// This event is triggered after [AnimeEntryRequestedEvent].
type AnimeEntryEvent struct {
hook_resolver.Event
Entry *Entry `json:"entry"`
}
// AnimeEntryFillerHydrationEvent is triggered when the filler data is being added to the media entry.
// This event is triggered after [AnimeEntryEvent].
// Prevent default to skip the filler data.
type AnimeEntryFillerHydrationEvent struct {
hook_resolver.Event
Entry *Entry `json:"entry"`
}
// AnimeEntryLibraryDataRequestedEvent is triggered when the app requests the library data for a media entry.
// This is triggered before [AnimeEntryLibraryDataEvent].
type AnimeEntryLibraryDataRequestedEvent struct {
hook_resolver.Event
EntryLocalFiles []*LocalFile `json:"entryLocalFiles"`
MediaId int `json:"mediaId"`
CurrentProgress int `json:"currentProgress"`
}
// AnimeEntryLibraryDataEvent is triggered when the library data is being added to the media entry.
// This is triggered after [AnimeEntryLibraryDataRequestedEvent].
type AnimeEntryLibraryDataEvent struct {
hook_resolver.Event
EntryLibraryData *EntryLibraryData `json:"entryLibraryData"`
}
// AnimeEntryManualMatchBeforeSaveEvent is triggered when the user manually matches local files to a media entry.
// Prevent default to skip saving the local files.
type AnimeEntryManualMatchBeforeSaveEvent struct {
hook_resolver.Event
// The media ID chosen by the user
MediaId int `json:"mediaId"`
// The paths of the local files that are being matched
Paths []string `json:"paths"`
// The local files that are being matched
MatchedLocalFiles []*LocalFile `json:"matchedLocalFiles"`
}
// MissingEpisodesRequestedEvent is triggered when the user requests the missing episodes for the entire library.
// Prevent default to skip the default process and return the modified missing episodes.
type MissingEpisodesRequestedEvent struct {
hook_resolver.Event
AnimeCollection *anilist.AnimeCollection `json:"animeCollection"`
LocalFiles []*LocalFile `json:"localFiles"`
SilencedMediaIds []int `json:"silencedMediaIds"`
// Empty missing episodes object, will be used if the hook prevents the default behavior
MissingEpisodes *MissingEpisodes `json:"missingEpisodes"`
}
// MissingEpisodesEvent is triggered when the missing episodes are being returned.
type MissingEpisodesEvent struct {
hook_resolver.Event
MissingEpisodes *MissingEpisodes `json:"missingEpisodes"`
}
/////////////////////////////
// Anime Collection Events
/////////////////////////////
// AnimeLibraryCollectionRequestedEvent is triggered when the user requests the library collection.
// Prevent default to skip the default process and return the modified library collection.
// If the modified library collection is nil, an error will be returned.
type AnimeLibraryCollectionRequestedEvent struct {
hook_resolver.Event
AnimeCollection *anilist.AnimeCollection `json:"animeCollection"`
LocalFiles []*LocalFile `json:"localFiles"`
// Empty library collection object, will be used if the hook prevents the default behavior
LibraryCollection *LibraryCollection `json:"libraryCollection"`
}
// AnimeLibraryCollectionEvent is triggered when the user requests the library collection.
type AnimeLibraryCollectionEvent struct {
hook_resolver.Event
LibraryCollection *LibraryCollection `json:"libraryCollection"`
}
// AnimeLibraryStreamCollectionRequestedEvent is triggered when the user requests the library stream collection.
// This is called when the user enables "Include in library" for debrid, online, or torrent streaming.
type AnimeLibraryStreamCollectionRequestedEvent struct {
hook_resolver.Event
AnimeCollection *anilist.AnimeCollection `json:"animeCollection"`
LibraryCollection *LibraryCollection `json:"libraryCollection"`
}
// AnimeLibraryStreamCollectionEvent is triggered when the library stream collection is being returned.
type AnimeLibraryStreamCollectionEvent struct {
hook_resolver.Event
StreamCollection *StreamCollection `json:"streamCollection"`
}
////////////////////////////////////////
// AnimeEntryDownloadInfoRequestedEvent is triggered when the app requests the download info for a media entry.
// This is triggered before [AnimeEntryDownloadInfoEvent].
type AnimeEntryDownloadInfoRequestedEvent struct {
hook_resolver.Event
LocalFiles []*LocalFile `json:"localFiles"`
AnimeMetadata *metadata.AnimeMetadata
Media *anilist.BaseAnime
Progress *int
Status *anilist.MediaListStatus
// Empty download info object, will be used if the hook prevents the default behavior
EntryDownloadInfo *EntryDownloadInfo `json:"entryDownloadInfo"`
}
// AnimeEntryDownloadInfoEvent is triggered when the download info is being returned.
type AnimeEntryDownloadInfoEvent struct {
hook_resolver.Event
EntryDownloadInfo *EntryDownloadInfo `json:"entryDownloadInfo"`
}
/////////////////////////////////////
// AnimeEpisodeCollectionRequestedEvent is triggered when the episode collection is being requested.
// Prevent default to skip the default behavior and return your own data.
type AnimeEpisodeCollectionRequestedEvent struct {
hook_resolver.Event
Media *anilist.BaseAnime `json:"media"`
Metadata *metadata.AnimeMetadata `json:"metadata"`
// Empty episode collection object, will be used if the hook prevents the default behavior
EpisodeCollection *EpisodeCollection `json:"episodeCollection"`
}
// AnimeEpisodeCollectionEvent is triggered when the episode collection is being returned.
type AnimeEpisodeCollectionEvent struct {
hook_resolver.Event
EpisodeCollection *EpisodeCollection `json:"episodeCollection"`
}
/////////////////////////////////////
// AnimeScheduleItemsEvent is triggered when the schedule items are being returned.
type AnimeScheduleItemsEvent struct {
hook_resolver.Event
AnimeCollection *anilist.AnimeCollection `json:"animeCollection"`
Items []*ScheduleItem `json:"items"`
}

View File

@@ -0,0 +1,139 @@
package anime
import (
"seanime/internal/library/filesystem"
"github.com/5rahim/habari"
)
const (
LocalFileTypeMain LocalFileType = "main" // Main episodes that are trackable
LocalFileTypeSpecial LocalFileType = "special" // OVA, ONA, etc.
LocalFileTypeNC LocalFileType = "nc" // Opening, ending, etc.
)
type (
LocalFileType string
// LocalFile represents a media file on the local filesystem.
// It is used to store information about and state of the file, such as its path, name, and parsed data.
LocalFile struct {
Path string `json:"path"`
Name string `json:"name"`
ParsedData *LocalFileParsedData `json:"parsedInfo"`
ParsedFolderData []*LocalFileParsedData `json:"parsedFolderInfo"`
Metadata *LocalFileMetadata `json:"metadata"`
Locked bool `json:"locked"`
Ignored bool `json:"ignored"` // Unused for now
MediaId int `json:"mediaId"`
}
// LocalFileMetadata holds metadata related to a media episode.
LocalFileMetadata struct {
Episode int `json:"episode"`
AniDBEpisode string `json:"aniDBEpisode"`
Type LocalFileType `json:"type"`
}
// LocalFileParsedData holds parsed data from a media file's name.
// This data is used to identify the media file during the scanning process.
LocalFileParsedData struct {
Original string `json:"original"`
Title string `json:"title,omitempty"`
ReleaseGroup string `json:"releaseGroup,omitempty"`
Season string `json:"season,omitempty"`
SeasonRange []string `json:"seasonRange,omitempty"`
Part string `json:"part,omitempty"`
PartRange []string `json:"partRange,omitempty"`
Episode string `json:"episode,omitempty"`
EpisodeRange []string `json:"episodeRange,omitempty"`
EpisodeTitle string `json:"episodeTitle,omitempty"`
Year string `json:"year,omitempty"`
}
)
// NewLocalFileS creates and returns a reference to a new LocalFile struct.
// It will parse the file's name and its directory names to extract necessary information.
// - opath: The full path to the file.
// - dirPaths: The full paths to the directories that may contain the file. (Library root paths)
func NewLocalFileS(opath string, dirPaths []string) *LocalFile {
info := filesystem.SeparateFilePathS(opath, dirPaths)
return newLocalFile(opath, info)
}
// NewLocalFile creates and returns a reference to a new LocalFile struct.
// It will parse the file's name and its directory names to extract necessary information.
// - opath: The full path to the file.
// - dirPath: The full path to the directory containing the file. (The library root path)
func NewLocalFile(opath, dirPath string) *LocalFile {
info := filesystem.SeparateFilePath(opath, dirPath)
return newLocalFile(opath, info)
}
func newLocalFile(opath string, info *filesystem.SeparatedFilePath) *LocalFile {
// Parse filename
fElements := habari.Parse(info.Filename)
parsedInfo := NewLocalFileParsedData(info.Filename, fElements)
// Parse dir names
parsedFolderInfo := make([]*LocalFileParsedData, 0)
for _, dirname := range info.Dirnames {
if len(dirname) > 0 {
pElements := habari.Parse(dirname)
parsed := NewLocalFileParsedData(dirname, pElements)
parsedFolderInfo = append(parsedFolderInfo, parsed)
}
}
localFile := &LocalFile{
Path: opath,
Name: info.Filename,
ParsedData: parsedInfo,
ParsedFolderData: parsedFolderInfo,
Metadata: &LocalFileMetadata{
Episode: 0,
AniDBEpisode: "",
Type: "",
},
Locked: false,
Ignored: false,
MediaId: 0,
}
return localFile
}
// NewLocalFileParsedData converts habari.Metadata into a LocalFileParsedData, a more convenient representation for the scanner.
func NewLocalFileParsedData(original string, elements *habari.Metadata) *LocalFileParsedData {
i := new(LocalFileParsedData)
i.Original = original
i.Title = elements.FormattedTitle
i.ReleaseGroup = elements.ReleaseGroup
i.EpisodeTitle = elements.EpisodeTitle
i.Year = elements.Year
if len(elements.SeasonNumber) > 0 {
if len(elements.SeasonNumber) == 1 {
i.Season = elements.SeasonNumber[0]
} else {
i.SeasonRange = elements.SeasonNumber
}
}
if len(elements.EpisodeNumber) > 0 {
if len(elements.EpisodeNumber) == 1 {
i.Episode = elements.EpisodeNumber[0]
} else {
i.EpisodeRange = elements.EpisodeNumber
}
}
if len(elements.PartNumber) > 0 {
if len(elements.PartNumber) == 1 {
i.Part = elements.PartNumber[0]
} else {
i.PartRange = elements.PartNumber
}
}
return i
}
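// Illustrative usage sketch (not part of the package API): parsing a library
// file into a LocalFile. The path and library root below are hypothetical and
// the parsed values depend on habari's parsing rules.
func exampleNewLocalFile() *LocalFile {
	lf := NewLocalFile(
		"/mnt/anime/Sousou no Frieren/[SubsPlease] Sousou no Frieren - 01 (1080p).mkv",
		"/mnt/anime", // library root
	)
	// At this point only the parsed data is populated (e.g. lf.ParsedData.Title,
	// lf.ParsedData.Episode); lf.Metadata and lf.MediaId stay zero-valued until
	// the file is matched and hydrated by the scanner.
	return lf
}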

View File

@@ -0,0 +1,448 @@
package anime
import (
"bytes"
"fmt"
"path/filepath"
"seanime/internal/util"
"seanime/internal/util/comparison"
"slices"
"strconv"
"strings"
"github.com/samber/lo"
lop "github.com/samber/lo/parallel"
)
//----------------------------------------------------------------------------------------------------------------------
func (f *LocalFile) IsParsedEpisodeValid() bool {
if f == nil || f.ParsedData == nil {
return false
}
return len(f.ParsedData.Episode) > 0
}
// GetEpisodeNumber returns the metadata episode number.
// This requires the LocalFile to be hydrated.
func (f *LocalFile) GetEpisodeNumber() int {
if f.Metadata == nil {
return -1
}
return f.Metadata.Episode
}
func (f *LocalFile) GetParsedEpisodeTitle() string {
if f.ParsedData == nil {
return ""
}
return f.ParsedData.EpisodeTitle
}
// HasBeenWatched returns whether the episode has been watched.
// This only applies to main episodes.
func (f *LocalFile) HasBeenWatched(progress int) bool {
if f.Metadata == nil {
return false
}
if f.GetEpisodeNumber() == 0 && progress == 0 {
return false
}
return progress >= f.GetEpisodeNumber()
}
// GetType returns the metadata type.
// This requires the LocalFile to be hydrated.
func (f *LocalFile) GetType() LocalFileType {
return f.Metadata.Type
}
// IsMain returns true if the metadata type is LocalFileTypeMain
func (f *LocalFile) IsMain() bool {
return f.Metadata.Type == LocalFileTypeMain
}
// GetMetadata returns the file metadata.
// This requires the LocalFile to be hydrated.
func (f *LocalFile) GetMetadata() *LocalFileMetadata {
return f.Metadata
}
// GetAniDBEpisode returns the metadata AniDB episode number.
// This requires the LocalFile to be hydrated.
func (f *LocalFile) GetAniDBEpisode() string {
return f.Metadata.AniDBEpisode
}
func (f *LocalFile) IsLocked() bool {
return f.Locked
}
func (f *LocalFile) IsIgnored() bool {
return f.Ignored
}
// GetNormalizedPath returns the lowercase path of the LocalFile.
// Use this for comparison.
func (f *LocalFile) GetNormalizedPath() string {
return util.NormalizePath(f.Path)
}
func (f *LocalFile) GetPath() string {
return f.Path
}
func (f *LocalFile) HasSamePath(path string) bool {
return f.GetNormalizedPath() == util.NormalizePath(path)
}
// IsInDir returns true if the LocalFile is in the given directory.
func (f *LocalFile) IsInDir(dirPath string) bool {
dirPath = util.NormalizePath(dirPath)
if !filepath.IsAbs(dirPath) {
return false
}
return strings.HasPrefix(f.GetNormalizedPath(), dirPath)
}
// IsAtRootOf returns true if the LocalFile is at the root of the given directory.
func (f *LocalFile) IsAtRootOf(dirPath string) bool {
dirPath = strings.TrimSuffix(util.NormalizePath(dirPath), "/")
return filepath.ToSlash(filepath.Dir(f.GetNormalizedPath())) == dirPath
}
func (f *LocalFile) Equals(lf *LocalFile) bool {
return util.NormalizePath(f.Path) == util.NormalizePath(lf.Path)
}
func (f *LocalFile) IsIncluded(lfs []*LocalFile) bool {
for _, lf := range lfs {
if f.Equals(lf) {
return true
}
}
return false
}
//----------------------------------------------------------------------------------------------------------------------
// buildTitle concatenates the given strings into a single string.
func buildTitle(vals ...string) string {
buf := bytes.NewBuffer([]byte{})
for i, v := range vals {
buf.WriteString(v)
if i != len(vals)-1 {
buf.WriteString(" ")
}
}
return buf.String()
}
// GetUniqueAnimeTitlesFromLocalFiles returns all parsed anime titles without duplicates, from a slice of LocalFile's.
func GetUniqueAnimeTitlesFromLocalFiles(lfs []*LocalFile) []string {
// Concurrently get title from each local file
titles := lop.Map(lfs, func(file *LocalFile, index int) string {
title := file.GetParsedTitle()
// Some rudimentary exclusions
for _, i := range []string{"SPECIALS", "SPECIAL", "EXTRA", "NC", "OP", "MOVIE", "MOVIES"} {
if strings.ToUpper(title) == i {
return ""
}
}
return title
})
// Keep unique title and filter out empty ones
titles = lo.Filter(lo.Uniq(titles), func(item string, index int) bool {
return len(item) > 0
})
return titles
}
// GetMediaIdsFromLocalFiles returns all media ids from a slice of LocalFile's.
func GetMediaIdsFromLocalFiles(lfs []*LocalFile) []int {
// Group local files by media id
groupedLfs := GroupLocalFilesByMediaID(lfs)
// Get slice of media ids from local files
mIds := make([]int, 0, len(groupedLfs))
for key := range groupedLfs {
if !slices.Contains(mIds, key) {
mIds = append(mIds, key)
}
}
return mIds
}
// GetLocalFilesFromMediaId returns all local files with the given media id.
func GetLocalFilesFromMediaId(lfs []*LocalFile, mId int) []*LocalFile {
return lo.Filter(lfs, func(item *LocalFile, _ int) bool {
return item.MediaId == mId
})
}
// GroupLocalFilesByMediaID returns a map of media id to local files.
func GroupLocalFilesByMediaID(lfs []*LocalFile) (groupedLfs map[int][]*LocalFile) {
groupedLfs = lop.GroupBy(lfs, func(item *LocalFile) int {
return item.MediaId
})
return
}
// IsLocalFileGroupValidEntry checks if the group contains at least one main episode with valid parsed episode data.
func IsLocalFileGroupValidEntry(lfs []*LocalFile) bool {
// Check if there are any main episodes with valid parsed data
flag := false
for _, lf := range lfs {
if lf.GetType() == LocalFileTypeMain && lf.IsParsedEpisodeValid() {
flag = true
break
}
}
return flag
}
// FindLatestLocalFileFromGroup returns the "main" episode with the highest episode number.
// Returns false if there are no episodes.
func FindLatestLocalFileFromGroup(lfs []*LocalFile) (*LocalFile, bool) {
// Check if there are any main episodes with valid parsed data
if !IsLocalFileGroupValidEntry(lfs) {
return nil, false
}
if len(lfs) == 0 {
return nil, false
}
// Start with the first valid main episode as the candidate, then keep the one with the highest episode number
latest, found := lo.Find(lfs, func(lf *LocalFile) bool {
return lf.GetType() == LocalFileTypeMain && lf.IsParsedEpisodeValid()
})
if !found {
return nil, false
}
for _, lf := range lfs {
if lf.GetType() == LocalFileTypeMain && lf.GetEpisodeNumber() > latest.GetEpisodeNumber() {
latest = lf
}
}
if latest == nil || latest.GetType() != LocalFileTypeMain {
return nil, false
}
return latest, true
}
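// Illustrative sketch (not part of the package API): finding the most recent
// main episode file for each matched media in the library.
func exampleLatestFilePerMedia(lfs []*LocalFile) map[int]*LocalFile {
	latestByMediaId := make(map[int]*LocalFile)
	for mediaId, group := range GroupLocalFilesByMediaID(lfs) {
		if mediaId == 0 {
			continue // skip unmatched files
		}
		if latest, found := FindLatestLocalFileFromGroup(group); found {
			latestByMediaId[mediaId] = latest
		}
	}
	return latestByMediaId
}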
func (f *LocalFile) GetParsedData() *LocalFileParsedData {
return f.ParsedData
}
// GetParsedTitle returns the parsed title of the LocalFile. Falls back to the folder title if the file title is empty.
func (f *LocalFile) GetParsedTitle() string {
if len(f.ParsedData.Title) > 0 {
return f.ParsedData.Title
}
if len(f.GetFolderTitle()) > 0 {
return f.GetFolderTitle()
}
return ""
}
func (f *LocalFile) GetFolderTitle() string {
folderTitles := make([]string, 0)
if f.ParsedFolderData != nil && len(f.ParsedFolderData) > 0 {
// Go through each folder data and keep the ones with a title
data := lo.Filter(f.ParsedFolderData, func(fpd *LocalFileParsedData, _ int) bool {
return len(fpd.Title) > 0
})
if len(data) == 0 {
return ""
}
// Get the titles
for _, v := range data {
folderTitles = append(folderTitles, v.Title)
}
// If there are multiple titles, return the one closest to the end
return folderTitles[len(folderTitles)-1]
}
return ""
}
// GetTitleVariations returns the possible title variations of the local file (filename and folder titles, with season/part permutations), used for matching against media titles.
func (f *LocalFile) GetTitleVariations() []*string {
folderSeason := 0
// Get the season from the folder data
if f.ParsedFolderData != nil && len(f.ParsedFolderData) > 0 {
v, found := lo.Find(f.ParsedFolderData, func(fpd *LocalFileParsedData) bool {
return len(fpd.Season) > 0
})
if found {
if res, ok := util.StringToInt(v.Season); ok {
folderSeason = res
}
}
}
// Get the season from the filename
season := 0
if len(f.ParsedData.Season) > 0 {
if res, ok := util.StringToInt(f.ParsedData.Season); ok {
season = res
}
}
part := 0
// Get the part from the folder data
if f.ParsedFolderData != nil && len(f.ParsedFolderData) > 0 {
v, found := lo.Find(f.ParsedFolderData, func(fpd *LocalFileParsedData) bool {
return len(fpd.Part) > 0
})
if found {
if res, ok := util.StringToInt(v.Part); ok {
part = res
}
}
}
// Devnote: This causes issues when an episode title contains "Part"
//// Get the part from the filename
//if len(f.ParsedData.Part) > 0 {
// if res, ok := util.StringToInt(f.ParsedData.Part); ok {
// part = res
// }
//}
folderTitle := f.GetFolderTitle()
if comparison.ValueContainsIgnoredKeywords(folderTitle) {
folderTitle = ""
}
if len(f.ParsedData.Title) == 0 && len(folderTitle) == 0 {
return make([]*string, 0)
}
titleVariations := make([]string, 0)
bothTitles := len(f.ParsedData.Title) > 0 && len(folderTitle) > 0 // Both titles are present (filename and folder)
noSeasonsOrParts := folderSeason == 0 && season == 0 && part == 0 // No seasons or parts are present
bothTitlesSimilar := bothTitles && strings.Contains(folderTitle, f.ParsedData.Title) // The folder title contains the filename title
eitherSeason := folderSeason > 0 || season > 0 // Either season is present
eitherSeasonFirst := folderSeason == 1 || season == 1 // Either season is 1
// Part
if part > 0 {
if len(folderTitle) > 0 {
titleVariations = append(titleVariations,
buildTitle(folderTitle, "Part", strconv.Itoa(part)),
buildTitle(folderTitle, "Part", util.IntegerToOrdinal(part)),
buildTitle(folderTitle, "Cour", strconv.Itoa(part)),
buildTitle(folderTitle, "Cour", util.IntegerToOrdinal(part)),
)
}
if len(f.ParsedData.Title) > 0 {
titleVariations = append(titleVariations,
buildTitle(f.ParsedData.Title, "Part", strconv.Itoa(part)),
buildTitle(f.ParsedData.Title, "Part", util.IntegerToOrdinal(part)),
buildTitle(f.ParsedData.Title, "Cour", strconv.Itoa(part)),
buildTitle(f.ParsedData.Title, "Cour", util.IntegerToOrdinal(part)),
)
}
}
// Title, no seasons, no parts, or season 1
// e.g. "Bungou Stray Dogs"
// e.g. "Bungou Stray Dogs Season 1"
if noSeasonsOrParts || eitherSeasonFirst {
if len(f.ParsedData.Title) > 0 { // Add filename title
titleVariations = append(titleVariations, f.ParsedData.Title)
}
if len(folderTitle) > 0 { // Both titles are present and similar, add folder title
titleVariations = append(titleVariations, folderTitle)
}
}
// Part & Season
// e.g. "Spy x Family Season 1 Part 2"
if part > 0 && eitherSeason {
if len(folderTitle) > 0 {
if season > 0 {
titleVariations = append(titleVariations,
buildTitle(folderTitle, "Season", strconv.Itoa(season), "Part", strconv.Itoa(part)),
)
} else if folderSeason > 0 {
titleVariations = append(titleVariations,
buildTitle(folderTitle, "Season", strconv.Itoa(folderSeason), "Part", strconv.Itoa(part)),
)
}
}
if len(f.ParsedData.Title) > 0 {
if season > 0 {
titleVariations = append(titleVariations,
buildTitle(f.ParsedData.Title, "Season", strconv.Itoa(season), "Part", strconv.Itoa(part)),
)
} else if folderSeason > 0 {
titleVariations = append(titleVariations,
buildTitle(f.ParsedData.Title, "Season", strconv.Itoa(folderSeason), "Part", strconv.Itoa(part)),
)
}
}
}
// Season is present
if eitherSeason {
arr := make([]string, 0)
seas := folderSeason // Default to folder parsed season
if season > 0 { // Use filename parsed season if present
seas = season
}
// Both titles are present
if bothTitles {
// Add both titles
arr = append(arr, f.ParsedData.Title)
arr = append(arr, folderTitle)
if !bothTitlesSimilar { // Combine both titles if they are not similar
arr = append(arr, fmt.Sprintf("%s %s", folderTitle, f.ParsedData.Title))
}
} else if len(folderTitle) > 0 { // Only folder title is present
arr = append(arr, folderTitle)
} else if len(f.ParsedData.Title) > 0 { // Only filename title is present
arr = append(arr, f.ParsedData.Title)
}
for _, t := range arr {
titleVariations = append(titleVariations,
buildTitle(t, "Season", strconv.Itoa(seas)),
buildTitle(t, "S"+strconv.Itoa(seas)),
buildTitle(t, util.IntegerToOrdinal(seas), "Season"),
)
}
}
titleVariations = lo.Uniq(titleVariations)
// If there are no title variations, use the folder title or the parsed title
if len(titleVariations) == 0 {
if len(folderTitle) > 0 {
titleVariations = append(titleVariations, folderTitle)
}
if len(f.ParsedData.Title) > 0 {
titleVariations = append(titleVariations, f.ParsedData.Title)
}
}
return lo.ToSlicePtr(titleVariations)
}

View File

@@ -0,0 +1,329 @@
package anime_test
import (
"github.com/davecgh/go-spew/spew"
"github.com/samber/lo"
"github.com/stretchr/testify/assert"
"path/filepath"
"seanime/internal/library/anime"
"seanime/internal/util"
"strings"
"testing"
)
func TestLocalFile_GetNormalizedPath(t *testing.T) {
tests := []struct {
filePath string
libraryPath string
expectedResult string
}{
{
filePath: "E:\\Anime\\Bungou Stray Dogs 5th Season\\Bungou Stray Dogs\\[SubsPlease] Bungou Stray Dogs - 61 (1080p) [F609B947].mkv",
libraryPath: "E:/ANIME",
expectedResult: "e:/anime/bungou stray dogs 5th season/bungou stray dogs/[subsplease] bungou stray dogs - 61 (1080p) [f609b947].mkv",
},
{
filePath: "E:\\Anime\\Shakugan No Shana\\Shakugan No Shana I\\Opening\\OP01.mkv",
libraryPath: "E:/ANIME",
expectedResult: "e:/anime/shakugan no shana/shakugan no shana i/opening/op01.mkv",
},
}
for _, tt := range tests {
t.Run(tt.filePath, func(t *testing.T) {
lf := anime.NewLocalFile(tt.filePath, tt.libraryPath)
if assert.NotNil(t, lf) {
if assert.Equal(t, tt.expectedResult, lf.GetNormalizedPath()) {
spew.Dump(lf.GetNormalizedPath())
}
}
})
}
}
func TestLocalFile_IsInDir(t *testing.T) {
tests := []struct {
filePath string
libraryPath string
dir string
expectedResult bool
}{
{
filePath: "E:\\Anime\\Bungou Stray Dogs 5th Season\\Bungou Stray Dogs\\[SubsPlease] Bungou Stray Dogs - 61 (1080p) [F609B947].mkv",
libraryPath: "E:/ANIME",
dir: "E:/ANIME/Bungou Stray Dogs 5th Season",
expectedResult: true,
},
{
filePath: "E:\\Anime\\Shakugan No Shana\\Shakugan No Shana I\\Opening\\OP01.mkv",
libraryPath: "E:/ANIME",
dir: "E:/ANIME/Shakugan No Shana",
expectedResult: true,
},
{
filePath: "E:\\Anime\\Shakugan No Shana\\Shakugan No Shana I\\Opening\\OP01.mkv",
libraryPath: "E:/ANIME",
dir: "E:/ANIME/Shakugan No Shana I",
expectedResult: false,
},
}
for _, tt := range tests {
t.Run(tt.filePath, func(t *testing.T) {
lf := anime.NewLocalFile(tt.filePath, tt.libraryPath)
if assert.NotNil(t, lf) {
if assert.Equal(t, tt.expectedResult, lf.IsInDir(tt.dir)) {
spew.Dump(lf.IsInDir(tt.dir))
}
}
})
}
}
func TestLocalFile_IsAtRootOf(t *testing.T) {
tests := []struct {
filePath string
libraryPath string
dir string
expectedResult bool
}{
{
filePath: "E:\\Anime\\Bungou Stray Dogs 5th Season\\Bungou Stray Dogs\\[SubsPlease] Bungou Stray Dogs - 61 (1080p) [F609B947].mkv",
libraryPath: "E:/ANIME",
dir: "E:/ANIME/Bungou Stray Dogs 5th Season",
expectedResult: false,
},
{
filePath: "E:\\Anime\\Shakugan No Shana\\Shakugan No Shana I\\Opening\\OP01.mkv",
libraryPath: "E:/ANIME",
dir: "E:/ANIME/Shakugan No Shana",
expectedResult: false,
},
{
filePath: "E:\\Anime\\Shakugan No Shana\\Shakugan No Shana I\\Opening\\OP01.mkv",
libraryPath: "E:/ANIME",
dir: "E:/ANIME/Shakugan No Shana/Shakugan No Shana I/Opening",
expectedResult: true,
},
}
for _, tt := range tests {
t.Run(tt.filePath, func(t *testing.T) {
lf := anime.NewLocalFile(tt.filePath, tt.libraryPath)
if assert.NotNil(t, lf) {
if !assert.Equal(t, tt.expectedResult, lf.IsAtRootOf(tt.dir)) {
t.Log(filepath.Dir(lf.GetNormalizedPath()))
t.Log(strings.TrimSuffix(util.NormalizePath(tt.dir), "/"))
}
}
})
}
}
func TestLocalFile_Equals(t *testing.T) {
tests := []struct {
filePath1 string
filePath2 string
libraryPath string
expectedResult bool
}{
{
filePath1: "E:\\Anime\\Bungou Stray Dogs 5th Season\\Bungou Stray Dogs\\[SubsPlease] Bungou Stray Dogs - 61 (1080p) [F609B947].mkv",
filePath2: "E:/ANIME/Bungou Stray Dogs 5th Season/Bungou Stray Dogs/[SubsPlease] Bungou Stray Dogs - 61 (1080p) [F609B947].mkv",
libraryPath: "E:/Anime",
expectedResult: true,
},
{
filePath1: "E:\\Anime\\Bungou Stray Dogs 5th Season\\Bungou Stray Dogs\\[SubsPlease] Bungou Stray Dogs - 61 (1080p) [F609B947].mkv",
filePath2: "E:\\Anime\\Bungou Stray Dogs 5th Season\\Bungou Stray Dogs\\[SubsPlease] Bungou Stray Dogs - 62 (1080p) [F609B947].mkv",
libraryPath: "E:/ANIME",
expectedResult: false,
},
}
for _, tt := range tests {
t.Run(tt.filePath1, func(t *testing.T) {
lf1 := anime.NewLocalFile(tt.filePath1, tt.libraryPath)
lf2 := anime.NewLocalFile(tt.filePath2, tt.libraryPath)
if assert.NotNil(t, lf1) && assert.NotNil(t, lf2) {
assert.Equal(t, tt.expectedResult, lf1.Equals(lf2))
}
})
}
}
func TestLocalFile_GetTitleVariations(t *testing.T) {
tests := []struct {
filePath string
libraryPath string
expectedTitles []string
}{
{
filePath: "E:\\Anime\\Bungou Stray Dogs 5th Season\\Bungou Stray Dogs\\[SubsPlease] Bungou Stray Dogs - 61 (1080p) [F609B947].mkv",
libraryPath: "E:/ANIME",
expectedTitles: []string{
"Bungou Stray Dogs 5th Season",
"Bungou Stray Dogs Season 5",
"Bungou Stray Dogs S5",
},
},
{
filePath: "E:\\Anime\\Shakugan No Shana\\Shakugan No Shana I\\Opening\\OP01.mkv",
libraryPath: "E:/ANIME",
expectedTitles: []string{
"Shakugan No Shana I",
},
},
{
filePath: "E:\\ANIME\\Neon Genesis Evangelion Death & Rebirth\\[Anime Time] Neon Genesis Evangelion - Rebirth.mkv",
libraryPath: "E:/ANIME",
expectedTitles: []string{
"Neon Genesis Evangelion - Rebirth",
"Neon Genesis Evangelion Death & Rebirth",
},
},
{
filePath: "E:\\ANIME\\Omoi, Omoware, Furi, Furare\\[GJM] Love Me, Love Me Not (BD 1080p) [841C23CD].mkv",
libraryPath: "E:/ANIME",
expectedTitles: []string{
"Love Me, Love Me Not",
"Omoi, Omoware, Furi, Furare",
},
},
{
filePath: "E:\\ANIME\\Violet Evergarden Gaiden Eien to Jidou Shuki Ningyou\\Violet.Evergarden.Gaiden.2019.1080..Dual.Audio.BDRip.10.bits.DD.x265-EMBER.mkv",
libraryPath: "E:/ANIME",
expectedTitles: []string{
"Violet Evergarden Gaiden Eien to Jidou Shuki Ningyou",
"Violet Evergarden Gaiden 2019",
},
},
{
filePath: "E:\\ANIME\\Violet Evergarden S01+Movies+OVA 1080p Dual Audio BDRip 10 bits DD x265-EMBER\\01. Season 1 + OVA\\S01E01-'I Love You' and Auto Memory Dolls [F03E1F7A].mkv",
libraryPath: "E:/ANIME",
expectedTitles: []string{
"Violet Evergarden",
"Violet Evergarden S1",
"Violet Evergarden Season 1",
"Violet Evergarden 1st Season",
},
},
{
filePath: "E:\\ANIME\\Golden Kamuy 4th Season\\[Judas] Golden Kamuy (Season 4) [1080p][HEVC x265 10bit][Multi-Subs]\\[Judas] Golden Kamuy - S04E01.mkv",
libraryPath: "E:/ANIME",
expectedTitles: []string{
"Golden Kamuy S4",
"Golden Kamuy Season 4",
"Golden Kamuy 4th Season",
},
},
}
for _, tt := range tests {
t.Run(tt.filePath, func(t *testing.T) {
lf := anime.NewLocalFile(tt.filePath, tt.libraryPath)
if assert.NotNil(t, lf) {
tv := lo.Map(lf.GetTitleVariations(), func(item *string, _ int) string { return *item })
if assert.ElementsMatch(t, tt.expectedTitles, tv) {
spew.Dump(lf.GetTitleVariations())
}
}
})
}
}
func TestLocalFile_GetParsedTitle(t *testing.T) {
tests := []struct {
filePath string
libraryPath string
expectedParsedTitle string
}{
{
filePath: "E:\\Anime\\Bungou Stray Dogs 5th Season\\Bungou Stray Dogs\\[SubsPlease] Bungou Stray Dogs - 61 (1080p) [F609B947].mkv",
libraryPath: "E:/ANIME",
expectedParsedTitle: "Bungou Stray Dogs",
},
{
filePath: "E:\\Anime\\Shakugan No Shana\\Shakugan No Shana I\\Opening\\OP01.mkv",
libraryPath: "E:/ANIME",
expectedParsedTitle: "Shakugan No Shana I",
},
}
for _, tt := range tests {
t.Run(tt.filePath, func(t *testing.T) {
lf := anime.NewLocalFile(tt.filePath, tt.libraryPath)
if assert.NotNil(t, lf) {
if assert.Equal(t, tt.expectedParsedTitle, lf.GetParsedTitle()) {
spew.Dump(lf.GetParsedTitle())
}
}
})
}
}
func TestLocalFile_GetFolderTitle(t *testing.T) {
tests := []struct {
filePath string
libraryPath string
expectedFolderTitle string
}{
{
filePath: "E:\\Anime\\Bungou Stray Dogs 5th Season\\S05E11 - Episode Title.mkv",
libraryPath: "E:/ANIME",
expectedFolderTitle: "Bungou Stray Dogs",
},
{
filePath: "E:\\Anime\\Shakugan No Shana\\Shakugan No Shana I\\Opening\\OP01.mkv",
libraryPath: "E:/ANIME",
expectedFolderTitle: "Shakugan No Shana I",
},
}
for _, tt := range tests {
t.Run(tt.filePath, func(t *testing.T) {
lf := anime.NewLocalFile(tt.filePath, tt.libraryPath)
if assert.NotNil(t, lf) {
if assert.Equal(t, tt.expectedFolderTitle, lf.GetFolderTitle()) {
spew.Dump(lf.GetFolderTitle())
}
}
})
}
}

View File

@@ -0,0 +1,59 @@
package anime_test
import (
"runtime"
"seanime/internal/library/anime"
"testing"
"github.com/stretchr/testify/assert"
)
func TestNewLocalFile(t *testing.T) {
tests := []struct {
path string
libraryPath string
expectedNbFolders int
expectedFilename string
os string
}{
{
path: "E:\\Anime\\Bungou Stray Dogs 5th Season\\[SubsPlease] Bungou Stray Dogs - 61 (1080p) [F609B947].mkv",
libraryPath: "E:\\Anime",
expectedFilename: "[SubsPlease] Bungou Stray Dogs - 61 (1080p) [F609B947].mkv",
expectedNbFolders: 1,
os: "windows",
},
{
path: "E:\\Anime\\Bungou Stray Dogs 5th Season\\[SubsPlease] Bungou Stray Dogs - 61 (1080p) [F609B947].mkv",
libraryPath: "E:/ANIME",
expectedFilename: "[SubsPlease] Bungou Stray Dogs - 61 (1080p) [F609B947].mkv",
expectedNbFolders: 1,
os: "windows",
},
{
path: "/mnt/Anime/Bungou Stray Dogs/Bungou Stray Dogs 5th Season/[SubsPlease] Bungou Stray Dogs - 61 (1080p) [F609B947].mkv",
libraryPath: "/mnt/Anime",
expectedFilename: "[SubsPlease] Bungou Stray Dogs - 61 (1080p) [F609B947].mkv",
expectedNbFolders: 2,
os: "",
},
}
for _, tt := range tests {
t.Run(tt.path, func(t *testing.T) {
if tt.os != "" {
if tt.os != runtime.GOOS {
t.Skipf("skipping test for %s", tt.path)
}
}
lf := anime.NewLocalFile(tt.path, tt.libraryPath)
if assert.NotNil(t, lf) {
assert.Equal(t, tt.expectedNbFolders, len(lf.ParsedFolderData))
assert.Equal(t, tt.expectedFilename, lf.Name)
}
})
}
}

View File

@@ -0,0 +1,207 @@
package anime
import (
"cmp"
"slices"
)
type (
// LocalFileWrapper takes a slice of LocalFiles and provides helper methods.
LocalFileWrapper struct {
LocalFiles []*LocalFile `json:"localFiles"`
LocalEntries []*LocalFileWrapperEntry `json:"localEntries"`
UnmatchedLocalFiles []*LocalFile `json:"unmatchedLocalFiles"`
}
LocalFileWrapperEntry struct {
MediaId int `json:"mediaId"`
LocalFiles []*LocalFile `json:"localFiles"`
}
)
// NewLocalFileWrapper creates and returns a reference to a new LocalFileWrapper
func NewLocalFileWrapper(lfs []*LocalFile) *LocalFileWrapper {
lfw := &LocalFileWrapper{
LocalFiles: lfs,
LocalEntries: make([]*LocalFileWrapperEntry, 0),
UnmatchedLocalFiles: make([]*LocalFile, 0),
}
// Group local files by media id
groupedLfs := GroupLocalFilesByMediaID(lfs)
for mId, gLfs := range groupedLfs {
if mId == 0 {
lfw.UnmatchedLocalFiles = gLfs
continue
}
lfw.LocalEntries = append(lfw.LocalEntries, &LocalFileWrapperEntry{
MediaId: mId,
LocalFiles: gLfs,
})
}
return lfw
}
func (lfw *LocalFileWrapper) GetLocalEntryById(mId int) (*LocalFileWrapperEntry, bool) {
for _, me := range lfw.LocalEntries {
if me.MediaId == mId {
return me, true
}
}
return nil, false
}
// GetMainLocalFiles returns the *main* local files.
func (e *LocalFileWrapperEntry) GetMainLocalFiles() ([]*LocalFile, bool) {
lfs := make([]*LocalFile, 0)
for _, lf := range e.LocalFiles {
if lf.IsMain() {
lfs = append(lfs, lf)
}
}
if len(lfs) == 0 {
return nil, false
}
return lfs, true
}
// GetUnwatchedLocalFiles returns the *main* local files that have not been watched.
// It returns an empty slice if all local files have been watched.
//
// /!\ If episode 0 is present, progress is decremented by 1, because we assume AniList includes episode 0 in the total episode count.
func (e *LocalFileWrapperEntry) GetUnwatchedLocalFiles(progress int) []*LocalFile {
ret := make([]*LocalFile, 0)
lfs, ok := e.GetMainLocalFiles()
if !ok {
return ret
}
for _, lf := range lfs {
if lf.GetEpisodeNumber() == 0 {
progress = progress - 1
break
}
}
for _, lf := range lfs {
if lf.GetEpisodeNumber() > progress {
ret = append(ret, lf)
}
}
return ret
}
// GetFirstUnwatchedLocalFiles is like GetUnwatchedLocalFiles but returns the local file with the lowest episode number.
func (e *LocalFileWrapperEntry) GetFirstUnwatchedLocalFiles(progress int) (*LocalFile, bool) {
lfs := e.GetUnwatchedLocalFiles(progress)
if len(lfs) == 0 {
return nil, false
}
// Sort local files by episode number
slices.SortStableFunc(lfs, func(a, b *LocalFile) int {
return cmp.Compare(a.GetEpisodeNumber(), b.GetEpisodeNumber())
})
return lfs[0], true
}
// HasMainLocalFiles returns true if there are any *main* local files.
func (e *LocalFileWrapperEntry) HasMainLocalFiles() bool {
for _, lf := range e.LocalFiles {
if lf.IsMain() {
return true
}
}
return false
}
// FindLocalFileWithEpisodeNumber returns the *main* local file with the given episode number.
func (e *LocalFileWrapperEntry) FindLocalFileWithEpisodeNumber(ep int) (*LocalFile, bool) {
for _, lf := range e.LocalFiles {
if !lf.IsMain() {
continue
}
if lf.GetEpisodeNumber() == ep {
return lf, true
}
}
return nil, false
}
// FindLatestLocalFile returns the *main* local file with the highest episode number.
func (e *LocalFileWrapperEntry) FindLatestLocalFile() (*LocalFile, bool) {
lfs, ok := e.GetMainLocalFiles()
if !ok {
return nil, false
}
// Get the local file with the highest episode number
latest := lfs[0]
for _, lf := range lfs {
if lf.GetEpisodeNumber() > latest.GetEpisodeNumber() {
latest = lf
}
}
return latest, true
}
// FindNextEpisode returns the *main* local file whose episode number immediately follows that of the given local file.
func (e *LocalFileWrapperEntry) FindNextEpisode(lf *LocalFile) (*LocalFile, bool) {
lfs, ok := e.GetMainLocalFiles()
if !ok {
return nil, false
}
// Get the local file whose episode number is after the given local file
var next *LocalFile
for _, l := range lfs {
if l.GetEpisodeNumber() == lf.GetEpisodeNumber()+1 {
next = l
break
}
}
if next == nil {
return nil, false
}
return next, true
}
// GetProgressNumber returns the progress number of a *main* local file.
func (e *LocalFileWrapperEntry) GetProgressNumber(lf *LocalFile) int {
lfs, ok := e.GetMainLocalFiles()
if !ok {
return 0
}
var hasEpZero bool
for _, l := range lfs {
if l.GetEpisodeNumber() == 0 {
hasEpZero = true
break
}
}
if hasEpZero {
return lf.GetEpisodeNumber() + 1
}
return lf.GetEpisodeNumber()
}
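// exampleProgressNumbers is an illustrative sketch (not part of the package API) showing how
// GetProgressNumber offsets episode numbers when a special episode 0 is present: with main
// files numbered 0, 1 and 2, the reported progress numbers become 1, 2 and 3 respectively.
func exampleProgressNumbers(e *LocalFileWrapperEntry) []int {
	lfs, ok := e.GetMainLocalFiles()
	if !ok {
		return nil
	}
	nums := make([]int, 0, len(lfs))
	for _, lf := range lfs {
		nums = append(nums, e.GetProgressNumber(lf))
	}
	return nums
}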
func (lfw *LocalFileWrapper) GetUnmatchedLocalFiles() []*LocalFile {
return lfw.UnmatchedLocalFiles
}
func (lfw *LocalFileWrapper) GetLocalEntries() []*LocalFileWrapperEntry {
return lfw.LocalEntries
}
func (lfw *LocalFileWrapper) GetLocalFiles() []*LocalFile {
return lfw.LocalFiles
}
func (e *LocalFileWrapperEntry) GetLocalFiles() []*LocalFile {
return e.LocalFiles
}
func (e *LocalFileWrapperEntry) GetMediaId() int {
return e.MediaId
}
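// exampleResumePlayback is an illustrative sketch (not part of the package API) tying the
// helpers above together: given hydrated local files and the user's AniList progress, it
// resolves the next unwatched *main* local file for a media entry.
func exampleResumePlayback(lfs []*LocalFile, mediaId int, progress int) (*LocalFile, bool) {
	lfw := NewLocalFileWrapper(lfs)
	entry, ok := lfw.GetLocalEntryById(mediaId)
	if !ok {
		return nil, false
	}
	return entry.GetFirstUnwatchedLocalFiles(progress)
}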

View File

@@ -0,0 +1,194 @@
package anime_test
import (
"cmp"
"github.com/stretchr/testify/assert"
"seanime/internal/library/anime"
"slices"
"testing"
)
func TestLocalFileWrapperEntry(t *testing.T) {
lfs := anime.MockHydratedLocalFiles(
anime.MockGenerateHydratedLocalFileGroupOptions("/mnt/anime/", "/mnt/anime/One Piece/One Piece - %ep.mkv", 21, []anime.MockHydratedLocalFileWrapperOptionsMetadata{
{MetadataEpisode: 1070, MetadataAniDbEpisode: "1070", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 1071, MetadataAniDbEpisode: "1071", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 1072, MetadataAniDbEpisode: "1072", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 1073, MetadataAniDbEpisode: "1073", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 1074, MetadataAniDbEpisode: "1074", MetadataType: anime.LocalFileTypeMain},
}),
anime.MockGenerateHydratedLocalFileGroupOptions("/mnt/anime/", "/mnt/anime/Blue Lock/Blue Lock - %ep.mkv", 22222, []anime.MockHydratedLocalFileWrapperOptionsMetadata{
{MetadataEpisode: 1, MetadataAniDbEpisode: "1", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 2, MetadataAniDbEpisode: "2", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 3, MetadataAniDbEpisode: "3", MetadataType: anime.LocalFileTypeMain},
}),
anime.MockGenerateHydratedLocalFileGroupOptions("/mnt/anime/", "/mnt/anime/Kimi ni Todoke/Kimi ni Todoke - %ep.mkv", 9656, []anime.MockHydratedLocalFileWrapperOptionsMetadata{
{MetadataEpisode: 0, MetadataAniDbEpisode: "S1", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 1, MetadataAniDbEpisode: "1", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 2, MetadataAniDbEpisode: "2", MetadataType: anime.LocalFileTypeMain},
}),
)
tests := []struct {
name string
mediaId int
expectedNbMainLocalFiles int
expectedLatestEpisode int
expectedEpisodeNumberAfterEpisode []int
}{
{
name: "One Piece",
mediaId: 21,
expectedNbMainLocalFiles: 5,
expectedLatestEpisode: 1074,
expectedEpisodeNumberAfterEpisode: []int{1071, 1072},
},
{
name: "Blue Lock",
mediaId: 22222,
expectedNbMainLocalFiles: 3,
expectedLatestEpisode: 3,
expectedEpisodeNumberAfterEpisode: []int{2, 3},
},
}
lfw := anime.NewLocalFileWrapper(lfs)
// Not empty
if assert.Greater(t, len(lfw.GetLocalEntries()), 0) {
for _, tt := range tests {
// Can get by id
entry, ok := lfw.GetLocalEntryById(tt.mediaId)
if assert.Truef(t, ok, "could not find entry for %s", tt.name) {
assert.Equalf(t, tt.mediaId, entry.GetMediaId(), "media id does not match for %s", tt.name)
// Can get main local files
mainLfs, ok := entry.GetMainLocalFiles()
if assert.Truef(t, ok, "could not find main local files for %s", tt.name) {
// Number of main local files matches
assert.Equalf(t, tt.expectedNbMainLocalFiles, len(mainLfs), "number of main local files does not match for %s", tt.name)
// Can find latest episode
latest, ok := entry.FindLatestLocalFile()
if assert.Truef(t, ok, "could not find latest local file for %s", tt.name) {
assert.Equalf(t, tt.expectedLatestEpisode, latest.GetEpisodeNumber(), "latest episode does not match for %s", tt.name)
}
// Can find successive episodes
firstEp, ok := entry.FindLocalFileWithEpisodeNumber(tt.expectedEpisodeNumberAfterEpisode[0])
if assert.True(t, ok) {
secondEp, ok := entry.FindNextEpisode(firstEp)
if assert.True(t, ok) {
assert.Equal(t, tt.expectedEpisodeNumberAfterEpisode[1], secondEp.GetEpisodeNumber(), "second episode does not match for %s", tt.name)
}
}
}
}
}
}
}
func TestLocalFileWrapperEntryProgressNumber(t *testing.T) {
lfs := anime.MockHydratedLocalFiles(
anime.MockGenerateHydratedLocalFileGroupOptions("/mnt/anime/", "/mnt/anime/Kimi ni Todoke/Kimi ni Todoke - %ep.mkv", 9656, []anime.MockHydratedLocalFileWrapperOptionsMetadata{
{MetadataEpisode: 0, MetadataAniDbEpisode: "S1", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 1, MetadataAniDbEpisode: "1", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 2, MetadataAniDbEpisode: "2", MetadataType: anime.LocalFileTypeMain},
}),
anime.MockGenerateHydratedLocalFileGroupOptions("/mnt/anime/", "/mnt/anime/Kimi ni Todoke/Kimi ni Todoke - %ep.mkv", 9656_2, []anime.MockHydratedLocalFileWrapperOptionsMetadata{
{MetadataEpisode: 1, MetadataAniDbEpisode: "S1", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 2, MetadataAniDbEpisode: "1", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 3, MetadataAniDbEpisode: "2", MetadataType: anime.LocalFileTypeMain},
}),
)
tests := []struct {
name string
mediaId int
expectedNbMainLocalFiles int
expectedLatestEpisode int
expectedEpisodeNumberAfterEpisode []int
expectedProgressNumbers []int
}{
{
name: "Kimi ni Todoke",
mediaId: 9656,
expectedNbMainLocalFiles: 3,
expectedLatestEpisode: 2,
expectedEpisodeNumberAfterEpisode: []int{1, 2},
expectedProgressNumbers: []int{1, 2, 3}, // S1 -> 1, 1 -> 2, 2 -> 3
},
{
name: "Kimi ni Todoke 2",
mediaId: 9656_2,
expectedNbMainLocalFiles: 3,
expectedLatestEpisode: 3,
expectedEpisodeNumberAfterEpisode: []int{2, 3},
expectedProgressNumbers: []int{1, 2, 3}, // S1 -> 1, 1 -> 2, 2 -> 3
},
}
lfw := anime.NewLocalFileWrapper(lfs)
// Not empty
if assert.Greater(t, len(lfw.GetLocalEntries()), 0) {
for _, tt := range tests {
// Can get by id
entry, ok := lfw.GetLocalEntryById(tt.mediaId)
if assert.Truef(t, ok, "could not find entry for %s", tt.name) {
assert.Equalf(t, tt.mediaId, entry.GetMediaId(), "media id does not match for %s", tt.name)
// Can get main local files
mainLfs, ok := entry.GetMainLocalFiles()
if assert.Truef(t, ok, "could not find main local files for %s", tt.name) {
// Number of main local files matches
assert.Equalf(t, tt.expectedNbMainLocalFiles, len(mainLfs), "number of main local files does not match for %s", tt.name)
// Can find latest episode
latest, ok := entry.FindLatestLocalFile()
if assert.Truef(t, ok, "could not find latest local file for %s", tt.name) {
assert.Equalf(t, tt.expectedLatestEpisode, latest.GetEpisodeNumber(), "latest episode does not match for %s", tt.name)
}
// Can find successive episodes
firstEp, ok := entry.FindLocalFileWithEpisodeNumber(tt.expectedEpisodeNumberAfterEpisode[0])
if assert.True(t, ok) {
secondEp, ok := entry.FindNextEpisode(firstEp)
if assert.True(t, ok) {
assert.Equal(t, tt.expectedEpisodeNumberAfterEpisode[1], secondEp.GetEpisodeNumber(), "second episode does not match for %s", tt.name)
}
}
slices.SortStableFunc(mainLfs, func(i *anime.LocalFile, j *anime.LocalFile) int {
return cmp.Compare(i.GetEpisodeNumber(), j.GetEpisodeNumber())
})
for idx, lf := range mainLfs {
progressNum := entry.GetProgressNumber(lf)
assert.Equalf(t, tt.expectedProgressNumbers[idx], progressNum, "progress number does not match for %s", tt.name)
}
}
}
}
}
}

View File

@@ -0,0 +1,591 @@
{
"154587": {
"localFiles": [
{
"path": "E:\\Anime\\Sousou no Frieren\\[SubsPlease] Sousou no Frieren - 01 (1080p) [F02B9CEE].mkv",
"name": "[SubsPlease] Sousou no Frieren - 01 (1080p) [F02B9CEE].mkv",
"parsedInfo": {
"original": "[SubsPlease] Sousou no Frieren - 01 (1080p) [F02B9CEE].mkv",
"title": "Sousou no Frieren",
"releaseGroup": "SubsPlease",
"episode": "01"
},
"parsedFolderInfo": [
{
"original": "Sousou no Frieren",
"title": "Sousou no Frieren"
}
],
"metadata": {
"episode": 1,
"aniDBEpisode": "1",
"type": "main"
},
"locked": false,
"ignored": false,
"mediaId": 154587
},
{
"path": "E:\\Anime\\Sousou no Frieren\\[SubsPlease] Sousou no Frieren - 02 (1080p) [E5A85899].mkv",
"name": "[SubsPlease] Sousou no Frieren - 02 (1080p) [E5A85899].mkv",
"parsedInfo": {
"original": "[SubsPlease] Sousou no Frieren - 02 (1080p) [E5A85899].mkv",
"title": "Sousou no Frieren",
"releaseGroup": "SubsPlease",
"episode": "02"
},
"parsedFolderInfo": [
{
"original": "Sousou no Frieren",
"title": "Sousou no Frieren"
}
],
"metadata": {
"episode": 2,
"aniDBEpisode": "2",
"type": "main"
},
"locked": false,
"ignored": false,
"mediaId": 154587
},
{
"path": "E:\\Anime\\Sousou no Frieren\\[SubsPlease] Sousou no Frieren - 03 (1080p) [7EF3F175].mkv",
"name": "[SubsPlease] Sousou no Frieren - 03 (1080p) [7EF3F175].mkv",
"parsedInfo": {
"original": "[SubsPlease] Sousou no Frieren - 03 (1080p) [7EF3F175].mkv",
"title": "Sousou no Frieren",
"releaseGroup": "SubsPlease",
"episode": "03"
},
"parsedFolderInfo": [
{
"original": "Sousou no Frieren",
"title": "Sousou no Frieren"
}
],
"metadata": {
"episode": 3,
"aniDBEpisode": "3",
"type": "main"
},
"locked": false,
"ignored": false,
"mediaId": 154587
},
{
"path": "E:\\Anime\\Sousou no Frieren\\[SubsPlease] Sousou no Frieren - 04 (1080p) [5ED46803].mkv",
"name": "[SubsPlease] Sousou no Frieren - 04 (1080p) [5ED46803].mkv",
"parsedInfo": {
"original": "[SubsPlease] Sousou no Frieren - 04 (1080p) [5ED46803].mkv",
"title": "Sousou no Frieren",
"releaseGroup": "SubsPlease",
"episode": "04"
},
"parsedFolderInfo": [
{
"original": "Sousou no Frieren",
"title": "Sousou no Frieren"
}
],
"metadata": {
"episode": 4,
"aniDBEpisode": "4",
"type": "main"
},
"locked": false,
"ignored": false,
"mediaId": 154587
},
{
"path": "E:\\Anime\\Sousou no Frieren\\[SubsPlease] Sousou no Frieren - 05 (1080p) [8E3F8FA5].mkv",
"name": "[SubsPlease] Sousou no Frieren - 05 (1080p) [8E3F8FA5].mkv",
"parsedInfo": {
"original": "[SubsPlease] Sousou no Frieren - 05 (1080p) [8E3F8FA5].mkv",
"title": "Sousou no Frieren",
"releaseGroup": "SubsPlease",
"episode": "05"
},
"parsedFolderInfo": [
{
"original": "Sousou no Frieren",
"title": "Sousou no Frieren"
}
],
"metadata": {
"episode": 5,
"aniDBEpisode": "5",
"type": "main"
},
"locked": false,
"ignored": false,
"mediaId": 154587
}
],
"animeCollection": {
"MediaListCollection": {
"lists": [
{
"status": "CURRENT",
"entries": [
{
"id": 366875178,
"score": 9,
"progress": 4,
"status": "CURRENT",
"repeat": 0,
"private": false,
"startedAt": {
"year": 2023,
"month": 10
},
"completedAt": {},
"media": {
"id": 154587,
"idMal": 52991,
"siteUrl": "https://anilist.co/anime/154587",
"status": "RELEASING",
"season": "FALL",
"type": "ANIME",
"format": "TV",
"bannerImage": "https://s4.anilist.co/file/anilistcdn/media/anime/banner/154587-ivXNJ23SM1xB.jpg",
"episodes": 28,
"synonyms": [
"Frieren at the Funeral",
"장송의 프리렌",
"Frieren: Oltre la Fine del Viaggio",
"คำอธิษฐานในวันที่จากลา Frieren",
"Frieren e a Jornada para o Além",
"Frieren Nach dem Ende der Reise",
"葬送的芙莉蓮",
"Frieren: Más allá del final del viaje",
"Frieren en el funeral",
"Sōsō no Furīren",
"Frieren. U kresu drogi",
"Frieren - Pháp sư tiễn táng",
"Фрирен, провожающая в последний путь"
],
"isAdult": false,
"countryOfOrigin": "JP",
"title": {
"userPreferred": "Sousou no Frieren",
"romaji": "Sousou no Frieren",
"english": "Frieren: Beyond Journeys End",
"native": "葬送のフリーレン"
},
"coverImage": {
"extraLarge": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/large/bx154587-n1fmjRv4JQUd.jpg",
"large": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/bx154587-n1fmjRv4JQUd.jpg",
"medium": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/bx154587-n1fmjRv4JQUd.jpg",
"color": "#d6f1c9"
},
"startDate": {
"year": 2023,
"month": 9,
"day": 29
},
"endDate": {},
"nextAiringEpisode": {
"airingAt": 1700229600,
"timeUntilAiring": 223940,
"episode": 11
},
"relations": {
"edges": [
{
"relationType": "SOURCE",
"node": {
"id": 118586,
"idMal": 126287,
"siteUrl": "https://anilist.co/manga/118586",
"status": "RELEASING",
"type": "MANGA",
"format": "MANGA",
"bannerImage": "https://s4.anilist.co/file/anilistcdn/media/manga/banner/118586-1JLJiwaIlnBp.jpg",
"synonyms": [
"Frieren at the Funeral",
"장송의 프리렌",
"Frieren: Oltre la Fine del Viaggio",
"คำอธิษฐานในวันที่จากลา Frieren",
"Frieren e a Jornada para o Além",
"Frieren Nach dem Ende der Reise",
"葬送的芙莉蓮",
"Frieren After \"The End\"",
"Frieren: Remnants of the Departed",
"Frieren. U kresu drogi",
"Frieren",
"FRIEREN: Más allá del fin del viaje"
],
"isAdult": false,
"countryOfOrigin": "JP",
"title": {
"userPreferred": "Sousou no Frieren",
"romaji": "Sousou no Frieren",
"english": "Frieren: Beyond Journeys End",
"native": "葬送のフリーレン"
},
"coverImage": {
"extraLarge": "https://s4.anilist.co/file/anilistcdn/media/manga/cover/large/bx118586-F0Lp86XQV7du.jpg",
"large": "https://s4.anilist.co/file/anilistcdn/media/manga/cover/medium/bx118586-F0Lp86XQV7du.jpg",
"medium": "https://s4.anilist.co/file/anilistcdn/media/manga/cover/small/bx118586-F0Lp86XQV7du.jpg",
"color": "#e4ae5d"
},
"startDate": {
"year": 2020,
"month": 4,
"day": 28
},
"endDate": {}
}
},
{
"relationType": "CHARACTER",
"node": {
"id": 169811,
"idMal": 56805,
"siteUrl": "https://anilist.co/anime/169811",
"status": "FINISHED",
"type": "ANIME",
"format": "MUSIC",
"bannerImage": "https://s4.anilist.co/file/anilistcdn/media/anime/banner/169811-jgMVZlIdH19a.jpg",
"episodes": 1,
"isAdult": false,
"countryOfOrigin": "JP",
"title": {
"userPreferred": "Yuusha",
"romaji": "Yuusha",
"native": "勇者"
},
"coverImage": {
"extraLarge": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/large/bx169811-H0RW7WHkRlbH.png",
"large": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/bx169811-H0RW7WHkRlbH.png",
"medium": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/bx169811-H0RW7WHkRlbH.png"
},
"startDate": {
"year": 2023,
"month": 9,
"day": 29
},
"endDate": {
"year": 2023,
"month": 9,
"day": 29
}
}
},
{
"relationType": "SIDE_STORY",
"node": {
"id": 170068,
"idMal": 56885,
"siteUrl": "https://anilist.co/anime/170068",
"status": "RELEASING",
"season": "FALL",
"type": "ANIME",
"format": "ONA",
"synonyms": [
"Sousou no Frieren Mini Anime",
"Frieren: Beyond Journeys End Mini Anime",
"葬送のフリーレン ミニアニメ"
],
"isAdult": false,
"countryOfOrigin": "JP",
"title": {
"userPreferred": "Sousou no Frieren: ●● no Mahou",
"romaji": "Sousou no Frieren: ●● no Mahou",
"native": "葬送のフリーレン ~●●の魔法~"
},
"coverImage": {
"extraLarge": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/large/bx170068-ijY3tCP8KoWP.jpg",
"large": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/bx170068-ijY3tCP8KoWP.jpg",
"medium": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/bx170068-ijY3tCP8KoWP.jpg",
"color": "#bbd678"
},
"startDate": {
"year": 2023,
"month": 10,
"day": 11
},
"endDate": {}
}
}
]
}
}
}
]
}
]
}
}
},
"146065": {
"localFiles": [],
"animeCollection": {
"MediaListCollection": {
"lists": [
{
"status": "CURRENT",
"entries": [
{
"id": 366466419,
"score": 0,
"progress": 0,
"status": "CURRENT",
"repeat": 0,
"private": false,
"startedAt": {
"year": 2023,
"month": 10,
"day": 4
},
"completedAt": {
"year": 2023,
"month": 10,
"day": 9
},
"media": {
"id": 146065,
"idMal": 51179,
"siteUrl": "https://anilist.co/anime/146065",
"status": "FINISHED",
"season": "SUMMER",
"type": "ANIME",
"format": "TV",
"bannerImage": "https://s4.anilist.co/file/anilistcdn/media/anime/banner/146065-33RDijfuxLLk.jpg",
"episodes": 13,
"synonyms": [
"ชาตินี้พี่ต้องเทพ ภาค 2",
"Mushoku Tensei: Isekai Ittara Honki Dasu 2nd Season",
"Mushoku Tensei II: Jobless Reincarnation",
"Mushoku Tensei II: Reencarnación desde cero",
"无职转生到了异世界就拿出真本事第2季"
],
"isAdult": false,
"countryOfOrigin": "JP",
"title": {
"userPreferred": "Mushoku Tensei II: Isekai Ittara Honki Dasu",
"romaji": "Mushoku Tensei II: Isekai Ittara Honki Dasu",
"english": "Mushoku Tensei: Jobless Reincarnation Season 2",
"native": "無職転生 Ⅱ ~異世界行ったら本気だす~"
},
"coverImage": {
"extraLarge": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/large/bx146065-IjirxRK26O03.png",
"large": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/bx146065-IjirxRK26O03.png",
"medium": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/bx146065-IjirxRK26O03.png",
"color": "#35aee4"
},
"startDate": {
"year": 2023,
"month": 7,
"day": 3
},
"endDate": {
"year": 2023,
"month": 9,
"day": 25
},
"relations": {
"edges": [
{
"relationType": "SOURCE",
"node": {
"id": 85470,
"idMal": 70261,
"siteUrl": "https://anilist.co/manga/85470",
"status": "FINISHED",
"type": "MANGA",
"format": "NOVEL",
"bannerImage": "https://s4.anilist.co/file/anilistcdn/media/manga/banner/85470-akkFSKH9aacB.jpg",
"synonyms": [
"เกิดชาตินี้พี่ต้องเทพ"
],
"isAdult": false,
"countryOfOrigin": "JP",
"title": {
"userPreferred": "Mushoku Tensei: Isekai Ittara Honki Dasu",
"romaji": "Mushoku Tensei: Isekai Ittara Honki Dasu",
"english": "Mushoku Tensei: Jobless Reincarnation",
"native": "無職転生 ~異世界行ったら本気だす~"
},
"coverImage": {
"extraLarge": "https://s4.anilist.co/file/anilistcdn/media/manga/cover/large/nx85470-jt6BF9tDWB2X.jpg",
"large": "https://s4.anilist.co/file/anilistcdn/media/manga/cover/medium/nx85470-jt6BF9tDWB2X.jpg",
"medium": "https://s4.anilist.co/file/anilistcdn/media/manga/cover/small/nx85470-jt6BF9tDWB2X.jpg",
"color": "#f1bb1a"
},
"startDate": {
"year": 2014,
"month": 1,
"day": 23
},
"endDate": {
"year": 2022,
"month": 11,
"day": 25
}
}
},
{
"relationType": "ALTERNATIVE",
"node": {
"id": 85564,
"idMal": 70259,
"siteUrl": "https://anilist.co/manga/85564",
"status": "RELEASING",
"type": "MANGA",
"format": "MANGA",
"bannerImage": "https://s4.anilist.co/file/anilistcdn/media/manga/banner/85564-Wy8IQU3Km61c.jpg",
"synonyms": [
"Mushoku Tensei: Uma segunda chance"
],
"isAdult": false,
"countryOfOrigin": "JP",
"title": {
"userPreferred": "Mushoku Tensei: Isekai Ittara Honki Dasu",
"romaji": "Mushoku Tensei: Isekai Ittara Honki Dasu",
"english": "Mushoku Tensei: Jobless Reincarnation",
"native": "無職転生 ~異世界行ったら本気だす~"
},
"coverImage": {
"extraLarge": "https://s4.anilist.co/file/anilistcdn/media/manga/cover/large/bx85564-egXRASF0x9B9.jpg",
"large": "https://s4.anilist.co/file/anilistcdn/media/manga/cover/medium/bx85564-egXRASF0x9B9.jpg",
"medium": "https://s4.anilist.co/file/anilistcdn/media/manga/cover/small/bx85564-egXRASF0x9B9.jpg",
"color": "#e4ae0d"
},
"startDate": {
"year": 2014,
"month": 5,
"day": 2
},
"endDate": {}
}
},
{
"relationType": "PREQUEL",
"node": {
"id": 127720,
"idMal": 45576,
"siteUrl": "https://anilist.co/anime/127720",
"status": "FINISHED",
"season": "FALL",
"type": "ANIME",
"format": "TV",
"bannerImage": "https://s4.anilist.co/file/anilistcdn/media/anime/banner/127720-oBpHiMWQhFVN.jpg",
"episodes": 12,
"synonyms": [
"Mushoku Tensei: Jobless Reincarnation Part 2",
"ชาตินี้พี่ต้องเทพ พาร์ท 2"
],
"isAdult": false,
"countryOfOrigin": "JP",
"title": {
"userPreferred": "Mushoku Tensei: Isekai Ittara Honki Dasu Part 2",
"romaji": "Mushoku Tensei: Isekai Ittara Honki Dasu Part 2",
"english": "Mushoku Tensei: Jobless Reincarnation Cour 2",
"native": "無職転生 ~異世界行ったら本気だす~ 第2クール"
},
"coverImage": {
"extraLarge": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/large/bx127720-ADJgIrUVMdU9.jpg",
"large": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/bx127720-ADJgIrUVMdU9.jpg",
"medium": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/bx127720-ADJgIrUVMdU9.jpg",
"color": "#d6bb1a"
},
"startDate": {
"year": 2021,
"month": 10,
"day": 4
},
"endDate": {
"year": 2021,
"month": 12,
"day": 20
}
}
},
{
"relationType": "ALTERNATIVE",
"node": {
"id": 142989,
"idMal": 142765,
"siteUrl": "https://anilist.co/manga/142989",
"status": "RELEASING",
"type": "MANGA",
"format": "MANGA",
"synonyms": [
"Mushoku Tensei - Depressed Magician"
],
"isAdult": false,
"countryOfOrigin": "JP",
"title": {
"userPreferred": "Mushoku Tensei: Isekai Ittara Honki Dasu - Shitsui no Majutsushi-hen",
"romaji": "Mushoku Tensei: Isekai Ittara Honki Dasu - Shitsui no Majutsushi-hen",
"native": "無職転生 ~異世界行ったら本気だす~ 失意の魔術師編"
},
"coverImage": {
"extraLarge": "https://s4.anilist.co/file/anilistcdn/media/manga/cover/large/bx142989-jYDNHLwdER70.png",
"large": "https://s4.anilist.co/file/anilistcdn/media/manga/cover/medium/bx142989-jYDNHLwdER70.png",
"medium": "https://s4.anilist.co/file/anilistcdn/media/manga/cover/small/bx142989-jYDNHLwdER70.png",
"color": "#e4bb28"
},
"startDate": {
"year": 2021,
"month": 12,
"day": 20
},
"endDate": {}
}
},
{
"relationType": "SEQUEL",
"node": {
"id": 166873,
"idMal": 55888,
"siteUrl": "https://anilist.co/anime/166873",
"status": "NOT_YET_RELEASED",
"season": "SPRING",
"type": "ANIME",
"format": "TV",
"episodes": 12,
"synonyms": [
"Mushoku Tensei: Jobless Reincarnation Season 2 Part 2",
"ชาตินี้พี่ต้องเทพ ภาค 2",
"Mushoku Tensei: Isekai Ittara Honki Dasu 2nd Season Part 2",
"Mushoku Tensei II: Jobless Reincarnation Part 2",
"Mushoku Tensei II: Reencarnación desde cero",
"无职转生到了异世界就拿出真本事第2季"
],
"isAdult": false,
"countryOfOrigin": "JP",
"title": {
"userPreferred": "Mushoku Tensei II: Isekai Ittara Honki Dasu Part 2",
"romaji": "Mushoku Tensei II: Isekai Ittara Honki Dasu Part 2",
"native": "無職転生 Ⅱ ~異世界行ったら本気だす~ 第2クール"
},
"coverImage": {
"extraLarge": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/large/bx166873-cqMLPB00KcEI.jpg",
"large": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/bx166873-cqMLPB00KcEI.jpg",
"medium": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/bx166873-cqMLPB00KcEI.jpg",
"color": "#6b501a"
},
"startDate": {
"year": 2024,
"month": 4
},
"endDate": {
"year": 2024,
"month": 6
}
}
}
]
}
}
}
]
}
]
}
}
}
}

View File

@@ -0,0 +1,154 @@
package anime
import (
"fmt"
"seanime/internal/api/anilist"
"seanime/internal/api/metadata"
"seanime/internal/hook"
"seanime/internal/util/limiter"
"sort"
"time"
"github.com/samber/lo"
lop "github.com/samber/lo/parallel"
"github.com/sourcegraph/conc/pool"
)
type (
MissingEpisodes struct {
Episodes []*Episode `json:"episodes"`
SilencedEpisodes []*Episode `json:"silencedEpisodes"`
}
NewMissingEpisodesOptions struct {
AnimeCollection *anilist.AnimeCollection
LocalFiles []*LocalFile
SilencedMediaIds []int
MetadataProvider metadata.Provider
}
)
func NewMissingEpisodes(opts *NewMissingEpisodesOptions) *MissingEpisodes {
missing := new(MissingEpisodes)
reqEvent := new(MissingEpisodesRequestedEvent)
reqEvent.AnimeCollection = opts.AnimeCollection
reqEvent.LocalFiles = opts.LocalFiles
reqEvent.SilencedMediaIds = opts.SilencedMediaIds
reqEvent.MissingEpisodes = missing
err := hook.GlobalHookManager.OnMissingEpisodesRequested().Trigger(reqEvent)
if err != nil {
return nil
}
opts.AnimeCollection = reqEvent.AnimeCollection // Override the anime collection
opts.LocalFiles = reqEvent.LocalFiles // Override the local files
opts.SilencedMediaIds = reqEvent.SilencedMediaIds // Override the silenced media IDs
missing = reqEvent.MissingEpisodes
// If the default was prevented by a hook, return the missing episodes as-is
if reqEvent.DefaultPrevented {
event := new(MissingEpisodesEvent)
event.MissingEpisodes = missing
err = hook.GlobalHookManager.OnMissingEpisodes().Trigger(event)
if err != nil {
return nil
}
return event.MissingEpisodes
}
groupedLfs := GroupLocalFilesByMediaID(opts.LocalFiles)
rateLimiter := limiter.NewLimiter(time.Second, 20)
p := pool.NewWithResults[[]*EntryDownloadEpisode]()
for mId, lfs := range groupedLfs {
p.Go(func() []*EntryDownloadEpisode {
entry, found := opts.AnimeCollection.GetListEntryFromAnimeId(mId)
if !found {
return nil
}
// Skip if the status is nil, dropped or completed
if entry.Status == nil || *entry.Status == anilist.MediaListStatusDropped || *entry.Status == anilist.MediaListStatusCompleted {
return nil
}
latestLf, found := FindLatestLocalFileFromGroup(lfs)
if !found {
return nil
}
// Skip if the aired episode count is unknown or if the latest local file's episode number is equal to or higher than it (nothing is missing)
if entry.Media.GetCurrentEpisodeCount() == -1 || entry.Media.GetCurrentEpisodeCount() <= latestLf.GetEpisodeNumber() {
return nil
}
rateLimiter.Wait()
// Fetch anime metadata
animeMetadata, err := opts.MetadataProvider.GetAnimeMetadata(metadata.AnilistPlatform, entry.Media.ID)
if err != nil {
return nil
}
// Get download info
downloadInfo, err := NewEntryDownloadInfo(&NewEntryDownloadInfoOptions{
LocalFiles: lfs,
AnimeMetadata: animeMetadata,
Media: entry.Media,
Progress: entry.Progress,
Status: entry.Status,
MetadataProvider: opts.MetadataProvider,
})
if err != nil {
return nil
}
episodes := downloadInfo.EpisodesToDownload
sort.Slice(episodes, func(i, j int) bool {
return episodes[i].Episode.GetEpisodeNumber() < episodes[j].Episode.GetEpisodeNumber()
})
// If there is more than one episode to download, keep only the first one and append the number of remaining episodes to its display title
if len(episodes) > 1 {
episodes = episodes[:1] // keep the first episode
if episodes[0].Episode != nil {
episodes[0].Episode.DisplayTitle = episodes[0].Episode.DisplayTitle + fmt.Sprintf(" & %d more", len(downloadInfo.EpisodesToDownload)-1)
}
}
return episodes
})
}
epsToDownload := p.Wait()
epsToDownload = lo.Filter(epsToDownload, func(item []*EntryDownloadEpisode, _ int) bool {
return item != nil
})
// Flatten
flattenedEpsToDownload := lo.Flatten(epsToDownload)
eps := lop.Map(flattenedEpsToDownload, func(item *EntryDownloadEpisode, _ int) *Episode {
return item.Episode
})
// Sort
sort.Slice(eps, func(i, j int) bool {
return eps[i].GetEpisodeNumber() < eps[j].GetEpisodeNumber()
})
sort.Slice(eps, func(i, j int) bool {
return eps[i].BaseAnime.ID < eps[j].BaseAnime.ID
})
missing.Episodes = lo.Filter(eps, func(item *Episode, _ int) bool {
return !lo.Contains(opts.SilencedMediaIds, item.BaseAnime.ID)
})
missing.SilencedEpisodes = lo.Filter(eps, func(item *Episode, _ int) bool {
return lo.Contains(opts.SilencedMediaIds, item.BaseAnime.ID)
})
// Event
event := new(MissingEpisodesEvent)
event.MissingEpisodes = missing
err = hook.GlobalHookManager.OnMissingEpisodes().Trigger(event)
if err != nil {
return nil
}
return event.MissingEpisodes
}
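// exampleMissingEpisodeTitles is an illustrative sketch (not used by the module itself) of how
// NewMissingEpisodes is typically consumed: build the options from the user's AniList
// collection and the scanned local files, then read back the display titles of the episodes
// that are missing locally.
func exampleMissingEpisodeTitles(collection *anilist.AnimeCollection, lfs []*LocalFile, provider metadata.Provider) []string {
	missing := NewMissingEpisodes(&NewMissingEpisodesOptions{
		AnimeCollection:  collection,
		LocalFiles:       lfs,
		SilencedMediaIds: []int{},
		MetadataProvider: provider,
	})
	if missing == nil { // a hook error can make the constructor return nil
		return nil
	}
	titles := make([]string, 0, len(missing.Episodes))
	for _, ep := range missing.Episodes {
		titles = append(titles, ep.DisplayTitle)
	}
	return titles
}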

View File

@@ -0,0 +1,85 @@
package anime_test
import (
"context"
"github.com/samber/lo"
"github.com/stretchr/testify/assert"
"seanime/internal/api/anilist"
"seanime/internal/api/metadata"
"seanime/internal/library/anime"
"seanime/internal/test_utils"
"testing"
)
// Test to retrieve accurate missing episodes
// DEPRECATED
func TestNewMissingEpisodes(t *testing.T) {
t.Skip("Outdated test")
test_utils.InitTestProvider(t, test_utils.Anilist())
metadataProvider := metadata.GetMockProvider(t)
anilistClient := anilist.TestGetMockAnilistClient()
animeCollection, err := anilistClient.AnimeCollection(context.Background(), nil)
if err != nil {
t.Fatal(err)
}
tests := []struct {
name string
mediaId int
localFiles []*anime.LocalFile
mediaAiredEpisodes int
currentProgress int
expectedMissingEpisodes int
}{
{
// Sousou no Frieren - 10 currently aired episodes
// User has 5 local files from ep 1 to 5, but only watched 4 episodes
// So we should expect to see 5 missing episodes
name: "Sousou no Frieren, missing 5 episodes",
mediaId: 154587,
localFiles: anime.MockHydratedLocalFiles(
anime.MockGenerateHydratedLocalFileGroupOptions("E:/Anime", "E:\\Anime\\Sousou no Frieren\\[SubsPlease] Sousou no Frieren - %ep (1080p) [F02B9CEE].mkv", 154587, []anime.MockHydratedLocalFileWrapperOptionsMetadata{
{MetadataEpisode: 1, MetadataAniDbEpisode: "1", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 2, MetadataAniDbEpisode: "2", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 3, MetadataAniDbEpisode: "3", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 4, MetadataAniDbEpisode: "4", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 5, MetadataAniDbEpisode: "5", MetadataType: anime.LocalFileTypeMain},
}),
),
mediaAiredEpisodes: 10,
currentProgress: 4,
//expectedMissingEpisodes: 5,
expectedMissingEpisodes: 1, // DEVNOTE: Now the value is 1 at most because everything else is merged
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
// Mock Anilist collection
anilist.TestModifyAnimeCollectionEntry(animeCollection, tt.mediaId, anilist.TestModifyAnimeCollectionEntryInput{
Progress: lo.ToPtr(tt.currentProgress), // Mock progress
AiredEpisodes: lo.ToPtr(tt.mediaAiredEpisodes),
NextAiringEpisode: &anilist.BaseAnime_NextAiringEpisode{
Episode: tt.mediaAiredEpisodes + 1,
},
})
if assert.NoError(t, err) {
missingData := anime.NewMissingEpisodes(&anime.NewMissingEpisodesOptions{
AnimeCollection: animeCollection,
LocalFiles: tt.localFiles,
MetadataProvider: metadataProvider,
})
assert.Equal(t, tt.expectedMissingEpisodes, len(missingData.Episodes))
}
})
}
}

View File

@@ -0,0 +1,24 @@
package anime
import (
"seanime/internal/api/anilist"
"seanime/internal/util/result"
)
type NormalizedMedia struct {
*anilist.BaseAnime
}
type NormalizedMediaCache struct {
*result.Cache[int, *NormalizedMedia]
}
func NewNormalizedMedia(m *anilist.BaseAnime) *NormalizedMedia {
return &NormalizedMedia{
BaseAnime: m,
}
}
func NewNormalizedMediaCache() *NormalizedMediaCache {
return &NormalizedMediaCache{result.NewCache[int, *NormalizedMedia]()}
}
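// exampleNormalizeAll is an illustrative sketch (not part of the package API): it wraps a
// slice of AniList media into NormalizedMedia values, e.g. before storing them in a
// NormalizedMediaCache keyed by media ID.
func exampleNormalizeAll(media []*anilist.BaseAnime) []*NormalizedMedia {
	ret := make([]*NormalizedMedia, 0, len(media))
	for _, m := range media {
		ret = append(ret, NewNormalizedMedia(m))
	}
	return ret
}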

View File

@@ -0,0 +1,50 @@
package anime
import (
"seanime/internal/util"
)
type (
// Playlist holds the data from models.PlaylistEntry
Playlist struct {
DbId uint `json:"dbId"` // DbId is the database ID of the models.PlaylistEntry
Name string `json:"name"` // Name is the name of the playlist
LocalFiles []*LocalFile `json:"localFiles"` // LocalFiles is a list of local files in the playlist, in order
}
)
// NewPlaylist creates a new Playlist instance
func NewPlaylist(name string) *Playlist {
return &Playlist{
Name: name,
LocalFiles: make([]*LocalFile, 0),
}
}
func (pd *Playlist) SetLocalFiles(lfs []*LocalFile) {
pd.LocalFiles = lfs
}
// AddLocalFile adds a local file to the playlist
func (pd *Playlist) AddLocalFile(localFile *LocalFile) {
pd.LocalFiles = append(pd.LocalFiles, localFile)
}
// RemoveLocalFile removes a local file from the playlist
func (pd *Playlist) RemoveLocalFile(path string) {
for i, lf := range pd.LocalFiles {
if lf.GetNormalizedPath() == util.NormalizePath(path) {
pd.LocalFiles = append(pd.LocalFiles[:i], pd.LocalFiles[i+1:]...)
return
}
}
}
func (pd *Playlist) LocalFileExists(path string, lfs []*LocalFile) bool {
for _, lf := range lfs {
if lf.GetNormalizedPath() == util.NormalizePath(path) {
return true
}
}
return false
}
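// examplePlaylistRoundTrip is an illustrative sketch (not part of the package API) of the
// basic playlist workflow: create a playlist, queue local files in order, then drop one of
// them by path.
func examplePlaylistRoundTrip(lfs []*LocalFile) *Playlist {
	playlist := NewPlaylist("Continue watching")
	for _, lf := range lfs {
		playlist.AddLocalFile(lf)
	}
	if len(lfs) > 0 {
		playlist.RemoveLocalFile(lfs[0].Path) // removal matches on the normalized path
	}
	return playlist
}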

View File

@@ -0,0 +1,111 @@
package anime
import (
"fmt"
"seanime/internal/api/anilist"
"seanime/internal/hook"
"time"
"github.com/samber/lo"
)
type ScheduleItem struct {
MediaId int `json:"mediaId"`
Title string `json:"title"`
// Time is in 15:04 format
Time string `json:"time"`
// DateTime is in UTC
DateTime time.Time `json:"dateTime"`
Image string `json:"image"`
EpisodeNumber int `json:"episodeNumber"`
IsMovie bool `json:"isMovie"`
IsSeasonFinale bool `json:"isSeasonFinale"`
}
func GetScheduleItems(animeSchedule *anilist.AnimeAiringSchedule, animeCollection *anilist.AnimeCollection) []*ScheduleItem {
animeEntryMap := make(map[int]*anilist.AnimeListEntry)
for _, list := range animeCollection.MediaListCollection.GetLists() {
for _, entry := range list.GetEntries() {
animeEntryMap[entry.GetMedia().GetID()] = entry
}
}
type animeScheduleNode interface {
GetAiringAt() int
GetTimeUntilAiring() int
GetEpisode() int
}
type animeScheduleMedia interface {
GetMedia() []*anilist.AnimeSchedule
}
formatNodeItem := func(node animeScheduleNode, entry *anilist.AnimeListEntry) *ScheduleItem {
t := time.Unix(int64(node.GetAiringAt()), 0)
item := &ScheduleItem{
MediaId: entry.GetMedia().GetID(),
Title: *entry.GetMedia().GetTitle().GetUserPreferred(),
Time: t.UTC().Format("15:04"),
DateTime: t.UTC(),
Image: entry.GetMedia().GetCoverImageSafe(),
EpisodeNumber: node.GetEpisode(),
IsMovie: entry.GetMedia().IsMovie(),
IsSeasonFinale: false,
}
if entry.GetMedia().GetTotalEpisodeCount() > 0 && node.GetEpisode() == entry.GetMedia().GetTotalEpisodeCount() {
item.IsSeasonFinale = true
}
return item
}
formatPart := func(m animeScheduleMedia) ([]*ScheduleItem, bool) {
if m == nil {
return nil, false
}
ret := make([]*ScheduleItem, 0)
for _, m := range m.GetMedia() {
entry, ok := animeEntryMap[m.GetID()]
if !ok || entry.Status == nil || *entry.Status == anilist.MediaListStatusDropped {
continue
}
for _, n := range m.GetPrevious().GetNodes() {
ret = append(ret, formatNodeItem(n, entry))
}
for _, n := range m.GetUpcoming().GetNodes() {
ret = append(ret, formatNodeItem(n, entry))
}
}
return ret, true
}
ongoingItems, _ := formatPart(animeSchedule.GetOngoing())
ongoingNextItems, _ := formatPart(animeSchedule.GetOngoingNext())
precedingItems, _ := formatPart(animeSchedule.GetPreceding())
upcomingItems, _ := formatPart(animeSchedule.GetUpcoming())
upcomingNextItems, _ := formatPart(animeSchedule.GetUpcomingNext())
allItems := make([]*ScheduleItem, 0)
allItems = append(allItems, ongoingItems...)
allItems = append(allItems, ongoingNextItems...)
allItems = append(allItems, precedingItems...)
allItems = append(allItems, upcomingItems...)
allItems = append(allItems, upcomingNextItems...)
ret := lo.UniqBy(allItems, func(item *ScheduleItem) string {
if item == nil {
return ""
}
return fmt.Sprintf("%d-%d-%d", item.MediaId, item.EpisodeNumber, item.DateTime.Unix())
})
event := &AnimeScheduleItemsEvent{
AnimeCollection: animeCollection,
Items: ret,
}
err := hook.GlobalHookManager.OnAnimeScheduleItems().Trigger(event)
if err != nil {
return ret
}
return event.Items
}
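// exampleItemsForDay is an illustrative sketch (not part of the package API): it filters the
// items returned by GetScheduleItems down to those airing on a given UTC calendar day, which
// is roughly what a schedule calendar view needs.
func exampleItemsForDay(items []*ScheduleItem, day time.Time) []*ScheduleItem {
	ret := make([]*ScheduleItem, 0)
	y, m, d := day.UTC().Date()
	for _, item := range items {
		iy, im, id := item.DateTime.UTC().Date()
		if iy == y && im == m && id == d {
			ret = append(ret, item)
		}
	}
	return ret
}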

View File

@@ -0,0 +1,80 @@
package anime
import (
"strconv"
"strings"
)
type MockHydratedLocalFileOptions struct {
FilePath string
LibraryPath string
MediaId int
MetadataEpisode int
MetadataAniDbEpisode string
MetadataType LocalFileType
}
func MockHydratedLocalFile(opts MockHydratedLocalFileOptions) *LocalFile {
lf := NewLocalFile(opts.FilePath, opts.LibraryPath)
lf.MediaId = opts.MediaId
lf.Metadata = &LocalFileMetadata{
AniDBEpisode: opts.MetadataAniDbEpisode,
Episode: opts.MetadataEpisode,
Type: opts.MetadataType,
}
return lf
}
// MockHydratedLocalFiles creates a slice of LocalFiles based on the provided option groups
//
// Example:
//
// MockHydratedLocalFiles(
// []MockHydratedLocalFileOptions{
// {
// FilePath: "/mnt/anime/One Piece/One Piece - 1070.mkv",
// LibraryPath: "/mnt/anime/",
// MetadataEpisode: 1070,
// MetadataAniDbEpisode: "1070",
// MetadataType: LocalFileTypeMain,
// },
// },
// []MockHydratedLocalFileOptions{
// ...
// },
// )
func MockHydratedLocalFiles(opts ...[]MockHydratedLocalFileOptions) []*LocalFile {
lfs := make([]*LocalFile, 0, len(opts))
for _, opt := range opts {
for _, o := range opt {
lfs = append(lfs, MockHydratedLocalFile(o))
}
}
return lfs
}
type MockHydratedLocalFileWrapperOptionsMetadata struct {
MetadataEpisode int
MetadataAniDbEpisode string
MetadataType LocalFileType
}
// MockGenerateHydratedLocalFileGroupOptions generates a slice of MockHydratedLocalFileOptions based on a template string and metadata
//
// Example:
//
// MockGenerateHydratedLocalFileGroupOptions("/mnt/anime/", "One Piece/One Piece - %ep.mkv", 21, []MockHydratedLocalFileWrapperOptionsMetadata{
// {MetadataEpisode: 1070, MetadataAniDbEpisode: "1070", MetadataType: LocalFileTypeMain},
// })
func MockGenerateHydratedLocalFileGroupOptions(libraryPath string, template string, mId int, m []MockHydratedLocalFileWrapperOptionsMetadata) []MockHydratedLocalFileOptions {
opts := make([]MockHydratedLocalFileOptions, 0, len(m))
for _, metadata := range m {
opts = append(opts, MockHydratedLocalFileOptions{
FilePath: strings.ReplaceAll(template, "%ep", strconv.Itoa(metadata.MetadataEpisode)),
LibraryPath: libraryPath,
MediaId: mId,
MetadataEpisode: metadata.MetadataEpisode,
MetadataAniDbEpisode: metadata.MetadataAniDbEpisode,
MetadataType: metadata.MetadataType,
})
}
return opts
}
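// exampleMockLibrary is an illustrative sketch (not part of the package API) of how the two
// helpers above are combined in tests: generate a group of hydrated local files for a single
// media ID from a path template, then flatten the groups into a []*LocalFile.
func exampleMockLibrary() []*LocalFile {
	return MockHydratedLocalFiles(
		MockGenerateHydratedLocalFileGroupOptions("/mnt/anime/", "/mnt/anime/One Piece/One Piece - %ep.mkv", 21, []MockHydratedLocalFileWrapperOptionsMetadata{
			{MetadataEpisode: 1070, MetadataAniDbEpisode: "1070", MetadataType: LocalFileTypeMain},
			{MetadataEpisode: 1071, MetadataAniDbEpisode: "1071", MetadataType: LocalFileTypeMain},
		}),
	)
}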

File diff suppressed because it is too large

View File

@@ -0,0 +1,93 @@
package autodownloader
import (
"errors"
hibiketorrent "seanime/internal/extension/hibike/torrent"
"seanime/internal/library/anime"
"sync"
"github.com/5rahim/habari"
"github.com/samber/lo"
)
type (
// NormalizedTorrent is a struct built from a torrent returned by a provider.
// It is used to normalize the data from different providers so that it can be used by the AutoDownloader.
NormalizedTorrent struct {
hibiketorrent.AnimeTorrent
ParsedData *habari.Metadata `json:"parsedData"`
magnet string // Access using GetMagnet()
}
)
func (ad *AutoDownloader) getLatestTorrents(rules []*anime.AutoDownloaderRule) (ret []*NormalizedTorrent, err error) {
ad.logger.Debug().Msg("autodownloader: Checking for new episodes")
providerExtension, ok := ad.torrentRepository.GetDefaultAnimeProviderExtension()
if !ok {
ad.logger.Warn().Msg("autodownloader: No default torrent provider found")
return nil, errors.New("no default torrent provider found")
}
// Get the latest torrents
torrents, err := providerExtension.GetProvider().GetLatest()
if err != nil {
ad.logger.Error().Err(err).Msg("autodownloader: Failed to get latest torrents")
return nil, err
}
if ad.settings.EnableEnhancedQueries {
// Get unique release groups
uniqueReleaseGroups := GetUniqueReleaseGroups(rules)
// Filter the torrents
wg := sync.WaitGroup{}
mu := sync.Mutex{}
wg.Add(len(uniqueReleaseGroups))
for _, releaseGroup := range uniqueReleaseGroups {
go func(releaseGroup string) {
defer wg.Done()
filteredTorrents, err := providerExtension.GetProvider().Search(hibiketorrent.AnimeSearchOptions{
Media: hibiketorrent.Media{},
Query: releaseGroup,
})
if err != nil {
return
}
mu.Lock()
torrents = append(torrents, filteredTorrents...)
mu.Unlock()
}(releaseGroup)
}
wg.Wait()
// Remove duplicates
torrents = lo.UniqBy(torrents, func(t *hibiketorrent.AnimeTorrent) string {
return t.Name
})
}
// Normalize the torrents
ret = make([]*NormalizedTorrent, 0, len(torrents))
for _, t := range torrents {
parsedData := habari.Parse(t.Name)
ret = append(ret, &NormalizedTorrent{
AnimeTorrent: *t,
ParsedData: parsedData,
})
}
return ret, nil
}
// GetMagnet returns the magnet link for the torrent.
func (t *NormalizedTorrent) GetMagnet(providerExtension hibiketorrent.AnimeProvider) (string, error) {
if t.magnet == "" {
magnet, err := providerExtension.GetTorrentMagnetLink(&t.AnimeTorrent)
if err != nil {
return "", err
}
t.magnet = magnet
return t.magnet, nil
}
return t.magnet, nil
}
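// exampleResolveMagnets is an illustrative sketch (not used by the AutoDownloader itself): it
// resolves the magnet links of a batch of normalized torrents through the active provider
// extension, skipping any torrent whose magnet cannot be fetched.
func exampleResolveMagnets(provider hibiketorrent.AnimeProvider, torrents []*NormalizedTorrent) []string {
	magnets := make([]string, 0, len(torrents))
	for _, t := range torrents {
		magnet, err := t.GetMagnet(provider)
		if err != nil {
			continue // ignore torrents whose magnet could not be resolved
		}
		magnets = append(magnets, magnet)
	}
	return magnets
}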

View File

@@ -0,0 +1,327 @@
package autodownloader
import (
"github.com/5rahim/habari"
"github.com/samber/lo"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"seanime/internal/api/anilist"
"seanime/internal/api/metadata"
"seanime/internal/database/models"
"seanime/internal/library/anime"
"testing"
)
func TestComparison(t *testing.T) {
ad := AutoDownloader{
metadataProvider: metadata.GetMockProvider(t),
settings: &models.AutoDownloaderSettings{
EnableSeasonCheck: true,
},
}
name1 := "[Oshi no Ko] 2nd Season"
name2 := "Oshi no Ko Season 2"
aniListEntry := &anilist.AnimeListEntry{
Media: &anilist.BaseAnime{
ID: 166531,
Title: &anilist.BaseAnime_Title{
Romaji: &name1,
English: &name2,
},
Episodes: lo.ToPtr(13),
Format: lo.ToPtr(anilist.MediaFormatTv),
},
}
rule := &anime.AutoDownloaderRule{
MediaId: 166531,
ReleaseGroups: []string{"SubsPlease", "Erai-raws"},
Resolutions: []string{"1080p"},
TitleComparisonType: "likely",
EpisodeType: "recent",
EpisodeNumbers: []int{3}, // ignored
Destination: "/data/seanime/library/[Oshi no Ko] 2nd Season",
ComparisonTitle: "[Oshi no Ko] 2nd Season",
}
tests := []struct {
torrentName string
succeedTitleComparison bool
succeedSeasonAndEpisodeMatch bool
enableSeasonCheck bool
}{
{
torrentName: "[Erai-raws] Oshi no Ko 2nd Season - 03 [720p][Multiple Subtitle] [ENG][FRE]",
succeedTitleComparison: true,
succeedSeasonAndEpisodeMatch: true,
enableSeasonCheck: true,
},
{
torrentName: "[SubsPlease] Oshi no Ko - 16 (1080p)",
succeedTitleComparison: true,
succeedSeasonAndEpisodeMatch: true,
enableSeasonCheck: true,
},
{
torrentName: "[Erai-raws] Oshi no Ko 3rd Season - 03 [720p][Multiple Subtitle] [ENG][FRE]",
succeedTitleComparison: true,
succeedSeasonAndEpisodeMatch: false,
enableSeasonCheck: true,
},
{
torrentName: "[Erai-raws] Oshi no Ko 2nd Season - 03 [720p][Multiple Subtitle] [ENG][FRE]",
succeedTitleComparison: true,
succeedSeasonAndEpisodeMatch: true,
enableSeasonCheck: false,
},
{
torrentName: "[SubsPlease] Oshi no Ko - 16 (1080p)",
succeedTitleComparison: true,
succeedSeasonAndEpisodeMatch: true,
enableSeasonCheck: false,
},
{
torrentName: "[Erai-raws] Oshi no Ko 3rd Season - 03 [720p][Multiple Subtitle] [ENG][FRE]",
succeedTitleComparison: true,
succeedSeasonAndEpisodeMatch: true,
enableSeasonCheck: false,
},
}
lfw := anime.NewLocalFileWrapper([]*anime.LocalFile{
{
Path: "/data/seanime/library/[Oshi no Ko] 2nd Season/[SubsPlease] Oshi no Ko - 12 (1080p).mkv",
Name: "Oshi no Ko - 12 (1080p).mkv",
ParsedData: &anime.LocalFileParsedData{
Original: "Oshi no Ko - 12 (1080p).mkv",
Title: "Oshi no Ko",
ReleaseGroup: "SubsPlease",
},
ParsedFolderData: []*anime.LocalFileParsedData{
{
Original: "[Oshi no Ko] 2nd Season",
Title: "[Oshi no Ko]",
},
},
Metadata: &anime.LocalFileMetadata{
Episode: 1,
AniDBEpisode: "1",
Type: "main",
},
MediaId: 166531,
},
})
for _, tt := range tests {
t.Run(tt.torrentName, func(t *testing.T) {
ad.settings.EnableSeasonCheck = tt.enableSeasonCheck
p := habari.Parse(tt.torrentName)
if tt.succeedTitleComparison {
require.True(t, ad.isTitleMatch(p, tt.torrentName, rule, aniListEntry))
} else {
require.False(t, ad.isTitleMatch(p, tt.torrentName, rule, aniListEntry))
}
lfwe, ok := lfw.GetLocalEntryById(166531)
require.True(t, ok)
_, ok = ad.isSeasonAndEpisodeMatch(p, rule, aniListEntry, lfwe, []*models.AutoDownloaderItem{})
if tt.succeedSeasonAndEpisodeMatch {
require.True(t, ok)
} else {
require.False(t, ok)
}
})
}
}
func TestComparison2(t *testing.T) {
ad := AutoDownloader{
metadataProvider: metadata.GetMockProvider(t),
settings: &models.AutoDownloaderSettings{
EnableSeasonCheck: true,
},
}
name1 := "DANDADAN"
name2 := "Dandadan"
aniListEntry := &anilist.AnimeListEntry{
Media: &anilist.BaseAnime{
Title: &anilist.BaseAnime_Title{
Romaji: &name1,
English: &name2,
},
Episodes: lo.ToPtr(12),
Status: lo.ToPtr(anilist.MediaStatusFinished),
Format: lo.ToPtr(anilist.MediaFormatTv),
},
}
rule := &anime.AutoDownloaderRule{
MediaId: 166531,
ReleaseGroups: []string{},
Resolutions: []string{"1080p"},
TitleComparisonType: "likely",
EpisodeType: "recent",
EpisodeNumbers: []int{},
Destination: "/data/seanime/library/Dandadan",
ComparisonTitle: "Dandadan",
}
tests := []struct {
torrentName string
succeedAdditionalTermsMatch bool
ruleAdditionalTerms []string
}{
{
torrentName: "[Anime Time] Dandadan - 04 [Dual Audio][1080p][HEVC 10bit x265][AAC][Multi Sub] [Weekly]",
ruleAdditionalTerms: []string{},
succeedAdditionalTermsMatch: true,
},
{
torrentName: "[Anime Time] Dandadan - 04 [Dual Audio][1080p][HEVC 10bit x265][AAC][Multi Sub] [Weekly]",
ruleAdditionalTerms: []string{
"H265,H.265, H 265,x265",
"10bit,10-bit,10 bit",
},
succeedAdditionalTermsMatch: true,
},
{
torrentName: "[Raze] Dandadan - 04 x265 10bit 1080p 143.8561fps.mkv",
ruleAdditionalTerms: []string{
"H265,H.265, H 265,x265",
"10bit,10-bit,10 bit",
},
succeedAdditionalTermsMatch: true,
},
//{ // DEVNOTE: Doesn't pass because of title
// torrentName: "[Sokudo] DAN DA DAN | Dandadan - S01E03 [1080p EAC-3 AV1][Dual Audio] (weekly)",
// ruleAdditionalTerms: []string{
// "H265,H.265, H 265,x265",
// "10bit,10-bit,10 bit",
// },
// succeedAdditionalTermsMatch: false,
//},
{
torrentName: "[Raze] Dandadan - 04 x265 10bit 1080p 143.8561fps.mkv",
ruleAdditionalTerms: []string{
"H265,H.265, H 265,x265",
"10bit,10-bit,10 bit",
"AAC",
},
succeedAdditionalTermsMatch: false,
},
}
for _, tt := range tests {
t.Run(tt.torrentName, func(t *testing.T) {
rule.AdditionalTerms = tt.ruleAdditionalTerms
ok := ad.isTitleMatch(habari.Parse(tt.torrentName), tt.torrentName, rule, aniListEntry)
assert.True(t, ok)
ok = ad.isAdditionalTermsMatch(tt.torrentName, rule)
if tt.succeedAdditionalTermsMatch {
assert.True(t, ok)
} else {
assert.False(t, ok)
}
})
}
}
func TestComparison3(t *testing.T) {
ad := AutoDownloader{
metadataProvider: metadata.GetMockProvider(t),
settings: &models.AutoDownloaderSettings{
EnableSeasonCheck: true,
},
}
name1 := "Dandadan"
name2 := "DAN DA DAN"
aniListEntry := &anilist.AnimeListEntry{
Media: &anilist.BaseAnime{
Title: &anilist.BaseAnime_Title{
Romaji: &name1,
English: &name2,
},
Status: lo.ToPtr(anilist.MediaStatusFinished),
Episodes: lo.ToPtr(12),
Format: lo.ToPtr(anilist.MediaFormatTv),
},
}
rule := &anime.AutoDownloaderRule{
MediaId: 166531,
ReleaseGroups: []string{},
Resolutions: []string{},
TitleComparisonType: "likely",
EpisodeType: "recent",
EpisodeNumbers: []int{},
Destination: "/data/seanime/library/Dandadan",
ComparisonTitle: "Dandadan",
}
tests := []struct {
torrentName string
succeedTitleComparison bool
succeedSeasonAndEpisodeMatch bool
enableSeasonCheck bool
}{
{
torrentName: "[Salieri] Zom 100 Bucket List of the Dead - S1 - BD (1080p) (HDR) [Dual Audio]",
succeedTitleComparison: false,
succeedSeasonAndEpisodeMatch: false,
enableSeasonCheck: false,
},
}
lfw := anime.NewLocalFileWrapper([]*anime.LocalFile{
{
Path: "/data/seanime/library/Dandadan/[SubsPlease] Dandadan - 01 (1080p).mkv",
Name: "Dandadan - 01 (1080p).mkv",
ParsedData: &anime.LocalFileParsedData{
Original: "Dandadan - 01 (1080p).mkv",
Title: "Dandadan",
ReleaseGroup: "SubsPlease",
},
ParsedFolderData: []*anime.LocalFileParsedData{
{
Original: "Dandadan",
Title: "Dandadan",
},
},
Metadata: &anime.LocalFileMetadata{
Episode: 1,
AniDBEpisode: "1",
Type: "main",
},
MediaId: 171018,
},
})
for _, tt := range tests {
t.Run(tt.torrentName, func(t *testing.T) {
ad.settings.EnableSeasonCheck = tt.enableSeasonCheck
p := habari.Parse(tt.torrentName)
if tt.succeedTitleComparison {
require.True(t, ad.isTitleMatch(p, tt.torrentName, rule, aniListEntry))
} else {
require.False(t, ad.isTitleMatch(p, tt.torrentName, rule, aniListEntry))
}
lfwe, ok := lfw.GetLocalEntryById(171018)
require.True(t, ok)
_, ok = ad.isSeasonAndEpisodeMatch(p, rule, aniListEntry, lfwe, []*models.AutoDownloaderItem{})
if tt.succeedSeasonAndEpisodeMatch {
assert.True(t, ok)
} else {
assert.False(t, ok)
}
})
}
}

View File

@@ -0,0 +1,21 @@
package autodownloader
import (
"seanime/internal/library/anime"
"strings"
)
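// GetUniqueReleaseGroups collects the release groups referenced by the given rules and
// de-duplicates them case-insensitively. Illustrative sketch of the expected behavior, with
// hypothetical rule values:
//
//	GetUniqueReleaseGroups([]*anime.AutoDownloaderRule{
//		{ReleaseGroups: []string{"SubsPlease", "Erai-raws"}},
//		{ReleaseGroups: []string{"subsplease"}},
//	})
//	// -> []string{"subsplease", "erai-raws"} (lower-cased, order not guaranteed)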
func GetUniqueReleaseGroups(rules []*anime.AutoDownloaderRule) []string {
uniqueReleaseGroups := make(map[string]string)
for _, rule := range rules {
for _, releaseGroup := range rule.ReleaseGroups {
// make it case-insensitive
uniqueReleaseGroups[strings.ToLower(releaseGroup)] = releaseGroup
}
}
var result []string
for k := range uniqueReleaseGroups {
result = append(result, k)
}
return result
}

View File

@@ -0,0 +1,60 @@
package autodownloader
import (
"seanime/internal/api/anilist"
"seanime/internal/database/models"
"seanime/internal/hook_resolver"
"seanime/internal/library/anime"
)
// AutoDownloaderRunStartedEvent is triggered when the autodownloader starts checking for new episodes.
// Prevent default to abort the run.
type AutoDownloaderRunStartedEvent struct {
hook_resolver.Event
Rules []*anime.AutoDownloaderRule `json:"rules"`
}
// AutoDownloaderTorrentsFetchedEvent is triggered at the beginning of a run, when the autodownloader fetches torrents from the provider.
type AutoDownloaderTorrentsFetchedEvent struct {
hook_resolver.Event
Torrents []*NormalizedTorrent `json:"torrents"`
}
// AutoDownloaderMatchVerifiedEvent is triggered when a torrent has been checked against a rule.
// Prevent default to abort the download when a match was found.
type AutoDownloaderMatchVerifiedEvent struct {
hook_resolver.Event
// Fetched torrent
Torrent *NormalizedTorrent `json:"torrent"`
Rule *anime.AutoDownloaderRule `json:"rule"`
ListEntry *anilist.AnimeListEntry `json:"listEntry"`
LocalEntry *anime.LocalFileWrapperEntry `json:"localEntry"`
// The episode number found for the match
// If the match failed, this will be 0
Episode int `json:"episode"`
// Whether the torrent matches the rule
// Changing this value to true will trigger a download even if the match failed.
MatchFound bool `json:"matchFound"`
}
// AutoDownloaderSettingsUpdatedEvent is triggered when the autodownloader settings are updated
type AutoDownloaderSettingsUpdatedEvent struct {
hook_resolver.Event
Settings *models.AutoDownloaderSettings `json:"settings"`
}
// AutoDownloaderBeforeDownloadTorrentEvent is triggered when the autodownloader is about to download a torrent.
// Prevent default to abort the download.
type AutoDownloaderBeforeDownloadTorrentEvent struct {
hook_resolver.Event
Torrent *NormalizedTorrent `json:"torrent"`
Rule *anime.AutoDownloaderRule `json:"rule"`
Items []*models.AutoDownloaderItem `json:"items"`
}
// AutoDownloaderAfterDownloadTorrentEvent is triggered when the autodownloader has downloaded a torrent.
type AutoDownloaderAfterDownloadTorrentEvent struct {
hook_resolver.Event
Torrent *NormalizedTorrent `json:"torrent"`
Rule *anime.AutoDownloaderRule `json:"rule"`
}
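// exampleRestrictToSelectedEpisodes is an illustrative sketch (not part of the module API) of
// what a hook consumer might do with AutoDownloaderMatchVerifiedEvent: discard a verified match
// when the rule carries an explicit episode selection that does not include the matched episode.
// How hook handlers are registered is outside the scope of this file.
func exampleRestrictToSelectedEpisodes(e *AutoDownloaderMatchVerifiedEvent) {
	if !e.MatchFound || e.Rule == nil || len(e.Rule.EpisodeNumbers) == 0 {
		return
	}
	for _, ep := range e.Rule.EpisodeNumbers {
		if ep == e.Episode {
			return // the matched episode is part of the selection, keep the match
		}
	}
	e.MatchFound = false // episode not selected by the rule, drop the match
}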

View File

@@ -0,0 +1,270 @@
package autoscanner
import (
"context"
"errors"
"seanime/internal/api/metadata"
"seanime/internal/database/db"
"seanime/internal/database/db_bridge"
"seanime/internal/database/models"
"seanime/internal/events"
"seanime/internal/library/autodownloader"
"seanime/internal/library/scanner"
"seanime/internal/library/summary"
"seanime/internal/notifier"
"seanime/internal/platforms/platform"
"seanime/internal/util"
"sync"
"time"
"github.com/rs/zerolog"
)
type (
AutoScanner struct {
fileActionCh chan struct{} // Used to notify the scanner that a file action has occurred.
waiting bool // Used to prevent multiple scans from occurring at the same time.
missedAction bool // Used to indicate that a file action was missed while scanning.
mu sync.Mutex
scannedCh chan struct{}
waitTime time.Duration // Wait time to listen to additional changes before triggering a scan.
enabled bool
settings models.LibrarySettings
platform platform.Platform
logger *zerolog.Logger
wsEventManager events.WSEventManagerInterface
db *db.Database // Database instance is required to update the local files.
autoDownloader *autodownloader.AutoDownloader // AutoDownloader instance is required to refresh queue.
metadataProvider metadata.Provider
logsDir string
}
NewAutoScannerOptions struct {
Database *db.Database
Platform platform.Platform
Logger *zerolog.Logger
WSEventManager events.WSEventManagerInterface
Enabled bool
AutoDownloader *autodownloader.AutoDownloader
WaitTime time.Duration
MetadataProvider metadata.Provider
LogsDir string
}
)
func New(opts *NewAutoScannerOptions) *AutoScanner {
wt := time.Second * 15 // Default wait time is 15 seconds.
if opts.WaitTime > 0 {
wt = opts.WaitTime
}
return &AutoScanner{
fileActionCh: make(chan struct{}, 1),
waiting: false,
missedAction: false,
mu: sync.Mutex{},
scannedCh: make(chan struct{}, 1),
waitTime: wt,
enabled: opts.Enabled,
platform: opts.Platform,
logger: opts.Logger,
wsEventManager: opts.WSEventManager,
db: opts.Database,
autoDownloader: opts.AutoDownloader,
metadataProvider: opts.MetadataProvider,
logsDir: opts.LogsDir,
}
}
// Notify is used to notify the AutoScanner that a file action has occurred.
func (as *AutoScanner) Notify() {
if as == nil {
return
}
defer util.HandlePanicInModuleThen("scanner/autoscanner/Notify", func() {
as.logger.Error().Msg("autoscanner: recovered from panic")
})
as.mu.Lock()
defer as.mu.Unlock()
// If we are currently scanning, we will set the missedAction flag to true.
if as.waiting {
as.missedAction = true
return
}
if as.enabled {
go func() {
// Otherwise, we will send a signal to the fileActionCh.
as.fileActionCh <- struct{}{}
}()
}
}
// Start starts the AutoScanner in a goroutine.
func (as *AutoScanner) Start() {
go func() {
if as.enabled {
as.logger.Info().Msg("autoscanner: Module started")
}
as.watch()
}()
}
// SetSettings should be called after the settings are fetched and updated from the database.
func (as *AutoScanner) SetSettings(settings models.LibrarySettings) {
as.mu.Lock()
defer as.mu.Unlock()
as.enabled = settings.AutoScan
as.settings = settings
}
// watch is used to watch for file actions and trigger a scan.
// When a file action occurs, it waits for the configured wait time (15 seconds by default) before triggering a scan.
// If another file action occurs during that window, the wait is restarted once the timer expires.
// After the wait has elapsed with no further actions, a scan is triggered.
// When a scan is complete, the missedAction flag is checked and another scan is triggered if necessary.
func (as *AutoScanner) watch() {
defer util.HandlePanicInModuleThen("scanner/autoscanner/watch", func() {
as.logger.Error().Msg("autoscanner: recovered from panic")
})
for {
// Block until the file action channel is ready to receive a signal.
<-as.fileActionCh
as.waitAndScan()
}
}
// waitAndScan is used to wait for additional file actions before triggering a scan.
func (as *AutoScanner) waitAndScan() {
as.logger.Trace().Msgf("autoscanner: File action occurred, waiting %v seconds before triggering a scan.", as.waitTime.Seconds())
as.mu.Lock()
as.waiting = true // Set the scanning flag to true.
as.missedAction = false // Reset the missedAction flag.
as.mu.Unlock()
// Wait for the configured wait time before triggering a scan.
// If another file action occurs during this window, the wait is restarted once the timer expires.
<-time.After(as.waitTime)
as.mu.Lock()
// If a file action occurred while we were waiting, we will trigger another scan.
if as.missedAction {
as.logger.Trace().Msg("autoscanner: Missed file action")
as.mu.Unlock()
as.waitAndScan()
return
}
as.waiting = false
as.mu.Unlock()
// Trigger a scan.
as.scan()
}
// RunNow bypasses checks and triggers a scan immediately, even if the autoscanner is disabled.
func (as *AutoScanner) RunNow() {
as.scan()
}
// scan is used to trigger a scan.
func (as *AutoScanner) scan() {
defer util.HandlePanicInModuleThen("scanner/autoscanner/scan", func() {
as.logger.Error().Msg("autoscanner: Recovered from panic")
})
// Create scan summary logger
scanSummaryLogger := summary.NewScanSummaryLogger()
as.logger.Trace().Msg("autoscanner: Starting scanner")
as.wsEventManager.SendEvent(events.AutoScanStarted, nil)
defer as.wsEventManager.SendEvent(events.AutoScanCompleted, nil)
settings, err := as.db.GetSettings()
if err != nil || settings == nil {
as.logger.Error().Err(err).Msg("autoscanner: Failed to get settings")
return
}
if settings.Library.LibraryPath == "" {
as.logger.Error().Msg("autoscanner: Library path is not set")
return
}
// Get existing local files
existingLfs, _, err := db_bridge.GetLocalFiles(as.db)
if err != nil {
as.logger.Error().Err(err).Msg("autoscanner: Failed to get existing local files")
return
}
// Create a new scan logger
var scanLogger *scanner.ScanLogger
if as.logsDir != "" {
scanLogger, err = scanner.NewScanLogger(as.logsDir)
if err != nil {
as.logger.Error().Err(err).Msg("autoscanner: Failed to create scan logger")
return
}
defer scanLogger.Done()
}
// Create a new scanner
sc := scanner.Scanner{
DirPath: settings.Library.LibraryPath,
OtherDirPaths: settings.Library.LibraryPaths,
Enhanced: false, // Do not use enhanced mode for auto scanner.
Platform: as.platform,
Logger: as.logger,
WSEventManager: as.wsEventManager,
ExistingLocalFiles: existingLfs,
SkipLockedFiles: true, // Skip locked files by default.
SkipIgnoredFiles: true,
ScanSummaryLogger: scanSummaryLogger,
ScanLogger: scanLogger,
MetadataProvider: as.metadataProvider,
MatchingThreshold: as.settings.ScannerMatchingThreshold,
MatchingAlgorithm: as.settings.ScannerMatchingAlgorithm,
}
allLfs, err := sc.Scan(context.Background())
if err != nil {
if errors.Is(err, scanner.ErrNoLocalFiles) {
return
} else {
as.logger.Error().Err(err).Msg("autoscanner: Failed to scan library")
return
}
}
if as.db != nil && len(allLfs) > 0 {
as.logger.Trace().Msg("autoscanner: Updating local files")
// Insert the local files
_, err = db_bridge.InsertLocalFiles(as.db, allLfs)
if err != nil {
as.logger.Error().Err(err).Msg("failed to insert local files")
return
}
}
// Save the scan summary
err = db_bridge.InsertScanSummary(as.db, scanSummaryLogger.GenerateSummary())
if err != nil {
as.logger.Error().Err(err).Msg("failed to insert scan summary")
}
// Refresh the queue
go as.autoDownloader.CleanUpDownloadedItems()
notifier.GlobalNotifier.Notify(notifier.AutoScanner, "Your library has been scanned.")
return
}

View File

@@ -0,0 +1,9 @@
package autoscanner
import (
"testing"
)
func TestAutoScanner(t *testing.T) {
}

View File

@@ -0,0 +1,67 @@
package filesystem
import (
"errors"
"github.com/rs/zerolog"
"os"
"path/filepath"
)
// RemoveEmptyDirectories deletes all empty directories in a given directory.
// It ignores errors.
func RemoveEmptyDirectories(root string, logger *zerolog.Logger) {
_ = filepath.Walk(root, func(path string, info os.FileInfo, err error) error {
if err != nil {
return nil
}
// Skip the root directory
if path == root {
return nil
}
if info.IsDir() {
// Check if the directory is empty
isEmpty, err := isDirectoryEmpty(path)
if err != nil {
return nil
}
// Delete the empty directory (errors are only logged, then ignored)
if isEmpty {
if err := os.Remove(path); err != nil {
logger.Warn().Err(err).Str("path", path).Msg("filesystem: Could not delete empty directory")
} else {
logger.Info().Str("path", path).Msg("filesystem: Deleted empty directory")
}
}
}
return nil
})
}
func isDirectoryEmpty(path string) (bool, error) {
dir, err := os.Open(path)
if err != nil {
return false, err
}
defer dir.Close()
_, err = dir.Readdir(1)
if err == nil {
// Directory is not empty
return false, nil
}
if errors.Is(err, os.ErrNotExist) {
// Directory does not exist
return false, nil
}
// Directory is empty (Readdir returns io.EOF when there are no entries)
return true, nil
}

View File

@@ -0,0 +1,16 @@
package filesystem
import (
"seanime/internal/util"
"testing"
)
func TestDeleteEmptyDirectories(t *testing.T) {
path := "E:/ANIME_TEST"
RemoveEmptyDirectories(path, util.NewLogger())
t.Log("All empty directories removed successfully.")
}

View File

@@ -0,0 +1,193 @@
package filesystem
import (
"errors"
"fmt"
"io/fs"
"os"
"path/filepath"
"seanime/internal/util"
"sort"
"strings"
)
type SeparatedFilePath struct {
Filename string
Dirnames []string
PrefixPath string
}
// SeparateFilePath separates a path into a filename and a slice of dirnames while ignoring the prefix.
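//
// Example (illustrative, the paths are placeholders):
//
// sep := SeparateFilePath("/mnt/anime/Show/Season 1/ep01.mkv", "/mnt/anime/")
// fmt.Println(sep.Filename) // ep01.mkv
// fmt.Println(sep.Dirnames) // [Show Season 1]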
func SeparateFilePath(path string, prefixPath string) *SeparatedFilePath {
path = filepath.ToSlash(path)
prefixPath = filepath.ToSlash(prefixPath)
cleaned := path
if strings.HasPrefix(strings.ToLower(path), strings.ToLower(prefixPath)) {
cleaned = path[len(prefixPath):] // Remove prefix
}
fp := filepath.Base(filepath.ToSlash(path))
parentsPath := filepath.Dir(filepath.ToSlash(cleaned))
if parentsPath == "." || parentsPath == "/" || parentsPath == ".." {
parentsPath = ""
}
return &SeparatedFilePath{
Filename: fp,
Dirnames: strings.Split(parentsPath, "/"),
PrefixPath: prefixPath,
}
}
// SeparateFilePathS separates a path into a filename and a slice of dirnames while ignoring the prefix.
// Unlike [SeparateFilePath], it will check multiple prefixes.
//
// Example:
//
// path = "/path/to/file.mkv"
// potentialPrefixes = []string{"/path/to", "/path"}
// fp, dirs := SeparateFilePathS(path, potentialPrefixes)
// fmt.Println(fp) // file.mkv
// fmt.Println(dirs) // [to]
func SeparateFilePathS(path string, potentialPrefixes []string) *SeparatedFilePath {
// Sort prefix paths by length in descending order
sort.Slice(potentialPrefixes, func(i, j int) bool {
return len(potentialPrefixes[i]) > len(potentialPrefixes[j])
})
// Check each prefix path, and remove the first match from the path
prefixPath := ""
for _, p := range potentialPrefixes {
// Normalize the paths for comparison only
if strings.HasPrefix(util.NormalizePath(path), util.NormalizePath(p)) {
// Remove the prefix from the path
path = path[len(p):]
prefixPath = p
break
}
}
filename := filepath.ToSlash(filepath.Base(path))
parentsPath := filepath.ToSlash(filepath.Dir(filepath.ToSlash(path)))
dirs := make([]string, 0)
for _, dir := range strings.Split(parentsPath, "/") {
if dir != "" {
dirs = append(dirs, dir)
}
}
return &SeparatedFilePath{
Filename: filename,
Dirnames: dirs,
PrefixPath: prefixPath,
}
}
// GetMediaFilePathsFromDir returns a slice of strings containing the paths of all the media files in a directory.
// Deprecated: Use GetMediaFilePathsFromDirS instead.
func GetMediaFilePathsFromDir(dirPath string) ([]string, error) {
filePaths := make([]string, 0)
err := filepath.WalkDir(dirPath, func(path string, d fs.DirEntry, err error) error {
if err != nil {
return err
}
ext := strings.ToLower(filepath.Ext(path))
if !d.IsDir() && util.IsValidVideoExtension(ext) {
filePaths = append(filePaths, path)
}
return nil
})
if err != nil {
return nil, errors.New("could not traverse the local directory")
}
return filePaths, nil
}
// GetMediaFilePathsFromDirS returns a slice of strings containing the paths of all the video files in a directory.
// Unlike GetMediaFilePathsFromDir, it follows symlinks.
func GetMediaFilePathsFromDirS(oDirPath string) ([]string, error) {
filePaths := make([]string, 0)
visited := make(map[string]bool)
// Normalize the initial directory path
dirPath, err := filepath.Abs(oDirPath)
if err != nil {
return nil, fmt.Errorf("could not resolve path: %w", err)
}
var walkDir func(string) error
walkDir = func(oCurrentPath string) error {
currentPath := oCurrentPath
// Normalize current path
resolvedPath, err := filepath.EvalSymlinks(oCurrentPath)
if err == nil {
currentPath = resolvedPath
}
if visited[currentPath] {
return nil
}
visited[currentPath] = true
return filepath.WalkDir(currentPath, func(path string, d fs.DirEntry, err error) error {
if err != nil {
return nil
}
// If it's a symlink directory, resolve and walk the symlink
info, err := os.Lstat(path)
if err != nil {
return nil
}
if info.Mode()&os.ModeSymlink != 0 {
linkPath, err := os.Readlink(path)
if err != nil {
return nil
}
// Resolve the symlink to an absolute path
if !filepath.IsAbs(linkPath) {
linkPath = filepath.Join(filepath.Dir(path), linkPath)
}
// Only follow the symlink if we can access it
if _, err := os.Stat(linkPath); err == nil {
return walkDir(linkPath)
}
return nil
}
if d.IsDir() {
return nil
}
ext := strings.ToLower(filepath.Ext(path))
if util.IsValidMediaFile(path) && util.IsValidVideoExtension(ext) {
filePaths = append(filePaths, path)
}
return nil
})
}
if err = walkDir(dirPath); err != nil {
return nil, fmt.Errorf("could not traverse directory %s: %w", dirPath, err)
}
return filePaths, nil
}
//----------------------------------------------------------------------------------------------------------------------
func FileExists(filePath string) bool {
_, err := os.Stat(filePath)
return !errors.Is(err, os.ErrNotExist)
}
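// DEVNOTE (illustrative sketch, written from a calling package): collecting all video
// files under a library root while following symlinked folders. The root path is a placeholder.
//
// paths, err := filesystem.GetMediaFilePathsFromDirS("/mnt/anime")
// if err != nil {
// return err
// }
// for _, p := range paths {
// sep := filesystem.SeparateFilePathS(p, []string{"/mnt/anime"})
// fmt.Println(sep.Filename, sep.Dirnames)
// }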

View File

@@ -0,0 +1,133 @@
package filesystem
import (
"fmt"
"os"
"path/filepath"
"seanime/internal/util"
"testing"
"github.com/stretchr/testify/assert"
)
func TestSeparateFilePathS(t *testing.T) {
tests := []struct {
path string
potentialPrefixes []string
expected *SeparatedFilePath
}{
{
path: "/path/to/file.mkv",
potentialPrefixes: []string{"/path/to", "/path"},
expected: &SeparatedFilePath{Filename: "file.mkv", Dirnames: []string{}},
},
{
path: "/path/TO/to/file.mkv",
potentialPrefixes: []string{"/path"},
expected: &SeparatedFilePath{Filename: "file.mkv", Dirnames: []string{"TO", "to"}},
},
{
path: "/path/to/file2.mkv",
potentialPrefixes: []string{},
expected: &SeparatedFilePath{Filename: "file2.mkv", Dirnames: []string{"path", "to"}},
},
{
path: "/mnt/Anime/Bungou Stray Dogs/Bungou Stray Dogs 5th Season/[SubsPlease] Bungou Stray Dogs - 61 (1080p) [F609B947].mkv",
potentialPrefixes: []string{"/mnt/Anime", "/mnt/Anime/", "/mnt", "/var/"},
expected: &SeparatedFilePath{Filename: "[SubsPlease] Bungou Stray Dogs - 61 (1080p) [F609B947].mkv", Dirnames: []string{"Bungou Stray Dogs", "Bungou Stray Dogs 5th Season"}},
},
}
for _, tt := range tests {
t.Run(tt.path, func(t *testing.T) {
fmt.Println("Here")
res := SeparateFilePathS(tt.path, tt.potentialPrefixes)
assert.Equal(t, tt.expected.Filename, res.Filename)
assert.Equal(t, tt.expected.Dirnames, res.Dirnames)
})
}
}
// Test with symlinks
func TestGetVideoFilePathsFromDir_WithSymlinks(t *testing.T) {
tmpDir := t.TempDir()
libDir := filepath.Join(tmpDir, "library")
externalLibDir := t.TempDir()
os.Mkdir(libDir, 0755)
// Create files in the external directory
createFile(t, filepath.Join(externalLibDir, "external_video1.mkv"))
createFile(t, filepath.Join(externalLibDir, "external_video2.mp4"))
// Create directories and files
dir1 := filepath.Join(libDir, "Anime1")
os.Mkdir(dir1, 0755)
createFile(t, filepath.Join(dir1, "Anime1_1.mkv"))
createFile(t, filepath.Join(dir1, "Anime1_2.mp4"))
dir2 := filepath.Join(libDir, "Anime2")
os.Mkdir(dir2, 0755)
createFile(t, filepath.Join(dir2, "Anime2_1.mkv"))
// Create a symlink to the external directory
symlinkPath := filepath.Join(libDir, "symlink_to_external")
if err := os.Symlink(externalLibDir, symlinkPath); err != nil {
t.Fatalf("Failed to create symlink: %s", err)
}
// Create a recursive symlink to the library directory
symlinkToLibPath := filepath.Join(externalLibDir, "symlink_to_library")
if err := os.Symlink(libDir, symlinkToLibPath); err != nil {
t.Fatalf("Failed to create symlink: %s", err)
}
// Expected files
expectedPaths := []string{
filepath.Join(dir1, "Anime1_1.mkv"),
filepath.Join(dir1, "Anime1_2.mp4"),
filepath.Join(dir2, "Anime2_1.mkv"),
filepath.Join(externalLibDir, "external_video1.mkv"),
filepath.Join(externalLibDir, "external_video2.mp4"),
}
filePaths, err := GetMediaFilePathsFromDirS(libDir)
if err != nil {
t.Fatalf("Unexpected error: %s", err)
}
util.Spew(filePaths)
// Check results
for _, expected := range expectedPaths {
found := false
for _, path := range filePaths {
// if path == expected {
// found = true
// break
// }
// Compare the paths using stdlib
info1, err := os.Stat(path)
if err != nil {
t.Fatalf("Failed to get file info for %s: %s", path, err)
}
info2, err := os.Stat(expected)
if err != nil {
t.Fatalf("Failed to get file info for %s: %s", expected, err)
}
if os.SameFile(info1, info2) {
found = true
break
}
}
if !found {
t.Errorf("Expected file path %s not found in result", expected)
}
}
}
func createFile(t *testing.T, path string) {
file, err := os.Create(path)
if err != nil {
t.Fatalf("Failed to create file: %s", err)
}
defer file.Close()
}

View File

@@ -0,0 +1,289 @@
package fillermanager
import (
"seanime/internal/api/filler"
"seanime/internal/database/db"
"seanime/internal/hook"
"seanime/internal/library/anime"
"seanime/internal/onlinestream"
"seanime/internal/util"
"strconv"
"sync"
"time"
"github.com/rs/zerolog"
lop "github.com/samber/lo/parallel"
)
type (
Interface interface {
// RefetchFillerData re-fetches the fillers for the given media IDs
RefetchFillerData() error
// HasFillerFetched checks if the fillers for the given media ID have been fetched
HasFillerFetched(mediaId int) bool
// FetchAndStoreFillerData fetches the filler data for the given media ID
FetchAndStoreFillerData(mediaId int, titles []string) error
// RemoveFillerData removes the filler data for the given media ID
RemoveFillerData(mediaId int) error
// IsEpisodeFiller checks if the given episode number is a filler for the given media ID
IsEpisodeFiller(mediaId int, episodeNumber int) bool
}
FillerManager struct {
db *db.Database
logger *zerolog.Logger
fillerApi filler.API
}
NewFillerManagerOptions struct {
DB *db.Database
Logger *zerolog.Logger
}
)
func New(opts *NewFillerManagerOptions) *FillerManager {
return &FillerManager{
db: opts.DB,
logger: opts.Logger,
fillerApi: filler.NewAnimeFillerList(opts.Logger),
}
}
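// DEVNOTE (illustrative sketch): the typical flow is to fetch the filler data once for a
// media ID, then query individual episodes. The media ID and titles are placeholders.
//
// fm := fillermanager.New(&fillermanager.NewFillerManagerOptions{DB: database, Logger: logger})
// if !fm.HasFillerFetched(21) {
// _ = fm.FetchAndStoreFillerData(21, []string{"One Piece"})
// }
// isFiller := fm.IsEpisodeFiller(21, 54) // true if episode 54 is listed as filler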
func (fm *FillerManager) RefetchFillerData() error {
defer util.HandlePanicInModuleThen("library/fillermanager/RefetchFillerData", func() {
fm.logger.Error().Msg("fillermanager: Failed to re-fetch filler data")
})
wg := sync.WaitGroup{}
fm.logger.Debug().Msg("fillermanager: Re-fetching filler data")
mediaFillers, err := fm.db.GetCachedMediaFillers()
if err != nil {
return err
}
for _, mf := range mediaFillers {
wg.Add(1)
go func(mf *db.MediaFillerItem) {
defer wg.Done()
// Fetch the filler data
fillerData, err := fm.fillerApi.FindFillerData(mf.Slug)
if err != nil {
fm.logger.Error().Err(err).Int("mediaId", mf.MediaId).Msg("fillermanager: Failed to fetch filler data")
return
}
// Update the filler data
mf.FillerEpisodes = fillerData.FillerEpisodes
}(mf)
}
wg.Wait()
err = fm.db.SaveCachedMediaFillerItems()
if err != nil {
return err
}
fm.logger.Debug().Msg("fillermanager: Re-fetched filler data")
return nil
}
func (fm *FillerManager) HasFillerFetched(mediaId int) bool {
defer util.HandlePanicInModuleThen("library/fillermanager/HasFillerFetched", func() {
})
_, ok := fm.db.GetMediaFillerItem(mediaId)
return ok
}
func (fm *FillerManager) GetFillerEpisodes(mediaId int) ([]string, bool) {
defer util.HandlePanicInModuleThen("library/fillermanager/GetFillerEpisodes", func() {
})
fillerItem, ok := fm.db.GetMediaFillerItem(mediaId)
if !ok {
return nil, false
}
return fillerItem.FillerEpisodes, true
}
func (fm *FillerManager) FetchAndStoreFillerData(mediaId int, titles []string) error {
defer util.HandlePanicInModuleThen("library/fillermanager/FetchAndStoreFillerData", func() {
})
fm.logger.Debug().Int("mediaId", mediaId).Msg("fillermanager: Fetching filler data")
res, err := fm.fillerApi.Search(filler.SearchOptions{
Titles: titles,
})
if err != nil {
return err
}
fm.logger.Debug().Int("mediaId", mediaId).Str("slug", res.Slug).Msg("fillermanager: Fetched filler data")
return fm.fetchAndStoreFillerDataFromSlug(mediaId, res.Slug)
}
func (fm *FillerManager) fetchAndStoreFillerDataFromSlug(mediaId int, slug string) error {
defer util.HandlePanicInModuleThen("library/fillermanager/FetchAndStoreFillerDataFromSlug", func() {
})
fillerData, err := fm.fillerApi.FindFillerData(slug)
if err != nil {
return err
}
err = fm.db.InsertMediaFiller(
"animefillerlist",
mediaId,
slug,
time.Now(),
fillerData.FillerEpisodes,
)
if err != nil {
return err
}
return nil
}
func (fm *FillerManager) StoreFillerData(source string, slug string, mediaId int, fillerEpisodes []string) error {
defer util.HandlePanicInModuleThen("library/fillermanager/StoreFillerDataForMedia", func() {
})
return fm.db.InsertMediaFiller(
source,
mediaId,
slug,
time.Now(),
fillerEpisodes,
)
}
func (fm *FillerManager) RemoveFillerData(mediaId int) error {
defer util.HandlePanicInModuleThen("library/fillermanager/RemoveFillerData", func() {
})
fm.logger.Debug().Int("mediaId", mediaId).Msg("fillermanager: Removing filler data")
return fm.db.DeleteMediaFiller(mediaId)
}
func (fm *FillerManager) IsEpisodeFiller(mediaId int, episodeNumber int) bool {
defer util.HandlePanicInModuleThen("library/fillermanager/IsEpisodeFiller", func() {
})
mediaFillerData, ok := fm.db.GetMediaFillerItem(mediaId)
if !ok {
return false
}
if len(mediaFillerData.FillerEpisodes) == 0 {
return false
}
for _, ep := range mediaFillerData.FillerEpisodes {
if ep == strconv.Itoa(episodeNumber) {
return true
}
}
return false
}
func (fm *FillerManager) HydrateFillerData(e *anime.Entry) {
if fm == nil {
return
}
if e == nil || e.Media == nil || e.Episodes == nil || len(e.Episodes) == 0 {
return
}
event := &HydrateFillerDataRequestedEvent{
Entry: e,
}
_ = hook.GlobalHookManager.OnHydrateFillerDataRequested().Trigger(event)
if event.DefaultPrevented {
return
}
e = event.Entry
// Check if the filler data has been fetched
if !fm.HasFillerFetched(e.Media.ID) {
return
}
lop.ForEach(e.Episodes, func(ep *anime.Episode, _ int) {
if ep == nil || ep.EpisodeMetadata == nil {
return
}
ep.EpisodeMetadata.IsFiller = fm.IsEpisodeFiller(e.Media.ID, ep.EpisodeNumber)
})
}
func (fm *FillerManager) HydrateOnlinestreamFillerData(mId int, episodes []*onlinestream.Episode) {
if fm == nil {
return
}
if episodes == nil || len(episodes) == 0 {
return
}
event := &HydrateOnlinestreamFillerDataRequestedEvent{
Episodes: episodes,
}
_ = hook.GlobalHookManager.OnHydrateOnlinestreamFillerDataRequested().Trigger(event)
if event.DefaultPrevented {
return
}
episodes = event.Episodes
// Check if the filler data has been fetched
if !fm.HasFillerFetched(mId) {
return
}
for _, ep := range episodes {
ep.IsFiller = fm.IsEpisodeFiller(mId, ep.Number)
}
}
func (fm *FillerManager) HydrateEpisodeFillerData(mId int, episodes []*anime.Episode) {
if fm == nil || len(episodes) == 0 {
return
}
event := &HydrateEpisodeFillerDataRequestedEvent{
Episodes: episodes,
}
_ = hook.GlobalHookManager.OnHydrateEpisodeFillerDataRequested().Trigger(event)
if event.DefaultPrevented {
return
}
episodes = event.Episodes
// Check if the filler data has been fetched
if !fm.HasFillerFetched(mId) {
return
}
lop.ForEach(episodes, func(e *anime.Episode, _ int) {
//h.App.FillerManager.HydrateEpisodeFillerData(mId, e)
e.EpisodeMetadata.IsFiller = fm.IsEpisodeFiller(mId, e.EpisodeNumber)
})
}

View File

@@ -0,0 +1,31 @@
package fillermanager
import (
"seanime/internal/hook_resolver"
"seanime/internal/library/anime"
"seanime/internal/onlinestream"
)
// HydrateFillerDataRequestedEvent is triggered when the filler manager requests to hydrate the filler data for an entry.
// This is used by the local file episode list.
// Prevent default to skip the default behavior and return your own data.
type HydrateFillerDataRequestedEvent struct {
hook_resolver.Event
Entry *anime.Entry `json:"entry"`
}
// HydrateOnlinestreamFillerDataRequestedEvent is triggered when the filler manager requests to hydrate the filler data for online streaming episodes.
// This is used by the online streaming episode list.
// Prevent default to skip the default behavior and return your own data.
type HydrateOnlinestreamFillerDataRequestedEvent struct {
hook_resolver.Event
Episodes []*onlinestream.Episode `json:"episodes"`
}
// HydrateEpisodeFillerDataRequestedEvent is triggered when the filler manager requests to hydrate the filler data for specific episodes.
// This is used by the torrent and debrid streaming episode list.
// Prevent default to skip the default behavior and return your own data.
type HydrateEpisodeFillerDataRequestedEvent struct {
hook_resolver.Event
Episodes []*anime.Episode `json:"episodes"`
}

View File

@@ -0,0 +1,60 @@
package playbackmanager
import (
"seanime/internal/api/anilist"
"seanime/internal/hook_resolver"
"seanime/internal/library/anime"
)
// LocalFilePlaybackRequestedEvent is triggered when a local file is requested to be played.
// Prevent default to skip the default playback and override the playback.
type LocalFilePlaybackRequestedEvent struct {
hook_resolver.Event
Path string `json:"path"`
}
// StreamPlaybackRequestedEvent is triggered when a stream is requested to be played.
// Prevent default to skip the default playback and override the playback.
type StreamPlaybackRequestedEvent struct {
hook_resolver.Event
WindowTitle string `json:"windowTitle"`
Payload string `json:"payload"`
Media *anilist.BaseAnime `json:"media"`
AniDbEpisode string `json:"aniDbEpisode"`
}
// PlaybackBeforeTrackingEvent is triggered just before the playback tracking starts.
// Prevent default to skip playback tracking.
type PlaybackBeforeTrackingEvent struct {
hook_resolver.Event
IsStream bool `json:"isStream"`
}
// PlaybackLocalFileDetailsRequestedEvent is triggered when the local files details for a specific path are requested.
// This event is triggered right after the media player loads an episode.
// The playback manager uses the local files details to track the progress, propose next episodes, etc.
// In the current implementation, the details are fetched by selecting the local file from the database and making requests to retrieve the media and anime list entry.
// Prevent default to skip the default fetching and override the details.
type PlaybackLocalFileDetailsRequestedEvent struct {
hook_resolver.Event
Path string `json:"path"`
// List of all local files
LocalFiles []*anime.LocalFile `json:"localFiles"`
// Empty anime list entry
AnimeListEntry *anilist.AnimeListEntry `json:"animeListEntry"`
// Empty local file
LocalFile *anime.LocalFile `json:"localFile"`
// Empty local file wrapper entry
LocalFileWrapperEntry *anime.LocalFileWrapperEntry `json:"localFileWrapperEntry"`
}
// PlaybackStreamDetailsRequestedEvent is triggered when the stream details are requested.
// Prevent default to skip the default fetching and override the details.
// In the current implementation, the details are fetched by selecting the anime from the anime collection. If nothing is found, the stream is still tracked.
type PlaybackStreamDetailsRequestedEvent struct {
hook_resolver.Event
AnimeCollection *anilist.AnimeCollection `json:"animeCollection"`
MediaId int `json:"mediaId"`
// Empty anime list entry
AnimeListEntry *anilist.AnimeListEntry `json:"animeListEntry"`
}

View File

@@ -0,0 +1,143 @@
package playbackmanager
import (
"context"
"fmt"
"seanime/internal/api/anilist"
"seanime/internal/events"
"seanime/internal/util"
"time"
"github.com/samber/mo"
)
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Manual progress tracking
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
type ManualTrackingState struct {
EpisodeNumber int
MediaId int
CurrentProgress int
TotalEpisodes int
}
type StartManualProgressTrackingOptions struct {
ClientId string
MediaId int
EpisodeNumber int
}
func (pm *PlaybackManager) CancelManualProgressTracking() {
pm.mu.Lock()
defer pm.mu.Unlock()
if pm.manualTrackingCtxCancel != nil {
pm.manualTrackingCtxCancel()
pm.currentManualTrackingState = mo.None[*ManualTrackingState]()
}
}
func (pm *PlaybackManager) StartManualProgressTracking(opts *StartManualProgressTrackingOptions) (err error) {
defer util.HandlePanicInModuleWithError("library/playbackmanager/StartManualProgressTracking", &err)
ctx := context.Background()
pm.mu.Lock()
defer pm.mu.Unlock()
pm.Logger.Trace().Msg("playback manager: Starting manual progress tracking")
// Cancel manual tracking if active
if pm.manualTrackingCtxCancel != nil {
pm.Logger.Trace().Msg("playback manager: Cancelling previous manual tracking context")
pm.manualTrackingCtxCancel()
pm.manualTrackingWg.Wait()
}
// Get the media
// - Find the media in the collection
animeCollection, err := pm.platform.GetAnimeCollection(ctx, false)
if err != nil {
return err
}
var media *anilist.BaseAnime
var currentProgress int
var totalEpisodes int
listEntry, found := animeCollection.GetListEntryFromAnimeId(opts.MediaId)
if found {
media = listEntry.Media
} else {
// Fetch the media from AniList
media, err = pm.platform.GetAnime(ctx, opts.MediaId)
if err != nil {
return err
}
}
if media == nil {
pm.Logger.Error().Msg("playback manager: Media not found for manual tracking")
return fmt.Errorf("media not found")
}
currentProgress = 0
if listEntry != nil && listEntry.GetProgress() != nil {
currentProgress = *listEntry.GetProgress()
}
totalEpisodes = media.GetTotalEpisodeCount()
// Set the current playback type (for progress update later on)
pm.currentPlaybackType = ManualTrackingPlayback
// Set the manual tracking state (for progress update later on)
pm.currentManualTrackingState = mo.Some(&ManualTrackingState{
EpisodeNumber: opts.EpisodeNumber,
MediaId: opts.MediaId,
CurrentProgress: currentProgress,
TotalEpisodes: totalEpisodes,
})
pm.Logger.Trace().
Int("episode_number", opts.EpisodeNumber).
Int("mediaId", opts.MediaId).
Int("currentProgress", currentProgress).
Int("totalEpisodes", totalEpisodes).
Msg("playback manager: Starting manual progress tracking")
// Start sending the manual tracking events
pm.manualTrackingWg.Add(1)
go func() {
defer pm.manualTrackingWg.Done()
// Create a new context
pm.manualTrackingCtx, pm.manualTrackingCtxCancel = context.WithCancel(context.Background())
defer func() {
if pm.manualTrackingCtxCancel != nil {
pm.manualTrackingCtxCancel()
}
}()
for {
select {
case <-pm.manualTrackingCtx.Done():
pm.Logger.Debug().Msg("playback manager: Manual progress tracking canceled")
pm.wsEventManager.SendEvent(events.PlaybackManagerManualTrackingStopped, nil)
return
default:
ps := playbackStatePool.Get().(*PlaybackState)
ps.EpisodeNumber = opts.EpisodeNumber
ps.MediaTitle = *media.GetTitle().GetUserPreferred()
ps.MediaTotalEpisodes = totalEpisodes
ps.Filename = ""
ps.CompletionPercentage = 0
ps.CanPlayNext = false
ps.ProgressUpdated = false
ps.MediaId = opts.MediaId
pm.wsEventManager.SendEvent(events.PlaybackManagerManualTrackingPlaybackState, ps)
playbackStatePool.Put(ps)
// Continuously send the progress to the client
time.Sleep(3 * time.Second)
}
}
}()
return nil
}
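// DEVNOTE (illustrative sketch): manual tracking is typically started from a route handler
// when the user plays an episode in a non-integrated external player. The IDs are placeholders.
//
// err := pm.StartManualProgressTracking(&playbackmanager.StartManualProgressTrackingOptions{
// ClientId: clientId,
// MediaId: 21,
// EpisodeNumber: 1088,
// })
// ...
// pm.CancelManualProgressTracking() // stop sending manual tracking playback states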

View File

@@ -0,0 +1,98 @@
package playbackmanager
import (
"context"
"fmt"
"seanime/internal/database/db_bridge"
"seanime/internal/library/anime"
"github.com/samber/lo"
)
type StartRandomVideoOptions struct {
UserAgent string
ClientId string
}
// StartRandomVideo starts a random video from the collection.
// Note that this might not be suitable if the user has multiple seasons of the same anime.
func (pm *PlaybackManager) StartRandomVideo(opts *StartRandomVideoOptions) error {
pm.playlistHub.reset()
if err := pm.checkOrLoadAnimeCollection(); err != nil {
return err
}
animeCollection, err := pm.platform.GetAnimeCollection(context.Background(), false)
if err != nil {
return err
}
//
// Retrieve random episode
//
// Get lfs
lfs, _, err := db_bridge.GetLocalFiles(pm.Database)
if err != nil {
return fmt.Errorf("error getting local files: %s", err.Error())
}
// Create a local file wrapper
lfw := anime.NewLocalFileWrapper(lfs)
// Get entries (grouped by media id)
lfEntries := lfw.GetLocalEntries()
lfEntries = lo.Filter(lfEntries, func(e *anime.LocalFileWrapperEntry, _ int) bool {
return e.HasMainLocalFiles()
})
if len(lfEntries) == 0 {
return fmt.Errorf("no playable media found")
}
continueLfs := make([]*anime.LocalFile, 0)
otherLfs := make([]*anime.LocalFile, 0)
for _, e := range lfEntries {
anilistEntry, ok := animeCollection.GetListEntryFromAnimeId(e.GetMediaId())
if !ok {
continue
}
progress := 0
if anilistEntry.Progress != nil {
progress = *anilistEntry.Progress
}
if anilistEntry.Status == nil || *anilistEntry.Status == "COMPLETED" {
continue
}
firstUnwatchedFile, found := e.GetFirstUnwatchedLocalFiles(progress)
if !found {
continue
}
if *anilistEntry.Status == "CURRENT" || *anilistEntry.Status == "REPEATING" {
continueLfs = append(continueLfs, firstUnwatchedFile)
} else {
otherLfs = append(otherLfs, firstUnwatchedFile)
}
}
if len(continueLfs) == 0 && len(otherLfs) == 0 {
return fmt.Errorf("no playable file found")
}
lfs = append(continueLfs, otherLfs...)
// Only choose from continueLfs if it has more than 8 entries
if len(continueLfs) > 8 {
lfs = continueLfs
}
lfs = lo.Shuffle(lfs)
err = pm.StartPlayingUsingMediaPlayer(&StartPlayingOptions{
Payload: lfs[0].GetPath(),
UserAgent: opts.UserAgent,
ClientId: opts.ClientId,
})
if err != nil {
return err
}
return nil
}

View File

@@ -0,0 +1,731 @@
package playbackmanager
import (
"context"
"errors"
"fmt"
"seanime/internal/api/anilist"
"seanime/internal/api/metadata"
"seanime/internal/continuity"
"seanime/internal/database/db"
"seanime/internal/database/db_bridge"
discordrpc_presence "seanime/internal/discordrpc/presence"
"seanime/internal/events"
"seanime/internal/hook"
"seanime/internal/library/anime"
"seanime/internal/mediaplayers/mediaplayer"
"seanime/internal/platforms/platform"
"seanime/internal/util"
"seanime/internal/util/result"
"sync"
"sync/atomic"
"github.com/google/uuid"
"github.com/rs/zerolog"
"github.com/samber/mo"
)
const (
LocalFilePlayback PlaybackType = "localfile"
StreamPlayback PlaybackType = "stream"
ManualTrackingPlayback PlaybackType = "manual"
)
var playbackStatePool = sync.Pool{
New: func() interface{} {
return &PlaybackState{}
},
}
type (
PlaybackType string
// PlaybackManager manages video playback (local and stream) and progress tracking for desktop media players.
// It receives and dispatches the appropriate events for:
// - Syncing progress with AniList, etc.
// - Sending notifications to the client
PlaybackManager struct {
Logger *zerolog.Logger
Database *db.Database
MediaPlayerRepository *mediaplayer.Repository // MediaPlayerRepository is used to control the media player
continuityManager *continuity.Manager
settings *Settings
discordPresence *discordrpc_presence.Presence // DiscordPresence is used to update the user's Discord presence
mediaPlayerRepoSubscriber *mediaplayer.RepositorySubscriber // Used to listen for media player events
wsEventManager events.WSEventManagerInterface
platform platform.Platform
metadataProvider metadata.Provider
refreshAnimeCollectionFunc func() // This function is called to refresh the AniList collection
mu sync.Mutex
eventMu sync.RWMutex
cancel context.CancelFunc
// historyMap stores a PlaybackState whose state is "completed"
// Since PlaybackState is sent to the client continuously, once a PlaybackState is stored in historyMap, only the stored state is sent to the client.
// This is so when the user seeks back to a video, the client can show the last known "completed" state of the video
historyMap map[string]PlaybackState
currentPlaybackType PlaybackType
currentMediaPlaybackStatus *mediaplayer.PlaybackStatus // The current video playback status (can be nil)
autoPlayMu sync.Mutex
nextEpisodeLocalFile mo.Option[*anime.LocalFile] // The next episode's local file (for local file playback)
// currentMediaListEntry for Local file playback & stream playback
// For Local file playback, it MUST be set
// For Stream playback, it is optional
// See [progress_tracking.go] for how it is handled
currentMediaListEntry mo.Option[*anilist.AnimeListEntry] // List Entry for the current video playback
// \/ Local file playback
currentLocalFile mo.Option[*anime.LocalFile] // Local file for the current video playback
currentLocalFileWrapperEntry mo.Option[*anime.LocalFileWrapperEntry] // This contains the current media entry local file data
// \/ Stream playback
// The current episode being streamed, set in [StartStreamingUsingMediaPlayer] by resolving the episode from the episode collection built there
currentStreamEpisode mo.Option[*anime.Episode]
// The current media being streamed, set in [StartStreamingUsingMediaPlayer]
currentStreamMedia mo.Option[*anilist.BaseAnime]
currentStreamAniDbEpisode mo.Option[string]
// \/ Manual progress tracking (non-integrated external player)
manualTrackingCtx context.Context
manualTrackingCtxCancel context.CancelFunc
manualTrackingPlaybackState PlaybackState
currentManualTrackingState mo.Option[*ManualTrackingState]
manualTrackingWg sync.WaitGroup
// \/ Playlist
playlistHub *playlistHub // The playlist hub
isOffline *bool
animeCollection mo.Option[*anilist.AnimeCollection]
playbackStatusSubscribers *result.Map[string, *PlaybackStatusSubscriber]
}
// PlaybackStatusSubscriber provides a single event channel for all playback events
PlaybackStatusSubscriber struct {
EventCh chan PlaybackEvent
canceled atomic.Bool
}
// PlaybackEvent is the base interface for all playback events
PlaybackEvent interface {
Type() string
}
PlaybackStartingEvent struct {
Filepath string
PlaybackType PlaybackType
Media *anilist.BaseAnime
AniDbEpisode string
EpisodeNumber int
WindowTitle string
}
// Local file playback events
PlaybackStatusChangedEvent struct {
Status mediaplayer.PlaybackStatus
State PlaybackState
}
VideoStartedEvent struct {
Filename string
Filepath string
}
VideoStoppedEvent struct {
Reason string
}
VideoCompletedEvent struct {
Filename string
}
// Stream playback events
StreamStateChangedEvent struct {
State PlaybackState
}
StreamStatusChangedEvent struct {
Status mediaplayer.PlaybackStatus
}
StreamStartedEvent struct {
Filename string
Filepath string
}
StreamStoppedEvent struct {
Reason string
}
StreamCompletedEvent struct {
Filename string
}
PlaybackStateType string
// PlaybackState is used to keep track of the user's current video playback
// It is sent to the client each time the video playback state is picked up -- this is used to update the client's UI
PlaybackState struct {
EpisodeNumber int `json:"episodeNumber"` // The episode number
AniDbEpisode string `json:"aniDbEpisode"` // The AniDB episode number
MediaTitle string `json:"mediaTitle"` // The title of the media
MediaCoverImage string `json:"mediaCoverImage"` // The cover image of the media
MediaTotalEpisodes int `json:"mediaTotalEpisodes"` // The total number of episodes
Filename string `json:"filename"` // The filename
CompletionPercentage float64 `json:"completionPercentage"` // The completion percentage
CanPlayNext bool `json:"canPlayNext"` // Whether the next episode can be played
ProgressUpdated bool `json:"progressUpdated"` // Whether the progress has been updated
MediaId int `json:"mediaId"` // The media ID
}
NewPlaybackManagerOptions struct {
WSEventManager events.WSEventManagerInterface
Logger *zerolog.Logger
Platform platform.Platform
MetadataProvider metadata.Provider
Database *db.Database
RefreshAnimeCollectionFunc func() // This function is called to refresh the AniList collection
DiscordPresence *discordrpc_presence.Presence
IsOffline *bool
ContinuityManager *continuity.Manager
}
Settings struct {
AutoPlayNextEpisode bool
}
)
// Event type implementations
func (e PlaybackStatusChangedEvent) Type() string { return "playback_status_changed" }
func (e VideoStartedEvent) Type() string { return "video_started" }
func (e VideoStoppedEvent) Type() string { return "video_stopped" }
func (e VideoCompletedEvent) Type() string { return "video_completed" }
func (e StreamStateChangedEvent) Type() string { return "stream_state_changed" }
func (e StreamStatusChangedEvent) Type() string { return "stream_status_changed" }
func (e StreamStartedEvent) Type() string { return "stream_started" }
func (e StreamStoppedEvent) Type() string { return "stream_stopped" }
func (e StreamCompletedEvent) Type() string { return "stream_completed" }
func (e PlaybackStartingEvent) Type() string { return "playback_starting" }
func New(opts *NewPlaybackManagerOptions) *PlaybackManager {
pm := &PlaybackManager{
Logger: opts.Logger,
Database: opts.Database,
settings: &Settings{},
discordPresence: opts.DiscordPresence,
wsEventManager: opts.WSEventManager,
platform: opts.Platform,
metadataProvider: opts.MetadataProvider,
refreshAnimeCollectionFunc: opts.RefreshAnimeCollectionFunc,
mu: sync.Mutex{},
autoPlayMu: sync.Mutex{},
eventMu: sync.RWMutex{},
historyMap: make(map[string]PlaybackState),
isOffline: opts.IsOffline,
nextEpisodeLocalFile: mo.None[*anime.LocalFile](),
currentStreamEpisode: mo.None[*anime.Episode](),
currentStreamMedia: mo.None[*anilist.BaseAnime](),
currentStreamAniDbEpisode: mo.None[string](),
animeCollection: mo.None[*anilist.AnimeCollection](),
currentManualTrackingState: mo.None[*ManualTrackingState](),
currentLocalFile: mo.None[*anime.LocalFile](),
currentLocalFileWrapperEntry: mo.None[*anime.LocalFileWrapperEntry](),
currentMediaListEntry: mo.None[*anilist.AnimeListEntry](),
continuityManager: opts.ContinuityManager,
playbackStatusSubscribers: result.NewResultMap[string, *PlaybackStatusSubscriber](),
}
pm.playlistHub = newPlaylistHub(pm)
return pm
}
func (pm *PlaybackManager) SetAnimeCollection(ac *anilist.AnimeCollection) {
pm.animeCollection = mo.Some(ac)
}
func (pm *PlaybackManager) SetSettings(s *Settings) {
pm.settings = s
}
// SetMediaPlayerRepository sets the media player repository and starts listening to media player events
// - This method is called when the media player is mounted (due to settings change or when the app starts)
func (pm *PlaybackManager) SetMediaPlayerRepository(mediaPlayerRepository *mediaplayer.Repository) {
go func() {
// If a previous context exists, cancel it
if pm.cancel != nil {
pm.cancel()
}
pm.playlistHub.reset()
// Create a new context for listening to the MediaPlayer instance's event
// When this is canceled above, the previous listener goroutine will stop -- this is done to prevent multiple listeners
var ctx context.Context
ctx, pm.cancel = context.WithCancel(context.Background())
pm.mu.Lock()
// Set the new media player repository instance
pm.MediaPlayerRepository = mediaPlayerRepository
// Set up event listeners for the media player instance
pm.mediaPlayerRepoSubscriber = pm.MediaPlayerRepository.Subscribe("playbackmanager")
pm.mu.Unlock()
// Start listening to new media player events
pm.listenToMediaPlayerEvents(ctx)
// DEVNOTE: pm.listenToClientPlayerEvents()
}()
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
type StartPlayingOptions struct {
Payload string // url or path
UserAgent string
ClientId string
}
func (pm *PlaybackManager) StartPlayingUsingMediaPlayer(opts *StartPlayingOptions) error {
event := &LocalFilePlaybackRequestedEvent{
Path: opts.Payload,
}
err := hook.GlobalHookManager.OnLocalFilePlaybackRequested().Trigger(event)
if err != nil {
return err
}
opts.Payload = event.Path
if event.DefaultPrevented {
pm.Logger.Debug().Msg("playback manager: Local file playback prevented by hook")
return nil
}
pm.playlistHub.reset()
if err := pm.checkOrLoadAnimeCollection(); err != nil {
return err
}
// Cancel manual tracking if active
if pm.manualTrackingCtxCancel != nil {
pm.manualTrackingCtxCancel()
}
// Send the media file to the media player
err = pm.MediaPlayerRepository.Play(opts.Payload)
if err != nil {
return err
}
trackingEvent := &PlaybackBeforeTrackingEvent{
IsStream: false,
}
err = hook.GlobalHookManager.OnPlaybackBeforeTracking().Trigger(trackingEvent)
if err != nil {
return err
}
if trackingEvent.DefaultPrevented {
return nil
}
// Start tracking
pm.MediaPlayerRepository.StartTracking()
return nil
}
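// DEVNOTE (illustrative sketch): starting local file playback from a route handler.
// Payload is the absolute path of the local file; the other values are placeholders.
//
// err := pm.StartPlayingUsingMediaPlayer(&playbackmanager.StartPlayingOptions{
// Payload: lf.GetPath(),
// UserAgent: userAgent,
// ClientId: clientId,
// })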
// StartUntrackedStreamingUsingMediaPlayer starts a stream using the media player without any tracking.
func (pm *PlaybackManager) StartUntrackedStreamingUsingMediaPlayer(windowTitle string, opts *StartPlayingOptions) (err error) {
defer util.HandlePanicInModuleWithError("library/playbackmanager/StartUntrackedStreamingUsingMediaPlayer", &err)
event := &StreamPlaybackRequestedEvent{
WindowTitle: windowTitle,
Payload: opts.Payload,
Media: nil,
AniDbEpisode: "",
}
err = hook.GlobalHookManager.OnStreamPlaybackRequested().Trigger(event)
if err != nil {
return err
}
if event.DefaultPrevented {
pm.Logger.Debug().Msg("playback manager: Stream playback prevented by hook")
return nil
}
pm.Logger.Trace().Msg("playback manager: Starting the media player")
pm.mu.Lock()
defer pm.mu.Unlock()
episodeNumber := 0
err = pm.MediaPlayerRepository.Stream(opts.Payload, episodeNumber, 0, windowTitle)
if err != nil {
pm.Logger.Error().Err(err).Msg("playback manager: Failed to start streaming")
return err
}
pm.Logger.Trace().Msg("playback manager: Sent stream to media player")
return nil
}
// StartStreamingUsingMediaPlayer starts streaming a video using the media player.
// This sets PlaybackManager.currentStreamMedia and PlaybackManager.currentStreamEpisode used for progress tracking.
// Note that resolving the current episode is not required to start streaming, but it is needed for progress tracking.
func (pm *PlaybackManager) StartStreamingUsingMediaPlayer(windowTitle string, opts *StartPlayingOptions, media *anilist.BaseAnime, aniDbEpisode string) (err error) {
defer util.HandlePanicInModuleWithError("library/playbackmanager/StartStreamingUsingMediaPlayer", &err)
event := &StreamPlaybackRequestedEvent{
WindowTitle: windowTitle,
Payload: opts.Payload,
Media: media,
AniDbEpisode: aniDbEpisode,
}
err = hook.GlobalHookManager.OnStreamPlaybackRequested().Trigger(event)
if err != nil {
return err
}
aniDbEpisode = event.AniDbEpisode
windowTitle = event.WindowTitle
if event.DefaultPrevented {
pm.Logger.Debug().Msg("playback manager: Stream playback prevented by hook")
return nil
}
pm.playlistHub.reset()
if *pm.isOffline {
return errors.New("cannot stream when offline")
}
if event.Media == nil || aniDbEpisode == "" {
pm.Logger.Error().Msg("playback manager: cannot start streaming, missing options [StartStreamingUsingMediaPlayer]")
return errors.New("cannot start streaming, not enough data provided")
}
pm.Logger.Trace().Msg("playback manager: Starting the media player")
pm.mu.Lock()
defer pm.mu.Unlock()
// Cancel manual tracking if active
if pm.manualTrackingCtxCancel != nil {
pm.manualTrackingCtxCancel()
}
pm.currentStreamMedia = mo.Some(event.Media)
episodeNumber := 0
// Find the current episode being streamed
episodeCollection, err := anime.NewEpisodeCollection(anime.NewEpisodeCollectionOptions{
AnimeMetadata: nil,
Media: event.Media,
MetadataProvider: pm.metadataProvider,
Logger: pm.Logger,
})
if err != nil {
pm.Logger.Warn().Err(err).Msg("playback manager: Failed to build episode collection")
}
pm.currentStreamAniDbEpisode = mo.Some(aniDbEpisode)
if episodeCollection != nil {
if episode, ok := episodeCollection.FindEpisodeByAniDB(aniDbEpisode); ok {
episodeNumber = episode.EpisodeNumber
pm.currentStreamEpisode = mo.Some(episode)
} else {
pm.Logger.Warn().Str("episode", aniDbEpisode).Msg("playback manager: Failed to find episode in episode collection")
}
}
err = pm.MediaPlayerRepository.Stream(event.Payload, episodeNumber, event.Media.ID, windowTitle)
if err != nil {
pm.Logger.Error().Err(err).Msg("playback manager: Failed to start streaming")
return err
}
pm.Logger.Trace().Msg("playback manager: Sent stream to media player")
trackingEvent := &PlaybackBeforeTrackingEvent{
IsStream: true,
}
err = hook.GlobalHookManager.OnPlaybackBeforeTracking().Trigger(trackingEvent)
if err != nil {
return err
}
if trackingEvent.DefaultPrevented {
return nil
}
pm.MediaPlayerRepository.StartTrackingTorrentStream()
pm.Logger.Trace().Msg("playback manager: Started tracking torrent stream")
return nil
}
// PlayNextEpisode plays the next episode of the local media that is being watched
// - Called when the user clicks on the "Next" button in the client
// - Should not be called when the user is watching a playlist
// - Should not be called when no next episode is available
func (pm *PlaybackManager) PlayNextEpisode() (err error) {
defer util.HandlePanicInModuleWithError("library/playbackmanager/PlayNextEpisode", &err)
switch pm.currentPlaybackType {
case LocalFilePlayback:
if pm.currentLocalFile.IsAbsent() || pm.currentMediaListEntry.IsAbsent() || pm.currentLocalFileWrapperEntry.IsAbsent() {
return errors.New("could not play next episode")
}
nextLf, found := pm.currentLocalFileWrapperEntry.MustGet().FindNextEpisode(pm.currentLocalFile.MustGet())
if !found {
return errors.New("could not play next episode")
}
err = pm.MediaPlayerRepository.Play(nextLf.Path)
if err != nil {
return err
}
// Start tracking the video
pm.MediaPlayerRepository.StartTracking()
case StreamPlayback:
// TODO: Implement it for torrentstream
// Check if torrent stream etc...
}
return nil
}
// GetNextEpisode gets the next [anime.LocalFile] of the local media that is being watched.
// It will return nil if there is no next episode.
// This is used by the client's "Auto Play" feature.
func (pm *PlaybackManager) GetNextEpisode() (ret *anime.LocalFile) {
defer util.HandlePanicInModuleThen("library/playbackmanager/GetNextEpisode", func() {
ret = nil
})
switch pm.currentPlaybackType {
case LocalFilePlayback:
if lf, found := pm.nextEpisodeLocalFile.Get(); found {
ret = lf
}
return
}
return nil
}
// AutoPlayNextEpisode will play the next episode of the local media that is being watched.
// This calls [PlaybackManager.PlayNextEpisode] only once if multiple clients made the request.
func (pm *PlaybackManager) AutoPlayNextEpisode() error {
pm.autoPlayMu.Lock()
defer pm.autoPlayMu.Unlock()
pm.Logger.Trace().Msg("playback manager: Auto play request received")
if !pm.settings.AutoPlayNextEpisode {
return nil
}
lf := pm.GetNextEpisode()
// This shouldn't happen because the client should check if there is a next episode before sending the request.
// However, it will happen if there are multiple clients launching the request.
if lf == nil {
pm.Logger.Warn().Msg("playback manager: No next episode to play")
return nil
}
if err := pm.PlayNextEpisode(); err != nil {
pm.Logger.Error().Err(err).Msg("playback manager: Failed to auto play next episode")
return fmt.Errorf("failed to auto play next episode: %w", err)
}
// Remove the next episode from the queue
pm.nextEpisodeLocalFile = mo.None[*anime.LocalFile]()
return nil
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Pause pauses the current media player playback.
func (pm *PlaybackManager) Pause() error {
return pm.MediaPlayerRepository.Pause()
}
// Resume resumes the current media player playback.
func (pm *PlaybackManager) Resume() error {
return pm.MediaPlayerRepository.Resume()
}
// Seek seeks to the specified time in the current media.
func (pm *PlaybackManager) Seek(seconds float64) error {
return pm.MediaPlayerRepository.Seek(seconds)
}
// PullStatus pulls the current media player playback status at the time of the call.
func (pm *PlaybackManager) PullStatus() (*mediaplayer.PlaybackStatus, bool) {
return pm.MediaPlayerRepository.PullStatus()
}
// Cancel stops the current media player playback and publishes a "normal" event.
func (pm *PlaybackManager) Cancel() error {
pm.MediaPlayerRepository.Stop()
return nil
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Playlist
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// CancelCurrentPlaylist cancels the current playlist.
// This is an action triggered by the client.
func (pm *PlaybackManager) CancelCurrentPlaylist() error {
go pm.playlistHub.reset()
return nil
}
// RequestNextPlaylistFile will play the next file in the playlist.
// This is an action triggered by the client.
func (pm *PlaybackManager) RequestNextPlaylistFile() error {
go pm.playlistHub.playNextFile()
return nil
}
// StartPlaylist starts a playlist.
// This action is triggered by the client.
func (pm *PlaybackManager) StartPlaylist(playlist *anime.Playlist) (err error) {
defer util.HandlePanicInModuleWithError("library/playbackmanager/StartPlaylist", &err)
pm.playlistHub.loadPlaylist(playlist)
_ = pm.checkOrLoadAnimeCollection()
// Play the first video in the playlist
firstVidPath := playlist.LocalFiles[0].Path
err = pm.MediaPlayerRepository.Play(firstVidPath)
if err != nil {
return err
}
// Start tracking the video
pm.MediaPlayerRepository.StartTracking()
// Create a new context for the playlist hub
var ctx context.Context
ctx, pm.playlistHub.cancel = context.WithCancel(context.Background())
// Listen to new play requests
go func() {
pm.Logger.Debug().Msg("playback manager: Listening for new file requests")
for {
select {
// When the playlist hub context is cancelled (No playlist is being played)
case <-ctx.Done():
pm.Logger.Debug().Msg("playback manager: Playlist context cancelled")
// Send event to the client -- nil signals that no playlist is being played
pm.wsEventManager.SendEvent(events.PlaybackManagerPlaylistState, nil)
return
case path := <-pm.playlistHub.requestNewFileCh:
// requestNewFileCh receives the path of the next video to play
// The channel is fed when it's time to play the next video or when the client requests the next video
// see: RequestNextPlaylistFile, playlistHub code
pm.Logger.Debug().Str("path", path).Msg("playback manager: Playing next file")
// Send notification to the client
pm.wsEventManager.SendEvent(events.InfoToast, "Playing next file in playlist")
// Play the requested video
err := pm.MediaPlayerRepository.Play(path)
if err != nil {
pm.Logger.Error().Err(err).Msg("playback manager: Failed to play next file in playlist")
pm.playlistHub.cancel()
return
}
// Start tracking the video
pm.MediaPlayerRepository.StartTracking()
case <-pm.playlistHub.endOfPlaylistCh:
pm.Logger.Debug().Msg("playback manager: End of playlist")
pm.wsEventManager.SendEvent(events.InfoToast, "End of playlist")
// Send event to the client -- nil signals that no playlist is being played
pm.wsEventManager.SendEvent(events.PlaybackManagerPlaylistState, nil)
go pm.MediaPlayerRepository.Stop()
pm.playlistHub.cancel()
return
}
}
}()
// Delete playlist in goroutine
go func() {
err := db_bridge.DeletePlaylist(pm.Database, playlist.DbId)
if err != nil {
pm.Logger.Error().Err(err).Str("name", playlist.Name).Msgf("playback manager: Failed to delete playlist")
return
}
pm.Logger.Debug().Str("name", playlist.Name).Msgf("playback manager: Deleted playlist")
}()
return nil
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
func (pm *PlaybackManager) checkOrLoadAnimeCollection() (err error) {
defer util.HandlePanicInModuleWithError("library/playbackmanager/checkOrLoadAnimeCollection", &err)
if pm.animeCollection.IsAbsent() {
// If the anime collection is not present, we retrieve it from the platform
collection, err := pm.platform.GetAnimeCollection(context.Background(), false)
if err != nil {
return err
}
pm.animeCollection = mo.Some(collection)
}
return nil
}
func (pm *PlaybackManager) SubscribeToPlaybackStatus(id string) *PlaybackStatusSubscriber {
subscriber := &PlaybackStatusSubscriber{
EventCh: make(chan PlaybackEvent, 100),
}
pm.playbackStatusSubscribers.Set(id, subscriber)
return subscriber
}
func (pm *PlaybackManager) RegisterMediaPlayerCallback(callback func(event PlaybackEvent, cancelFunc func())) (cancel func()) {
id := uuid.NewString()
playbackSubscriber := pm.SubscribeToPlaybackStatus(id)
cancel = func() {
pm.UnsubscribeFromPlaybackStatus(id)
}
go func(playbackSubscriber *PlaybackStatusSubscriber) {
for event := range playbackSubscriber.EventCh {
callback(event, cancel)
}
}(playbackSubscriber)
return cancel
}
func (pm *PlaybackManager) UnsubscribeFromPlaybackStatus(id string) {
defer func() {
if r := recover(); r != nil {
pm.Logger.Warn().Msg("playback manager: Failed to unsubscribe from playback status")
}
}()
subscriber, ok := pm.playbackStatusSubscribers.Get(id)
if !ok {
return
}
subscriber.canceled.Store(true)
pm.playbackStatusSubscribers.Delete(id)
close(subscriber.EventCh)
}
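// DEVNOTE (illustrative sketch): consuming playback events through the callback API,
// assuming events are published to subscribers as values. The type switch only covers
// a subset of the events defined above.
//
// cancel := pm.RegisterMediaPlayerCallback(func(event playbackmanager.PlaybackEvent, stop func()) {
// switch e := event.(type) {
// case playbackmanager.VideoCompletedEvent:
// fmt.Println("playback completed:", e.Filename)
// case playbackmanager.VideoStoppedEvent:
// stop() // stop listening once playback stops
// }
// })
// _ = cancel // call cancel() to unsubscribe manually at any time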

View File

@@ -0,0 +1,57 @@
package playbackmanager_test
import (
"seanime/internal/api/anilist"
"seanime/internal/api/metadata"
"seanime/internal/continuity"
"seanime/internal/database/db"
"seanime/internal/events"
"seanime/internal/library/playbackmanager"
"seanime/internal/platforms/anilist_platform"
"seanime/internal/test_utils"
"seanime/internal/util"
"seanime/internal/util/filecache"
"testing"
"github.com/stretchr/testify/require"
)
func getPlaybackManager(t *testing.T) (*playbackmanager.PlaybackManager, *anilist.AnimeCollection, error) {
logger := util.NewLogger()
wsEventManager := events.NewMockWSEventManager(logger)
database, err := db.NewDatabase(test_utils.ConfigData.Path.DataDir, test_utils.ConfigData.Database.Name, logger)
if err != nil {
t.Fatalf("error while creating database, %v", err)
}
filecacher, err := filecache.NewCacher(t.TempDir())
require.NoError(t, err)
anilistClient := anilist.TestGetMockAnilistClient()
anilistPlatform := anilist_platform.NewAnilistPlatform(anilistClient, logger)
animeCollection, err := anilistPlatform.GetAnimeCollection(t.Context(), true)
metadataProvider := metadata.GetMockProvider(t)
require.NoError(t, err)
continuityManager := continuity.NewManager(&continuity.NewManagerOptions{
FileCacher: filecacher,
Logger: logger,
Database: database,
})
return playbackmanager.New(&playbackmanager.NewPlaybackManagerOptions{
WSEventManager: wsEventManager,
Logger: logger,
Platform: anilistPlatform,
MetadataProvider: metadataProvider,
Database: database,
RefreshAnimeCollectionFunc: func() {
// Do nothing
},
DiscordPresence: nil,
IsOffline: &[]bool{false}[0],
ContinuityManager: continuityManager,
}), animeCollection, nil
}

View File

@@ -0,0 +1,235 @@
package playbackmanager
import (
"context"
"fmt"
"seanime/internal/api/anilist"
"seanime/internal/events"
"seanime/internal/library/anime"
"sync"
"sync/atomic"
"github.com/rs/zerolog"
)
type (
playlistHub struct {
requestNewFileCh chan string
endOfPlaylistCh chan struct{}
wsEventManager events.WSEventManagerInterface
logger *zerolog.Logger
currentPlaylist *anime.Playlist // The current playlist that is being played (can be nil)
nextLocalFile *anime.LocalFile // The next episode that will be played (can be nil)
cancel context.CancelFunc // The cancel function for the current playlist
mu sync.Mutex // The mutex
playingLf *anime.LocalFile // The currently playing local file
playingMediaListEntry *anilist.AnimeListEntry // The currently playing media entry
completedCurrent atomic.Bool // Whether the current episode has been completed
currentState *PlaylistState // This is sent to the client to show the current playlist state
playbackManager *PlaybackManager
}
PlaylistState struct {
Current *PlaylistStateItem `json:"current"`
Next *PlaylistStateItem `json:"next"`
Remaining int `json:"remaining"`
}
PlaylistStateItem struct {
Name string `json:"name"`
MediaImage string `json:"mediaImage"`
}
)
func newPlaylistHub(pm *PlaybackManager) *playlistHub {
ret := &playlistHub{
logger: pm.Logger,
wsEventManager: pm.wsEventManager,
playbackManager: pm,
requestNewFileCh: make(chan string, 1),
endOfPlaylistCh: make(chan struct{}, 1),
completedCurrent: atomic.Bool{},
}
ret.completedCurrent.Store(false)
return ret
}
func (h *playlistHub) loadPlaylist(playlist *anime.Playlist) {
if playlist == nil {
h.logger.Error().Msg("playlist hub: Playlist is nil")
return
}
h.reset()
h.currentPlaylist = playlist
h.logger.Debug().Str("name", playlist.Name).Msg("playlist hub: Playlist loaded")
return
}
func (h *playlistHub) reset() {
if h.cancel != nil {
h.cancel()
}
h.currentPlaylist = nil
h.playingLf = nil
h.playingMediaListEntry = nil
h.currentState = nil
h.wsEventManager.SendEvent(events.PlaybackManagerPlaylistState, h.currentState)
return
}
func (h *playlistHub) check(currListEntry *anilist.AnimeListEntry, currLf *anime.LocalFile, ps PlaybackState) bool {
if h.currentPlaylist == nil || currLf == nil || currListEntry == nil {
h.currentPlaylist = nil
h.playingLf = nil
h.playingMediaListEntry = nil
return false
}
return true
}
func (h *playlistHub) findNextFile() (*anime.LocalFile, bool) {
if h.currentPlaylist == nil || h.playingLf == nil {
return nil, false
}
for i, lf := range h.currentPlaylist.LocalFiles {
if lf.GetNormalizedPath() == h.playingLf.GetNormalizedPath() {
if i+1 < len(h.currentPlaylist.LocalFiles) {
return h.currentPlaylist.LocalFiles[i+1], true
}
break
}
}
return nil, false
}
func (h *playlistHub) playNextFile() (*anime.LocalFile, bool) {
if h.currentPlaylist == nil || h.playingLf == nil || h.nextLocalFile == nil {
return nil, false
}
h.logger.Debug().Str("path", h.nextLocalFile.Path).Str("cmd", "playNextFile").Msg("playlist hub: Requesting next file")
h.requestNewFileCh <- h.nextLocalFile.Path
h.completedCurrent.Store(false)
return nil, false
}
func (h *playlistHub) onVideoStart(currListEntry *anilist.AnimeListEntry, currLf *anime.LocalFile, ps PlaybackState) {
if !h.check(currListEntry, currLf, ps) {
return
}
h.playingLf = currLf
h.playingMediaListEntry = currListEntry
h.nextLocalFile, _ = h.findNextFile()
if h.playbackManager.animeCollection.IsAbsent() {
return
}
// Refresh current playlist state
playlistState := &PlaylistState{}
playlistState.Current = &PlaylistStateItem{
Name: fmt.Sprintf("%s - Episode %d", currListEntry.GetMedia().GetPreferredTitle(), currLf.GetEpisodeNumber()),
MediaImage: currListEntry.GetMedia().GetCoverImageSafe(),
}
if h.nextLocalFile != nil {
lfe, found := h.playbackManager.animeCollection.MustGet().GetListEntryFromAnimeId(h.nextLocalFile.MediaId)
if found {
playlistState.Next = &PlaylistStateItem{
Name: fmt.Sprintf("%s - Episode %d", lfe.GetMedia().GetPreferredTitle(), h.nextLocalFile.GetEpisodeNumber()),
MediaImage: lfe.GetMedia().GetCoverImageSafe(),
}
}
}
remaining := 0
for i, lf := range h.currentPlaylist.LocalFiles {
if lf.GetNormalizedPath() == currLf.GetNormalizedPath() {
remaining = len(h.currentPlaylist.LocalFiles) - 1 - i
break
}
}
playlistState.Remaining = remaining
h.currentState = playlistState
h.completedCurrent.Store(false)
h.logger.Debug().Str("path", currLf.Path).Msgf("playlist hub: Video started")
return
}
func (h *playlistHub) onVideoCompleted(currListEntry *anilist.AnimeListEntry, currLf *anime.LocalFile, ps PlaybackState) {
if !h.check(currListEntry, currLf, ps) {
return
}
h.logger.Debug().Str("path", currLf.Path).Msgf("playlist hub: Video completed")
h.completedCurrent.Store(true)
return
}
func (h *playlistHub) onPlaybackStatus(currListEntry *anilist.AnimeListEntry, currLf *anime.LocalFile, ps PlaybackState) {
if !h.check(currListEntry, currLf, ps) {
return
}
h.wsEventManager.SendEvent(events.PlaybackManagerPlaylistState, h.currentState)
return
}
func (h *playlistHub) onTrackingStopped() {
if h.currentPlaylist == nil || h.playingLf == nil { // Return if no playlist
return
}
// When tracking has stopped, request next file
//if h.nextLocalFile != nil {
// h.logger.Debug().Str("path", h.nextLocalFile.Path).Msg("playlist hub: Requesting next file")
// h.requestNewFileCh <- h.nextLocalFile.Path
//} else {
// h.logger.Debug().Msg("playlist hub: End of playlist")
// h.endOfPlaylistCh <- struct{}{}
//}
h.logger.Debug().Msgf("playlist hub: Tracking stopped, completed current: %v", h.completedCurrent.Load())
if !h.completedCurrent.Load() {
h.reset()
}
return
}
func (h *playlistHub) onTrackingError() {
if h.currentPlaylist == nil { // Return if no playlist
return
}
// When tracking errors out, assume the player was closed; if the current episode was completed, continue the playlist
h.logger.Debug().Msgf("playlist hub: Tracking error, completed current: %v", h.completedCurrent.Load())
if h.completedCurrent.Load() {
h.logger.Debug().Msg("playlist hub: Assuming current episode is completed")
if h.nextLocalFile != nil {
h.logger.Debug().Str("path", h.nextLocalFile.Path).Msg("playlist hub: Requesting next file")
h.requestNewFileCh <- h.nextLocalFile.Path
//h.completedCurrent.Store(false) do not reset completedCurrent here
} else {
h.logger.Debug().Msg("playlist hub: End of playlist")
h.endOfPlaylistCh <- struct{}{}
h.completedCurrent.Store(false)
}
}
return
}
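// DEVNOTE (illustrative sketch, not part of the original file): the hub never plays files itself;
// it only pushes the next path on requestNewFileCh or signals endOfPlaylistCh. The loop below is a
// hedged example of that contract — the real consumer lives elsewhere in the playback manager, and
// "playFn" is a hypothetical callback standing in for the media player call.
func (h *playlistHub) exampleConsumeChannels(ctx context.Context, playFn func(path string)) {
	for {
		select {
		case <-ctx.Done():
			return
		case path := <-h.requestNewFileCh:
			// The hub requested the next file in the playlist.
			playFn(path)
		case <-h.endOfPlaylistCh:
			// No more files: clear the hub state and stop listening.
			h.logger.Debug().Msg("playlist hub: End of playlist")
			h.reset()
			return
		}
	}
}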

View File

@@ -0,0 +1,93 @@
package playbackmanager_test
import (
"seanime/internal/events"
"seanime/internal/library/anime"
"seanime/internal/mediaplayers/mediaplayer"
"seanime/internal/mediaplayers/mpchc"
"seanime/internal/mediaplayers/mpv"
"seanime/internal/mediaplayers/vlc"
"seanime/internal/test_utils"
"seanime/internal/util"
"strconv"
"testing"
)
var defaultPlayer = "vlc"
var localFilePaths = []string{
"E:/ANIME/Dungeon Meshi/[EMBER] Dungeon Meshi - 04.mkv",
"E:/ANIME/Dungeon Meshi/[EMBER] Dungeon Meshi - 05.mkv",
"E:/ANIME/Dungeon Meshi/[EMBER] Dungeon Meshi - 06.mkv",
}
var mediaId = 153518
func TestPlaylists(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.Anilist(), test_utils.MediaPlayer())
playbackManager, animeCollection, err := getPlaybackManager(t)
if err != nil {
t.Fatal(err)
}
repo := getRepo()
playbackManager.SetMediaPlayerRepository(repo)
playbackManager.SetAnimeCollection(animeCollection)
// Test the playlist hub
lfs := make([]*anime.LocalFile, 0)
for _, path := range localFilePaths {
lf := anime.NewLocalFile(path, "E:/ANIME")
epNum, _ := strconv.Atoi(lf.ParsedData.Episode)
lf.MediaId = mediaId
lf.Metadata.Type = anime.LocalFileTypeMain
lf.Metadata.Episode = epNum
lf.Metadata.AniDBEpisode = lf.ParsedData.Episode
lfs = append(lfs, lf)
}
playlist := &anime.Playlist{
DbId: 1,
Name: "test",
LocalFiles: lfs,
}
err = playbackManager.StartPlaylist(playlist)
if err != nil {
t.Fatal(err)
}
select {}
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
func getRepo() *mediaplayer.Repository {
logger := util.NewLogger()
WSEventManager := events.NewMockWSEventManager(logger)
vlcI := &vlc.VLC{
Host: test_utils.ConfigData.Provider.VlcPath,
Port: test_utils.ConfigData.Provider.VlcPort,
Password: test_utils.ConfigData.Provider.VlcPassword,
Logger: logger,
}
mpc := &mpchc.MpcHc{
Host: test_utils.ConfigData.Provider.MpcHost,
Path: test_utils.ConfigData.Provider.MpcPath,
Port: test_utils.ConfigData.Provider.MpcPort,
Logger: logger,
}
repo := mediaplayer.NewRepository(&mediaplayer.NewRepositoryOptions{
Logger: logger,
Default: defaultPlayer,
VLC: vlcI,
MpcHc: mpc,
Mpv: mpv.New(logger, "", ""),
WSEventManager: WSEventManager,
})
return repo
}

View File

@@ -0,0 +1,685 @@
package playbackmanager
import (
"cmp"
"context"
"errors"
"seanime/internal/continuity"
discordrpc_presence "seanime/internal/discordrpc/presence"
"seanime/internal/events"
"seanime/internal/library/anime"
"seanime/internal/mediaplayers/mediaplayer"
"seanime/internal/util"
"github.com/samber/mo"
)
var (
ErrProgressUpdateAnilist = errors.New("playback manager: Failed to update progress on AniList")
ErrProgressUpdateMAL = errors.New("playback manager: Failed to update progress on MyAnimeList")
)
func (pm *PlaybackManager) listenToMediaPlayerEvents(ctx context.Context) {
// Listen for media player events
go func() {
for {
select {
// Stop listening when the context is cancelled -- meaning a new MediaPlayer instance is set
case <-ctx.Done():
return
case event := <-pm.mediaPlayerRepoSubscriber.EventCh:
switch e := event.(type) {
// Local file events
case mediaplayer.TrackingStartedEvent: // New video has started playing
pm.handleTrackingStarted(e.Status)
case mediaplayer.VideoCompletedEvent: // Video has been watched completely but still tracking
pm.handleVideoCompleted(e.Status)
case mediaplayer.TrackingStoppedEvent: // Tracking has stopped completely
pm.handleTrackingStopped(e.Reason)
case mediaplayer.PlaybackStatusEvent: // Playback status has changed
pm.handlePlaybackStatus(e.Status)
case mediaplayer.TrackingRetryEvent: // Error occurred while starting tracking
pm.handleTrackingRetry(e.Reason)
// Streaming events
case mediaplayer.StreamingTrackingStartedEvent:
pm.handleStreamingTrackingStarted(e.Status)
case mediaplayer.StreamingPlaybackStatusEvent:
pm.handleStreamingPlaybackStatus(e.Status)
case mediaplayer.StreamingVideoCompletedEvent:
pm.handleStreamingVideoCompleted(e.Status)
case mediaplayer.StreamingTrackingStoppedEvent:
pm.handleStreamingTrackingStopped(e.Reason)
case mediaplayer.StreamingTrackingRetryEvent:
// Do nothing
}
}
}
}()
}
func (pm *PlaybackManager) handleTrackingStarted(status *mediaplayer.PlaybackStatus) {
pm.eventMu.Lock()
defer pm.eventMu.Unlock()
// Set the playback type
pm.currentPlaybackType = LocalFilePlayback
// Reset the history map
pm.historyMap = make(map[string]PlaybackState)
// Set the current media playback status
pm.currentMediaPlaybackStatus = status
// Get the playback state
_ps := pm.getLocalFilePlaybackState(status)
// Log
pm.Logger.Debug().Msg("playback manager: Tracking started, extracting metadata...")
// Send event to the client
pm.wsEventManager.SendEvent(events.PlaybackManagerProgressTrackingStarted, _ps)
// Notify subscribers
go func() {
pm.playbackStatusSubscribers.Range(func(key string, value *PlaybackStatusSubscriber) bool {
if value.canceled.Load() {
return true
}
value.EventCh <- PlaybackStatusChangedEvent{Status: *status, State: _ps}
value.EventCh <- VideoStartedEvent{Filename: status.Filename, Filepath: status.Filepath}
return true
})
}()
// Retrieve data about the current video playback
// Set PlaybackManager.currentMediaListEntry to the list entry of the current video
currentMediaListEntry, currentLocalFile, currentLocalFileWrapperEntry, err := pm.getLocalFilePlaybackDetails(status.Filepath)
if err != nil {
pm.Logger.Error().Err(err).Msg("playback manager: Failed to get media data")
// Send error event to the client
pm.wsEventManager.SendEvent(events.ErrorToast, err.Error())
//
pm.MediaPlayerRepository.Cancel()
return
}
pm.currentMediaListEntry = mo.Some(currentMediaListEntry)
pm.currentLocalFile = mo.Some(currentLocalFile)
pm.currentLocalFileWrapperEntry = mo.Some(currentLocalFileWrapperEntry)
pm.Logger.Debug().
Str("media", pm.currentMediaListEntry.MustGet().GetMedia().GetPreferredTitle()).
Int("episode", pm.currentLocalFile.MustGet().GetEpisodeNumber()).
Msg("playback manager: Playback started")
pm.continuityManager.SetExternalPlayerEpisodeDetails(&continuity.ExternalPlayerEpisodeDetails{
EpisodeNumber: pm.currentLocalFile.MustGet().GetEpisodeNumber(),
MediaId: pm.currentMediaListEntry.MustGet().GetMedia().GetID(),
Filepath: pm.currentLocalFile.MustGet().GetPath(),
})
// ------- Playlist ------- //
go pm.playlistHub.onVideoStart(pm.currentMediaListEntry.MustGet(), pm.currentLocalFile.MustGet(), _ps)
// ------- Discord ------- //
if pm.discordPresence != nil && !*pm.isOffline {
go pm.discordPresence.SetAnimeActivity(&discordrpc_presence.AnimeActivity{
ID: pm.currentMediaListEntry.MustGet().GetMedia().GetID(),
Title: pm.currentMediaListEntry.MustGet().GetMedia().GetPreferredTitle(),
Image: pm.currentMediaListEntry.MustGet().GetMedia().GetCoverImageSafe(),
IsMovie: pm.currentMediaListEntry.MustGet().GetMedia().IsMovie(),
EpisodeNumber: pm.currentLocalFileWrapperEntry.MustGet().GetProgressNumber(pm.currentLocalFile.MustGet()),
Progress: int(pm.currentMediaPlaybackStatus.CurrentTimeInSeconds),
Duration: int(pm.currentMediaPlaybackStatus.DurationInSeconds),
TotalEpisodes: pm.currentMediaListEntry.MustGet().GetMedia().Episodes,
CurrentEpisodeCount: pm.currentMediaListEntry.MustGet().GetMedia().GetCurrentEpisodeCountOrNil(),
})
}
}
func (pm *PlaybackManager) handleVideoCompleted(status *mediaplayer.PlaybackStatus) {
pm.eventMu.Lock()
defer pm.eventMu.Unlock()
// Set the current media playback status
pm.currentMediaPlaybackStatus = status
// Get the playback state
_ps := pm.getLocalFilePlaybackState(status)
// Log
pm.Logger.Debug().Msg("playback manager: Received video completed event")
// Notify subscribers
go func() {
pm.playbackStatusSubscribers.Range(func(key string, value *PlaybackStatusSubscriber) bool {
if value.canceled.Load() {
return true
}
value.EventCh <- PlaybackStatusChangedEvent{Status: *status, State: _ps}
value.EventCh <- VideoCompletedEvent{Filename: status.Filename}
return true
})
}()
//
// Update the progress on AniList if auto update progress is enabled
//
pm.autoSyncCurrentProgress(&_ps)
// Send the playback state with the `ProgressUpdated` flag
// The client will use this to notify the user if the progress has been updated
pm.wsEventManager.SendEvent(events.PlaybackManagerProgressVideoCompleted, _ps)
// Push the video playback state to the history
pm.historyMap[status.Filename] = _ps
// ------- Playlist ------- //
if pm.currentMediaListEntry.IsPresent() && pm.currentLocalFile.IsPresent() {
go pm.playlistHub.onVideoCompleted(pm.currentMediaListEntry.MustGet(), pm.currentLocalFile.MustGet(), _ps)
}
}
func (pm *PlaybackManager) handleTrackingStopped(reason string) {
pm.eventMu.Lock()
defer pm.eventMu.Unlock()
pm.Logger.Debug().Msg("playback manager: Received tracking stopped event")
pm.wsEventManager.SendEvent(events.PlaybackManagerProgressTrackingStopped, reason)
// Find the next episode and set it to [PlaybackManager.nextEpisodeLocalFile]
if pm.currentMediaListEntry.IsPresent() && pm.currentLocalFile.IsPresent() && pm.currentLocalFileWrapperEntry.IsPresent() {
lf, ok := pm.currentLocalFileWrapperEntry.MustGet().FindNextEpisode(pm.currentLocalFile.MustGet())
if ok {
pm.nextEpisodeLocalFile = mo.Some(lf)
} else {
pm.nextEpisodeLocalFile = mo.None[*anime.LocalFile]()
}
}
// Notify subscribers
go func() {
pm.playbackStatusSubscribers.Range(func(key string, value *PlaybackStatusSubscriber) bool {
if value.canceled.Load() {
return true
}
value.EventCh <- VideoStoppedEvent{Reason: reason}
return true
})
}()
if pm.currentMediaPlaybackStatus != nil {
pm.continuityManager.UpdateExternalPlayerEpisodeWatchHistoryItem(pm.currentMediaPlaybackStatus.CurrentTimeInSeconds, pm.currentMediaPlaybackStatus.DurationInSeconds)
}
// ------- Playlist ------- //
go pm.playlistHub.onTrackingStopped()
// ------- Discord ------- //
if pm.discordPresence != nil && !*pm.isOffline {
go pm.discordPresence.Close()
}
}
func (pm *PlaybackManager) handlePlaybackStatus(status *mediaplayer.PlaybackStatus) {
pm.eventMu.Lock()
defer pm.eventMu.Unlock()
pm.currentPlaybackType = LocalFilePlayback
// Set the current media playback status
pm.currentMediaPlaybackStatus = status
// Get the playback state
_ps := pm.getLocalFilePlaybackState(status)
// If the same PlaybackState is in the history, update the ProgressUpdated flag
// PlaybackStatusCh has no way of knowing if the progress has been updated
if h, ok := pm.historyMap[status.Filename]; ok {
_ps.ProgressUpdated = h.ProgressUpdated
}
// Notify subscribers
go func() {
pm.playbackStatusSubscribers.Range(func(key string, value *PlaybackStatusSubscriber) bool {
if value.canceled.Load() {
return true
}
value.EventCh <- PlaybackStatusChangedEvent{Status: *status, State: _ps}
return true
})
}()
// Send the playback state to the client
pm.wsEventManager.SendEvent(events.PlaybackManagerProgressPlaybackState, _ps)
// ------- Playlist ------- //
if pm.currentMediaListEntry.IsPresent() && pm.currentLocalFile.IsPresent() {
go pm.playlistHub.onPlaybackStatus(pm.currentMediaListEntry.MustGet(), pm.currentLocalFile.MustGet(), _ps)
}
// ------- Discord ------- //
if pm.discordPresence != nil && !*pm.isOffline {
go pm.discordPresence.UpdateAnimeActivity(int(pm.currentMediaPlaybackStatus.CurrentTimeInSeconds), int(pm.currentMediaPlaybackStatus.DurationInSeconds), !pm.currentMediaPlaybackStatus.Playing)
}
}
func (pm *PlaybackManager) handleTrackingRetry(reason string) {
// DEVNOTE: This event is not sent to the client
// We notify the playlist hub, so it can play the next episode (it's assumed that the user closed the player)
// ------- Playlist ------- //
go pm.playlistHub.onTrackingError()
}
func (pm *PlaybackManager) handleStreamingTrackingStarted(status *mediaplayer.PlaybackStatus) {
pm.eventMu.Lock()
defer pm.eventMu.Unlock()
if pm.currentStreamEpisode.IsAbsent() || pm.currentStreamMedia.IsAbsent() {
return
}
// Get the media list entry
// Note that it might be absent if the user is watching a stream that is not in the library
pm.currentMediaListEntry = pm.getStreamPlaybackDetails(pm.currentStreamMedia.MustGet().GetID())
// Set the playback type
pm.currentPlaybackType = StreamPlayback
// Reset the history map
pm.historyMap = make(map[string]PlaybackState)
// Set the current media playback status
pm.currentMediaPlaybackStatus = status
// Get the playback state
_ps := pm.getStreamPlaybackState(status)
// Notify subscribers
go func() {
pm.playbackStatusSubscribers.Range(func(key string, value *PlaybackStatusSubscriber) bool {
if value.canceled.Load() {
return true
}
value.EventCh <- PlaybackStatusChangedEvent{Status: *status, State: _ps}
value.EventCh <- StreamStartedEvent{Filename: status.Filename, Filepath: status.Filepath}
return true
})
}()
// Log
pm.Logger.Debug().Msg("playback manager: Tracking started for stream")
// Send event to the client
pm.wsEventManager.SendEvent(events.PlaybackManagerProgressTrackingStarted, _ps)
pm.continuityManager.SetExternalPlayerEpisodeDetails(&continuity.ExternalPlayerEpisodeDetails{
EpisodeNumber: pm.currentStreamEpisode.MustGet().GetProgressNumber(),
MediaId: pm.currentStreamMedia.MustGet().GetID(),
Filepath: "",
})
// ------- Discord ------- //
if pm.discordPresence != nil && !*pm.isOffline {
go pm.discordPresence.SetAnimeActivity(&discordrpc_presence.AnimeActivity{
ID: pm.currentStreamMedia.MustGet().GetID(),
Title: pm.currentStreamMedia.MustGet().GetPreferredTitle(),
Image: pm.currentStreamMedia.MustGet().GetCoverImageSafe(),
IsMovie: pm.currentStreamMedia.MustGet().IsMovie(),
EpisodeNumber: pm.currentStreamEpisode.MustGet().GetProgressNumber(),
Progress: int(pm.currentMediaPlaybackStatus.CurrentTimeInSeconds),
Duration: int(pm.currentMediaPlaybackStatus.DurationInSeconds),
TotalEpisodes: pm.currentStreamMedia.MustGet().Episodes,
CurrentEpisodeCount: pm.currentStreamMedia.MustGet().GetCurrentEpisodeCountOrNil(),
})
}
}
func (pm *PlaybackManager) handleStreamingPlaybackStatus(status *mediaplayer.PlaybackStatus) {
pm.eventMu.Lock()
defer pm.eventMu.Unlock()
if pm.currentStreamEpisode.IsAbsent() {
return
}
pm.currentPlaybackType = StreamPlayback
// Set the current media playback status
pm.currentMediaPlaybackStatus = status
// Get the playback state
_ps := pm.getStreamPlaybackState(status)
// If the same PlaybackState is in the history, update the ProgressUpdated flag
// PlaybackStatusCh has no way of knowing if the progress has been updated
if h, ok := pm.historyMap[status.Filename]; ok {
_ps.ProgressUpdated = h.ProgressUpdated
}
// Notify subscribers
go func() {
pm.playbackStatusSubscribers.Range(func(key string, value *PlaybackStatusSubscriber) bool {
if value.canceled.Load() {
return true
}
value.EventCh <- PlaybackStatusChangedEvent{Status: *status, State: _ps}
return true
})
}()
// Send the playback state to the client
pm.wsEventManager.SendEvent(events.PlaybackManagerProgressPlaybackState, _ps)
// ------- Discord ------- //
if pm.discordPresence != nil && !*pm.isOffline {
go pm.discordPresence.UpdateAnimeActivity(int(pm.currentMediaPlaybackStatus.CurrentTimeInSeconds), int(pm.currentMediaPlaybackStatus.DurationInSeconds), !pm.currentMediaPlaybackStatus.Playing)
}
}
func (pm *PlaybackManager) handleStreamingVideoCompleted(status *mediaplayer.PlaybackStatus) {
pm.eventMu.Lock()
defer pm.eventMu.Unlock()
if pm.currentStreamEpisode.IsAbsent() {
return
}
// Set the current media playback status
pm.currentMediaPlaybackStatus = status
// Get the playback state
_ps := pm.getStreamPlaybackState(status)
// Log
pm.Logger.Debug().Msg("playback manager: Received video completed event")
// Notify subscribers
go func() {
pm.playbackStatusSubscribers.Range(func(key string, value *PlaybackStatusSubscriber) bool {
if value.canceled.Load() {
return true
}
value.EventCh <- PlaybackStatusChangedEvent{Status: *status, State: _ps}
value.EventCh <- StreamCompletedEvent{Filename: status.Filename}
return true
})
}()
//
// Update the progress on AniList if auto update progress is enabled
//
pm.autoSyncCurrentProgress(&_ps)
// Send the playback state with the `ProgressUpdated` flag
// The client will use this to notify the user if the progress has been updated
pm.wsEventManager.SendEvent(events.PlaybackManagerProgressVideoCompleted, _ps)
// Push the video playback state to the history
pm.historyMap[status.Filename] = _ps
}
func (pm *PlaybackManager) handleStreamingTrackingStopped(reason string) {
pm.eventMu.Lock()
defer pm.eventMu.Unlock()
if pm.currentStreamEpisode.IsAbsent() {
return
}
if pm.currentMediaPlaybackStatus != nil {
pm.continuityManager.UpdateExternalPlayerEpisodeWatchHistoryItem(pm.currentMediaPlaybackStatus.CurrentTimeInSeconds, pm.currentMediaPlaybackStatus.DurationInSeconds)
}
// Notify subscribers
go func() {
pm.playbackStatusSubscribers.Range(func(key string, value *PlaybackStatusSubscriber) bool {
if value.canceled.Load() {
return true
}
value.EventCh <- StreamStoppedEvent{Reason: reason}
return true
})
}()
pm.Logger.Debug().Msg("playback manager: Received tracking stopped event")
pm.wsEventManager.SendEvent(events.PlaybackManagerProgressTrackingStopped, reason)
// ------- Discord ------- //
if pm.discordPresence != nil && !*pm.isOffline {
go pm.discordPresence.Close()
}
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Local File
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// getLocalFilePlaybackState returns a new PlaybackState
func (pm *PlaybackManager) getLocalFilePlaybackState(status *mediaplayer.PlaybackStatus) PlaybackState {
pm.mu.Lock()
defer pm.mu.Unlock()
currentLocalFileWrapperEntry, ok := pm.currentLocalFileWrapperEntry.Get()
if !ok {
return PlaybackState{}
}
currentLocalFile, ok := pm.currentLocalFile.Get()
if !ok {
return PlaybackState{}
}
currentMediaListEntry, ok := pm.currentMediaListEntry.Get()
if !ok {
return PlaybackState{}
}
// Find the following episode
_, canPlayNext := currentLocalFileWrapperEntry.FindNextEpisode(currentLocalFile)
return PlaybackState{
EpisodeNumber: currentLocalFileWrapperEntry.GetProgressNumber(currentLocalFile),
AniDbEpisode: currentLocalFile.GetAniDBEpisode(),
MediaTitle: currentMediaListEntry.GetMedia().GetPreferredTitle(),
MediaTotalEpisodes: currentMediaListEntry.GetMedia().GetCurrentEpisodeCount(),
MediaCoverImage: currentMediaListEntry.GetMedia().GetCoverImageSafe(),
MediaId: currentMediaListEntry.GetMedia().GetID(),
Filename: status.Filename,
CompletionPercentage: status.CompletionPercentage,
CanPlayNext: canPlayNext,
}
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Stream
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// getStreamPlaybackState returns a new PlaybackState
func (pm *PlaybackManager) getStreamPlaybackState(status *mediaplayer.PlaybackStatus) PlaybackState {
pm.mu.Lock()
defer pm.mu.Unlock()
currentStreamEpisode, ok := pm.currentStreamEpisode.Get()
if !ok {
return PlaybackState{}
}
currentStreamMedia, ok := pm.currentStreamMedia.Get()
if !ok {
return PlaybackState{}
}
currentStreamAniDbEpisode, ok := pm.currentStreamAniDbEpisode.Get()
if !ok {
return PlaybackState{}
}
return PlaybackState{
EpisodeNumber: currentStreamEpisode.GetProgressNumber(),
AniDbEpisode: currentStreamAniDbEpisode,
MediaTitle: currentStreamMedia.GetPreferredTitle(),
MediaTotalEpisodes: currentStreamMedia.GetCurrentEpisodeCount(),
MediaCoverImage: currentStreamMedia.GetCoverImageSafe(),
MediaId: currentStreamMedia.GetID(),
Filename: cmp.Or(status.Filename, "Stream"),
CompletionPercentage: status.CompletionPercentage,
CanPlayNext: false, // DEVNOTE: This is not used for streams
}
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// autoSyncCurrentProgress syncs the current video playback progress with providers.
// This is called once when a "video complete" event is heard.
func (pm *PlaybackManager) autoSyncCurrentProgress(_ps *PlaybackState) {
shouldUpdate, err := pm.Database.AutoUpdateProgressIsEnabled()
if err != nil {
pm.Logger.Error().Err(err).Msg("playback manager: Failed to check if auto update progress is enabled")
return
}
if !shouldUpdate {
return
}
switch pm.currentPlaybackType {
case LocalFilePlayback:
// Note: currentMediaListEntry MUST be defined since we assume that the media is in the user's library
if pm.currentMediaListEntry.IsAbsent() || pm.currentLocalFileWrapperEntry.IsAbsent() || pm.currentLocalFile.IsAbsent() {
return
}
// Only update the progress if the current progress is lower than the episode's progress number
epProgressNum := pm.currentLocalFileWrapperEntry.MustGet().GetProgressNumber(pm.currentLocalFile.MustGet())
if *pm.currentMediaListEntry.MustGet().Progress >= epProgressNum {
return
}
case StreamPlayback:
if pm.currentStreamEpisode.IsAbsent() || pm.currentStreamMedia.IsAbsent() {
return
}
// Do not auto-update progress if the media is in the library AND its progress is already at or above the current episode
epProgressNum := pm.currentStreamEpisode.MustGet().GetProgressNumber()
if pm.currentMediaListEntry.IsPresent() && *pm.currentMediaListEntry.MustGet().Progress >= epProgressNum {
return
}
}
// Update the progress on AniList
pm.Logger.Debug().Msg("playback manager: Updating progress on AniList")
err = pm.updateProgress()
if err != nil {
_ps.ProgressUpdated = false
pm.wsEventManager.SendEvent(events.ErrorToast, "Failed to update progress on AniList")
} else {
_ps.ProgressUpdated = true
pm.wsEventManager.SendEvent(events.PlaybackManagerProgressUpdated, _ps)
}
}
// SyncCurrentProgress syncs the current video playback progress with providers
// This method is called when the user manually requests to sync the progress
// - This method will return an error only if the progress update fails on AniList
// - This method will refresh the anilist collection
func (pm *PlaybackManager) SyncCurrentProgress() error {
pm.eventMu.RLock()
err := pm.updateProgress()
if err != nil {
pm.eventMu.RUnlock()
return err
}
// Push the current playback state to the history
if pm.currentMediaPlaybackStatus != nil {
var _ps PlaybackState
switch pm.currentPlaybackType {
case LocalFilePlayback:
_ps = pm.getLocalFilePlaybackState(pm.currentMediaPlaybackStatus)
case StreamPlayback:
_ps = pm.getStreamPlaybackState(pm.currentMediaPlaybackStatus)
}
_ps.ProgressUpdated = true
pm.historyMap[pm.currentMediaPlaybackStatus.Filename] = _ps
pm.wsEventManager.SendEvent(events.PlaybackManagerProgressUpdated, _ps)
}
pm.refreshAnimeCollectionFunc()
pm.eventMu.RUnlock()
return nil
}
// updateProgress updates the progress of the current video playback on AniList and MyAnimeList.
// This only returns an error if the progress update fails on AniList
// - /!\ When this is called, the PlaybackState should have been pushed to the history
func (pm *PlaybackManager) updateProgress() (err error) {
var mediaId int
var epNum int
var totalEpisodes int
switch pm.currentPlaybackType {
case LocalFilePlayback:
//
// Local File
//
if pm.currentLocalFileWrapperEntry.IsAbsent() || pm.currentLocalFile.IsAbsent() || pm.currentMediaListEntry.IsAbsent() {
return errors.New("no video is being watched")
}
defer util.HandlePanicInModuleWithError("playbackmanager/updateProgress", &err)
/// Online
mediaId = pm.currentMediaListEntry.MustGet().GetMedia().GetID()
epNum = pm.currentLocalFileWrapperEntry.MustGet().GetProgressNumber(pm.currentLocalFile.MustGet())
totalEpisodes = pm.currentMediaListEntry.MustGet().GetMedia().GetTotalEpisodeCount() // total episode count or -1
case StreamPlayback:
//
// Stream
//
// Last sanity check
if pm.currentStreamEpisode.IsAbsent() || pm.currentStreamMedia.IsAbsent() {
return errors.New("no video is being watched")
}
mediaId = pm.currentStreamMedia.MustGet().ID
epNum = pm.currentStreamEpisode.MustGet().GetProgressNumber()
totalEpisodes = pm.currentStreamMedia.MustGet().GetTotalEpisodeCount() // total episode count or -1
case ManualTrackingPlayback:
//
// Manual Tracking
//
if pm.currentManualTrackingState.IsAbsent() {
return errors.New("no media file is being manually tracked")
}
defer func() {
if pm.manualTrackingCtxCancel != nil {
pm.manualTrackingCtxCancel()
}
}()
/// Online
mediaId = pm.currentManualTrackingState.MustGet().MediaId
epNum = pm.currentManualTrackingState.MustGet().EpisodeNumber
totalEpisodes = pm.currentManualTrackingState.MustGet().TotalEpisodes
default:
return errors.New("unknown playback type")
}
if mediaId == 0 { // Sanity check
return errors.New("media ID not found")
}
// Update the progress on AniList
err = pm.platform.UpdateEntryProgress(
context.Background(),
mediaId,
epNum,
&totalEpisodes,
)
if err != nil {
pm.Logger.Error().Err(err).Msg("playback manager: Error occurred while updating progress on AniList")
return ErrProgressUpdateAnilist
}
pm.refreshAnimeCollectionFunc() // Refresh the AniList collection
pm.Logger.Info().Msg("playback manager: Updated progress on AniList")
return nil
}
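// DEVNOTE (illustrative sketch, not part of the original file): the events pushed to
// PlaybackStatusSubscriber.EventCh above can be consumed with a type switch. How a subscriber is
// obtained is not shown in this file, so the function below only illustrates the draining side.
func exampleDrainSubscriber(sub *PlaybackStatusSubscriber) {
	for event := range sub.EventCh {
		switch e := event.(type) {
		case PlaybackStatusChangedEvent:
			// Periodic status update with the derived playback state.
			_ = e.State
		case VideoStartedEvent:
			// A local file started playing.
			_ = e.Filepath
		case VideoCompletedEvent:
			// The episode was watched completely.
			_ = e.Filename
		case VideoStoppedEvent:
			// Tracking stopped (e.g. the player was closed).
			_ = e.Reason
		}
	}
}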

View File

@@ -0,0 +1,22 @@
package playbackmanager
import "seanime/internal/library/anime"
type (
StreamMagnetRequestOptions struct {
MagnetLink string `json:"magnet_link"` // magnet link to stream
OptionalMediaId int `json:"optionalMediaId,omitempty"` // optional media ID to associate with the magnet link
Untracked bool `json:"untracked"`
}
// TrackedStreamMagnetRequestResponse is returned after analysis of the magnet link
TrackedStreamMagnetRequestResponse struct {
EpisodeNumber int `json:"episodeNumber"` // episode number of the magnet link
EpisodeCollection *anime.EpisodeCollection `json:"episodeCollection"`
}
TrackedStreamMagnetOptions struct {
EpisodeNumber int `json:"episodeNumber"`
AniDBEpisode string `json:"anidbEpisode"`
}
)
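// DEVNOTE (illustrative sketch, not part of the original file): a request matching the struct tags
// above would look like the literal below. The magnet link is a placeholder value.
var exampleStreamMagnetRequest = StreamMagnetRequestOptions{
	MagnetLink:      "magnet:?xt=urn:btih:placeholder",
	OptionalMediaId: 0, // optional; omitted from the JSON when zero (omitempty)
	Untracked:       false,
}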

View File

@@ -0,0 +1,141 @@
package playbackmanager
import (
"errors"
"fmt"
"seanime/internal/api/anilist"
"seanime/internal/database/db_bridge"
"seanime/internal/hook"
"seanime/internal/library/anime"
"seanime/internal/util"
"strings"
"github.com/samber/mo"
)
// GetCurrentMediaID returns the media id of the currently playing media
func (pm *PlaybackManager) GetCurrentMediaID() (int, error) {
if pm.currentLocalFile.IsAbsent() {
return 0, errors.New("no media is currently playing")
}
return pm.currentLocalFile.MustGet().MediaId, nil
}
// getLocalFilePlaybackDetails is called once every time a new video is played. It returns the AniList list entry, the local file and the local file wrapper entry.
func (pm *PlaybackManager) getLocalFilePlaybackDetails(path string) (*anilist.AnimeListEntry, *anime.LocalFile, *anime.LocalFileWrapperEntry, error) {
pm.mu.Lock()
defer pm.mu.Unlock()
// Normalize path
path = util.NormalizePath(path)
pm.Logger.Debug().Str("path", path).Msg("playback manager: Getting local file playback details")
// Find the local file from the path
lfs, _, err := db_bridge.GetLocalFiles(pm.Database)
if err != nil {
return nil, nil, nil, fmt.Errorf("error getting local files: %s", err.Error())
}
reqEvent := &PlaybackLocalFileDetailsRequestedEvent{
Path: path,
LocalFiles: lfs,
AnimeListEntry: &anilist.AnimeListEntry{},
LocalFile: &anime.LocalFile{},
LocalFileWrapperEntry: &anime.LocalFileWrapperEntry{},
}
err = hook.GlobalHookManager.OnPlaybackLocalFileDetailsRequested().Trigger(reqEvent)
if err != nil {
return nil, nil, nil, err
}
lfs = reqEvent.LocalFiles // Override the local files
// Default prevented, use the hook's details
if reqEvent.DefaultPrevented {
pm.Logger.Debug().Msg("playback manager: Local file details processing prevented by hook")
if reqEvent.AnimeListEntry == nil || reqEvent.LocalFile == nil || reqEvent.LocalFileWrapperEntry == nil {
return nil, nil, nil, errors.New("local file details not found")
}
return reqEvent.AnimeListEntry, reqEvent.LocalFile, reqEvent.LocalFileWrapperEntry, nil
}
var lf *anime.LocalFile
// Find the local file from the path
for _, l := range lfs {
if l.GetNormalizedPath() == path {
lf = l
pm.Logger.Debug().Msg("playback manager: Local file found by path")
break
}
}
// If the local file is not found, the path might be a filename (in the case of VLC)
if lf == nil {
for _, l := range lfs {
if strings.ToLower(l.Name) == path {
pm.Logger.Debug().Msg("playback manager: Local file found by name")
lf = l
break
}
}
}
if lf == nil {
return nil, nil, nil, errors.New("local file not found")
}
if lf.MediaId == 0 {
return nil, nil, nil, errors.New("local file has not been matched")
}
if pm.animeCollection.IsAbsent() {
return nil, nil, nil, errors.New("anime collection not found")
}
ret, ok := pm.animeCollection.MustGet().GetListEntryFromAnimeId(lf.MediaId)
if !ok {
return nil, nil, nil, errors.New("anilist list entry not found")
}
// Create local file wrapper
lfw := anime.NewLocalFileWrapper(lfs)
lfe, ok := lfw.GetLocalEntryById(lf.MediaId)
if !ok {
return nil, nil, nil, errors.New("local file wrapper entry not found")
}
return ret, lf, lfe, nil
}
// getStreamPlaybackDetails is called once every time a new stream is played. It returns the AniList list entry for the media if it is in the user's collection.
func (pm *PlaybackManager) getStreamPlaybackDetails(mId int) mo.Option[*anilist.AnimeListEntry] {
pm.mu.Lock()
defer pm.mu.Unlock()
if pm.animeCollection.IsAbsent() {
return mo.None[*anilist.AnimeListEntry]()
}
reqEvent := &PlaybackStreamDetailsRequestedEvent{
AnimeCollection: pm.animeCollection.MustGet(),
MediaId: mId,
AnimeListEntry: &anilist.AnimeListEntry{},
}
err := hook.GlobalHookManager.OnPlaybackStreamDetailsRequested().Trigger(reqEvent)
if err != nil {
return mo.None[*anilist.AnimeListEntry]()
}
if reqEvent.DefaultPrevented {
pm.Logger.Debug().Msg("playback manager: Stream details processing prevented by hook")
if reqEvent.AnimeListEntry == nil {
return mo.None[*anilist.AnimeListEntry]()
}
return mo.Some(reqEvent.AnimeListEntry)
}
ret, ok := pm.animeCollection.MustGet().GetListEntryFromAnimeId(mId)
if !ok {
return mo.None[*anilist.AnimeListEntry]()
}
return mo.Some(ret)
}

View File

@@ -0,0 +1,129 @@
package scanner
import (
"seanime/internal/api/anilist"
"seanime/internal/hook_resolver"
"seanime/internal/library/anime"
)
// ScanStartedEvent is triggered when the scanning process begins.
// Prevent default to skip the rest of the scanning process and return the local files.
type ScanStartedEvent struct {
hook_resolver.Event
// The main directory to scan
LibraryPath string `json:"libraryPath"`
// Other directories to scan
OtherLibraryPaths []string `json:"otherLibraryPaths"`
// Whether to use enhanced scanning.
// Enhanced scanning will fetch media from AniList based on the local files' titles,
// and use the metadata to match the local files.
Enhanced bool `json:"enhanced"`
// Whether to skip locked files
SkipLocked bool `json:"skipLocked"`
// Whether to skip ignored files
SkipIgnored bool `json:"skipIgnored"`
// All previously scanned local files
LocalFiles []*anime.LocalFile `json:"localFiles"`
}
// ScanFilePathsRetrievedEvent is triggered when the file paths to scan are retrieved.
// The event includes file paths from all directories to scan, including the paths of local files that will be skipped.
type ScanFilePathsRetrievedEvent struct {
hook_resolver.Event
FilePaths []string `json:"filePaths"`
}
// ScanLocalFilesParsedEvent is triggered right after the file paths are parsed into local file objects.
// The event does not include local files that are skipped.
type ScanLocalFilesParsedEvent struct {
hook_resolver.Event
LocalFiles []*anime.LocalFile `json:"localFiles"`
}
// ScanCompletedEvent is triggered when the scanning process finishes.
// The event includes all the local files (skipped and scanned) to be inserted as a new entry.
// Right after this event, the local files will be inserted as a new entry.
type ScanCompletedEvent struct {
hook_resolver.Event
LocalFiles []*anime.LocalFile `json:"localFiles"`
Duration int `json:"duration"` // in milliseconds
}
// ScanMediaFetcherStartedEvent is triggered right before Seanime starts fetching media to be matched against the local files.
type ScanMediaFetcherStartedEvent struct {
hook_resolver.Event
// Whether to use enhanced scanning.
// Enhanced scanning will fetch media from AniList based on the local files' titles,
// and use the metadata to match the local files.
Enhanced bool `json:"enhanced"`
}
// ScanMediaFetcherCompletedEvent is triggered when the media fetcher completes.
// The event includes all the media fetched from AniList, as well as the media IDs that are not in the user's collection.
type ScanMediaFetcherCompletedEvent struct {
hook_resolver.Event
// All media fetched from AniList, to be matched against the local files.
AllMedia []*anilist.CompleteAnime `json:"allMedia"`
// Media IDs that are not in the user's collection.
UnknownMediaIds []int `json:"unknownMediaIds"`
}
// ScanMatchingStartedEvent is triggered when the matching process begins.
// Prevent default to skip the default matching, in which case modified local files will be used.
type ScanMatchingStartedEvent struct {
hook_resolver.Event
// Local files to be matched.
// If default is prevented, these local files will be used.
LocalFiles []*anime.LocalFile `json:"localFiles"`
// Media to be matched against the local files.
NormalizedMedia []*anime.NormalizedMedia `json:"normalizedMedia"`
// Matching algorithm.
Algorithm string `json:"algorithm"`
// Matching threshold.
Threshold float64 `json:"threshold"`
}
// ScanLocalFileMatchedEvent is triggered when a local file is matched with media and before the match is analyzed.
// Prevent default to skip the default analysis and override the match.
type ScanLocalFileMatchedEvent struct {
hook_resolver.Event
// Can be nil if there's no match
Match *anime.NormalizedMedia `json:"match"`
Found bool `json:"found"`
LocalFile *anime.LocalFile `json:"localFile"`
Score float64 `json:"score"`
}
// ScanMatchingCompletedEvent is triggered when the matching process completes.
type ScanMatchingCompletedEvent struct {
hook_resolver.Event
LocalFiles []*anime.LocalFile `json:"localFiles"`
}
// ScanHydrationStartedEvent is triggered when the file hydration process begins.
// Prevent default to skip the rest of the hydration process, in which case the event's local files will be used.
type ScanHydrationStartedEvent struct {
hook_resolver.Event
// Local files to be hydrated.
LocalFiles []*anime.LocalFile `json:"localFiles"`
// Media to be hydrated.
AllMedia []*anime.NormalizedMedia `json:"allMedia"`
}
// ScanLocalFileHydrationStartedEvent is triggered when a local file's metadata is about to be hydrated.
// Prevent default to skip the default hydration and override the hydration.
type ScanLocalFileHydrationStartedEvent struct {
hook_resolver.Event
LocalFile *anime.LocalFile `json:"localFile"`
Media *anime.NormalizedMedia `json:"media"`
}
// ScanLocalFileHydratedEvent is triggered when a local file's metadata is hydrated
type ScanLocalFileHydratedEvent struct {
hook_resolver.Event
LocalFile *anime.LocalFile `json:"localFile"`
MediaId int `json:"mediaId"`
Episode int `json:"episode"`
}
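// DEVNOTE (illustrative sketch, not part of the original file): the scanner triggers these events
// through the global hook manager and then honors any mutations or "prevent default" set by
// listeners. The pattern, as used in hydrator.go, looks roughly like this:
//
//	event := &ScanHydrationStartedEvent{LocalFiles: lfs, AllMedia: allMedia}
//	_ = hook.GlobalHookManager.OnScanHydrationStarted().Trigger(event)
//	lfs, allMedia = event.LocalFiles, event.AllMedia // listeners may modify these
//	if event.DefaultPrevented {
//		return // a listener took over the hydration step
//	}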

View File

@@ -0,0 +1,525 @@
package scanner
import (
"errors"
"seanime/internal/api/anilist"
"seanime/internal/api/metadata"
"seanime/internal/hook"
"seanime/internal/library/anime"
"seanime/internal/library/summary"
"seanime/internal/platforms/platform"
"seanime/internal/util"
"seanime/internal/util/comparison"
"seanime/internal/util/limiter"
"strconv"
"time"
"github.com/rs/zerolog"
"github.com/samber/lo"
lop "github.com/samber/lo/parallel"
"github.com/sourcegraph/conc/pool"
)
// FileHydrator hydrates the metadata of all (matched) LocalFiles.
// LocalFiles should already have their media ID hydrated.
type FileHydrator struct {
LocalFiles []*anime.LocalFile // Local files to hydrate
AllMedia []*anime.NormalizedMedia // All media used to hydrate local files
CompleteAnimeCache *anilist.CompleteAnimeCache
Platform platform.Platform
MetadataProvider metadata.Provider
AnilistRateLimiter *limiter.Limiter
Logger *zerolog.Logger
ScanLogger *ScanLogger // optional
ScanSummaryLogger *summary.ScanSummaryLogger // optional
ForceMediaId int // optional - force all local files to have this media ID
}
// HydrateMetadata will hydrate the metadata of each LocalFile with the metadata of the matched anilist.BaseAnime.
// It will divide the LocalFiles into groups based on their media ID and process each group in parallel.
func (fh *FileHydrator) HydrateMetadata() {
start := time.Now()
rateLimiter := limiter.NewLimiter(5*time.Second, 20)
fh.Logger.Debug().Msg("hydrator: Starting metadata hydration")
// Invoke ScanHydrationStarted hook
event := &ScanHydrationStartedEvent{
LocalFiles: fh.LocalFiles,
AllMedia: fh.AllMedia,
}
_ = hook.GlobalHookManager.OnScanHydrationStarted().Trigger(event)
fh.LocalFiles = event.LocalFiles
fh.AllMedia = event.AllMedia
// Default prevented, do not hydrate the metadata
if event.DefaultPrevented {
return
}
// Group local files by media ID
groups := lop.GroupBy(fh.LocalFiles, func(localFile *anime.LocalFile) int {
return localFile.MediaId
})
// Remove the group with unmatched media
delete(groups, 0)
if fh.ScanLogger != nil {
fh.ScanLogger.LogFileHydrator(zerolog.InfoLevel).
Int("entryCount", len(groups)).
Msg("Starting metadata hydration process")
}
// Process each group in parallel
p := pool.New()
for mId, files := range groups {
p.Go(func() {
if len(files) > 0 {
fh.hydrateGroupMetadata(mId, files, rateLimiter)
}
})
}
p.Wait()
if fh.ScanLogger != nil {
fh.ScanLogger.LogFileHydrator(zerolog.InfoLevel).
Int64("ms", time.Since(start).Milliseconds()).
Msg("Finished metadata hydration")
}
}
func (fh *FileHydrator) hydrateGroupMetadata(
mId int,
lfs []*anime.LocalFile, // Grouped local files
rateLimiter *limiter.Limiter,
) {
// Get the media
media, found := lo.Find(fh.AllMedia, func(media *anime.NormalizedMedia) bool {
return media.ID == mId
})
if !found {
if fh.ScanLogger != nil {
fh.ScanLogger.LogFileHydrator(zerolog.ErrorLevel).
Int("mediaId", mId).
Msg("Could not find media in FileHydrator options")
}
return
}
// Tree contains media relations
tree := anilist.NewCompleteAnimeRelationTree()
// Tree analysis used for episode normalization
var mediaTreeAnalysis *MediaTreeAnalysis
treeFetched := false
// Process each local file in the group sequentially
lo.ForEach(lfs, func(lf *anime.LocalFile, index int) {
defer util.HandlePanicInModuleThenS("scanner/hydrator/hydrateGroupMetadata", func(stackTrace string) {
lf.MediaId = 0
/*Log*/
if fh.ScanLogger != nil {
fh.ScanLogger.LogFileHydrator(zerolog.ErrorLevel).
Str("filename", lf.Name).
Msg("Panic occurred, file un-matched")
}
fh.ScanSummaryLogger.LogPanic(lf, stackTrace)
})
episode := -1
// Invoke ScanLocalFileHydrationStarted hook
event := &ScanLocalFileHydrationStartedEvent{
LocalFile: lf,
Media: media,
}
_ = hook.GlobalHookManager.OnScanLocalFileHydrationStarted().Trigger(event)
lf = event.LocalFile
media = event.Media
defer func() {
// Invoke ScanLocalFileHydrated hook
event := &ScanLocalFileHydratedEvent{
LocalFile: lf,
MediaId: mId,
Episode: episode,
}
_ = hook.GlobalHookManager.OnScanLocalFileHydrated().Trigger(event)
lf = event.LocalFile
mId = event.MediaId
episode = event.Episode
}()
// Handle hook override
if event.DefaultPrevented {
if fh.ScanLogger != nil {
fh.ScanLogger.LogFileHydrator(zerolog.DebugLevel).
Str("filename", lf.Name).
Msg("Default hydration skipped by hook")
}
fh.ScanSummaryLogger.LogDebug(lf, "Default hydration skipped by hook")
return
}
lf.Metadata.Type = anime.LocalFileTypeMain
// Get episode number
if len(lf.ParsedData.Episode) > 0 {
if ep, ok := util.StringToInt(lf.ParsedData.Episode); ok {
episode = ep
}
}
// NC metadata
if comparison.ValueContainsNC(lf.Name) {
lf.Metadata.Episode = 0
lf.Metadata.AniDBEpisode = ""
lf.Metadata.Type = anime.LocalFileTypeNC
/*Log */
if fh.ScanLogger != nil {
fh.logFileHydration(zerolog.DebugLevel, lf, mId, episode).
Msg("File has been marked as NC")
}
fh.ScanSummaryLogger.LogMetadataNC(lf)
return
}
// Special metadata
if comparison.ValueContainsSpecial(lf.Name) {
lf.Metadata.Type = anime.LocalFileTypeSpecial
if episode > -1 {
// ep14 (13 original) -> ep1 s1
if episode > media.GetCurrentEpisodeCount() {
lf.Metadata.Episode = episode - media.GetCurrentEpisodeCount()
lf.Metadata.AniDBEpisode = "S" + strconv.Itoa(episode-media.GetCurrentEpisodeCount())
} else {
lf.Metadata.Episode = episode
lf.Metadata.AniDBEpisode = "S" + strconv.Itoa(episode)
}
} else {
lf.Metadata.Episode = 1
lf.Metadata.AniDBEpisode = "S1"
}
/*Log */
if fh.ScanLogger != nil {
fh.logFileHydration(zerolog.DebugLevel, lf, mId, episode).
Msg("File has been marked as special")
}
fh.ScanSummaryLogger.LogMetadataSpecial(lf, lf.Metadata.Episode, lf.Metadata.AniDBEpisode)
return
}
// Movie metadata
if *media.Format == anilist.MediaFormatMovie {
lf.Metadata.Episode = 1
lf.Metadata.AniDBEpisode = "1"
/*Log */
if fh.ScanLogger != nil {
fh.logFileHydration(zerolog.DebugLevel, lf, mId, episode).
Msg("File has been marked as main")
}
fh.ScanSummaryLogger.LogMetadataMain(lf, lf.Metadata.Episode, lf.Metadata.AniDBEpisode)
return
}
// No absolute episode count
// "media.GetTotalEpisodeCount() == -1" is a fix for media with unknown episode count, we will just assume that the episode number is correct
// TODO: We might want to fetch the media when the episode count is unknown in order to get the correct episode count
if episode > -1 && (episode <= media.GetCurrentEpisodeCount() || media.GetTotalEpisodeCount() == -1) {
// Episode 0 - Might be a special
// By default, we will assume that AniDB doesn't include Episode 0 as part of the main episodes (which is often the case)
// If this proves to be wrong, media_entry.go will offset the AniDBEpisode by 1 and treat "S1" as "1" when it is a main episode
if episode == 0 {
// Leave episode number as 0, assuming that the client will handle tracking correctly
lf.Metadata.Episode = 0
lf.Metadata.AniDBEpisode = "S1"
/*Log */
if fh.ScanLogger != nil {
fh.logFileHydration(zerolog.DebugLevel, lf, mId, episode).
Msg("File has been marked as main")
}
fh.ScanSummaryLogger.LogMetadataEpisodeZero(lf, lf.Metadata.Episode, lf.Metadata.AniDBEpisode)
return
}
lf.Metadata.Episode = episode
lf.Metadata.AniDBEpisode = strconv.Itoa(episode)
/*Log */
if fh.ScanLogger != nil {
fh.logFileHydration(zerolog.DebugLevel, lf, mId, episode).
Msg("File has been marked as main")
}
fh.ScanSummaryLogger.LogMetadataMain(lf, lf.Metadata.Episode, lf.Metadata.AniDBEpisode)
return
}
// Episode number is higher but media only has 1 episode
// - Might be a movie that was not correctly identified as such
// - Or, the torrent files were divided into multiple episodes from a media that is listed as a movie on AniList
if episode > media.GetCurrentEpisodeCount() && media.GetTotalEpisodeCount() == 1 {
lf.Metadata.Episode = 1 // Coerce episode number to 1 because it is used for tracking
lf.Metadata.AniDBEpisode = "1"
/*Log */
if fh.ScanLogger != nil {
fh.logFileHydration(zerolog.WarnLevel, lf, mId, episode).
Str("warning", "File's episode number is higher than the media's episode count, but the media only has 1 episode").
Msg("File has been marked as main")
}
fh.ScanSummaryLogger.LogMetadataMain(lf, lf.Metadata.Episode, lf.Metadata.AniDBEpisode)
return
}
// No episode number, but the media only has 1 episode
if episode == -1 && media.GetCurrentEpisodeCount() == 1 {
lf.Metadata.Episode = 1 // Coerce episode number to 1 because it is used for tracking
lf.Metadata.AniDBEpisode = "1"
/*Log */
if fh.ScanLogger != nil {
fh.logFileHydration(zerolog.WarnLevel, lf, mId, episode).
Str("warning", "No episode number found, but the media only has 1 episode").
Msg("File has been marked as main")
}
fh.ScanSummaryLogger.LogMetadataMain(lf, lf.Metadata.Episode, lf.Metadata.AniDBEpisode)
return
}
// Still no episode number and the media has more than 1 episode and is not a movie
// We will mark it as a special episode
if episode == -1 {
lf.Metadata.Type = anime.LocalFileTypeSpecial
lf.Metadata.Episode = 1
lf.Metadata.AniDBEpisode = "S1"
/*Log */
if fh.ScanLogger != nil {
fh.logFileHydration(zerolog.ErrorLevel, lf, mId, episode).
Msg("No episode number found, file has been marked as special")
}
fh.ScanSummaryLogger.LogMetadataEpisodeNormalizationFailed(lf, errors.New("no episode number found"), lf.Metadata.Episode, lf.Metadata.AniDBEpisode)
return
}
// Absolute episode count
if episode > media.GetCurrentEpisodeCount() && fh.ForceMediaId == 0 {
if !treeFetched {
mediaTreeFetchStart := time.Now()
// Fetch media tree
// The media tree will be used to normalize episode numbers
if err := media.FetchMediaTree(anilist.FetchMediaTreeAll, fh.Platform.GetAnilistClient(), fh.AnilistRateLimiter, tree, fh.CompleteAnimeCache); err == nil {
// Create a new media tree analysis that will be used for episode normalization
mta, _ := NewMediaTreeAnalysis(&MediaTreeAnalysisOptions{
tree: tree,
metadataProvider: fh.MetadataProvider,
rateLimiter: rateLimiter,
})
// Hoist the media tree analysis, so it will be used by other files
// We don't care if it's nil because [normalizeEpisodeNumberAndHydrate] will handle it
mediaTreeAnalysis = mta
treeFetched = true
/*Log */
if mta != nil && mta.branches != nil {
if fh.ScanLogger != nil {
fh.ScanLogger.LogFileHydrator(zerolog.DebugLevel).
Int("mediaId", mId).
Int64("ms", time.Since(mediaTreeFetchStart).Milliseconds()).
Int("requests", len(mediaTreeAnalysis.branches)).
Any("branches", mediaTreeAnalysis.printBranches()).
Msg("Media tree fetched")
}
fh.ScanSummaryLogger.LogMetadataMediaTreeFetched(lf, time.Since(mediaTreeFetchStart).Milliseconds(), len(mediaTreeAnalysis.branches))
}
} else {
if fh.ScanLogger != nil {
fh.ScanLogger.LogFileHydrator(zerolog.ErrorLevel).
Int("mediaId", mId).
Str("error", err.Error()).
Int64("ms", time.Since(mediaTreeFetchStart).Milliseconds()).
Msg("Could not fetch media tree")
}
fh.ScanSummaryLogger.LogMetadataMediaTreeFetchFailed(lf, err, time.Since(mediaTreeFetchStart).Milliseconds())
}
}
// Normalize episode number
if err := fh.normalizeEpisodeNumberAndHydrate(mediaTreeAnalysis, lf, episode, media.GetCurrentEpisodeCount()); err != nil {
/*Log */
if fh.ScanLogger != nil {
fh.logFileHydration(zerolog.WarnLevel, lf, mId, episode).
Dict("mediaTreeAnalysis", zerolog.Dict().
Bool("normalized", false).
Str("error", err.Error()).
Str("reason", "Episode normalization failed"),
).
Msg("File has been marked as special")
}
fh.ScanSummaryLogger.LogMetadataEpisodeNormalizationFailed(lf, err, lf.Metadata.Episode, lf.Metadata.AniDBEpisode)
} else {
/*Log */
if fh.ScanLogger != nil {
fh.logFileHydration(zerolog.DebugLevel, lf, mId, episode).
Dict("mediaTreeAnalysis", zerolog.Dict().
Bool("normalized", true).
Bool("hasNewMediaId", lf.MediaId != mId).
Int("newMediaId", lf.MediaId),
).
Msg("File has been marked as main")
}
fh.ScanSummaryLogger.LogMetadataEpisodeNormalized(lf, mId, episode, lf.Metadata.Episode, lf.MediaId, lf.Metadata.AniDBEpisode)
}
return
}
// Absolute episode count with forced media ID
if fh.ForceMediaId != 0 && episode > media.GetCurrentEpisodeCount() {
// When we encounter a file with an episode number higher than the media's episode count
// and a forced media ID is set, we fetch the anime metadata for that ID and use it to compute the episode offset
animeMetadata, err := fh.MetadataProvider.GetAnimeMetadata(metadata.AnilistPlatform, fh.ForceMediaId)
if err != nil {
/*Log */
if fh.ScanLogger != nil {
fh.logFileHydration(zerolog.ErrorLevel, lf, mId, episode).
Str("error", err.Error()).
Msg("Could not fetch AniDB metadata")
}
lf.Metadata.Episode = episode
lf.Metadata.AniDBEpisode = strconv.Itoa(episode)
lf.MediaId = fh.ForceMediaId
fh.ScanSummaryLogger.LogMetadataEpisodeNormalizationFailed(lf, errors.New("could not fetch AniDB metadata"), lf.Metadata.Episode, lf.Metadata.AniDBEpisode)
return
}
// Get the first episode to calculate the offset
firstEp, ok := animeMetadata.Episodes["1"]
if !ok {
/*Log */
if fh.ScanLogger != nil {
fh.logFileHydration(zerolog.ErrorLevel, lf, mId, episode).
Msg("Could not find absolute episode offset")
}
lf.Metadata.Episode = episode
lf.Metadata.AniDBEpisode = strconv.Itoa(episode)
lf.MediaId = fh.ForceMediaId
fh.ScanSummaryLogger.LogMetadataEpisodeNormalizationFailed(lf, errors.New("could not find absolute episode offset"), lf.Metadata.Episode, lf.Metadata.AniDBEpisode)
return
}
// ref: media_tree_analysis.go
usePartEpisodeNumber := firstEp.EpisodeNumber > 1 && firstEp.AbsoluteEpisodeNumber-firstEp.EpisodeNumber > 1
minPartAbsoluteEpisodeNumber := 0
maxPartAbsoluteEpisodeNumber := 0
if usePartEpisodeNumber {
minPartAbsoluteEpisodeNumber = firstEp.EpisodeNumber
maxPartAbsoluteEpisodeNumber = minPartAbsoluteEpisodeNumber + animeMetadata.GetMainEpisodeCount() - 1
}
absoluteEpisodeNumber := firstEp.AbsoluteEpisodeNumber
// Calculate the relative episode number
relativeEp := episode
// Let's say the media has 12 episodes and the file is "episode 13"
// If the [partAbsoluteEpisodeNumber] is 13, then the [relativeEp] will be 1, we can safely ignore the [absoluteEpisodeNumber]
// e.g. 13 - (13-1) = 1
if minPartAbsoluteEpisodeNumber <= episode && maxPartAbsoluteEpisodeNumber >= episode {
relativeEp = episode - (minPartAbsoluteEpisodeNumber - 1)
} else {
// Let's say the media has 12 episodes and the file is "episode 38"
// The [absoluteEpisodeNumber] will be 38 and the [relativeEp] will be 1
// e.g. 38 - (38-1) = 1
relativeEp = episode - (absoluteEpisodeNumber - 1)
}
if relativeEp < 1 {
if fh.ScanLogger != nil {
fh.logFileHydration(zerolog.WarnLevel, lf, mId, episode).
Dict("normalization", zerolog.Dict().
Bool("normalized", false).
Str("reason", "Episode normalization failed, could not find relative episode number"),
).
Msg("File has been marked as main")
}
lf.Metadata.Episode = episode
lf.Metadata.AniDBEpisode = strconv.Itoa(episode)
lf.MediaId = fh.ForceMediaId
fh.ScanSummaryLogger.LogMetadataEpisodeNormalizationFailed(lf, errors.New("could not find relative episode number"), lf.Metadata.Episode, lf.Metadata.AniDBEpisode)
return
}
if fh.ScanLogger != nil {
fh.logFileHydration(zerolog.DebugLevel, lf, mId, relativeEp).
Dict("mediaTreeAnalysis", zerolog.Dict().
Bool("normalized", true).
Int("forcedMediaId", fh.ForceMediaId),
).
Msg("File has been marked as main")
}
lf.Metadata.Episode = relativeEp
lf.Metadata.AniDBEpisode = strconv.Itoa(relativeEp)
lf.MediaId = fh.ForceMediaId
fh.ScanSummaryLogger.LogMetadataMain(lf, lf.Metadata.Episode, lf.Metadata.AniDBEpisode)
return
}
})
}
func (fh *FileHydrator) logFileHydration(level zerolog.Level, lf *anime.LocalFile, mId int, episode int) *zerolog.Event {
return fh.ScanLogger.LogFileHydrator(level).
Str("filename", lf.Name).
Int("mediaId", mId).
Dict("vars", zerolog.Dict().
Str("parsedEpisode", lf.ParsedData.Episode).
Int("episode", episode),
).
Dict("metadata", zerolog.Dict().
Int("episode", lf.Metadata.Episode).
Str("aniDBEpisode", lf.Metadata.AniDBEpisode))
}
// normalizeEpisodeNumberAndHydrate will normalize the episode number and hydrate the metadata of the LocalFile.
// If the MediaTreeAnalysis is nil, the episode number will not be normalized.
func (fh *FileHydrator) normalizeEpisodeNumberAndHydrate(
mta *MediaTreeAnalysis,
lf *anime.LocalFile,
ep int, // The absolute episode number of the media
maxEp int, // The maximum episode number of the media
) error {
// No media tree analysis
if mta == nil {
diff := ep - maxEp // e.g. 14 - 12 = 2
// Let's consider this a special episode (it might not exist on AniDB, but it's better than setting everything to "S1")
lf.Metadata.Episode = diff // e.g. 2
lf.Metadata.AniDBEpisode = "S" + strconv.Itoa(diff) // e.g. S2
lf.Metadata.Type = anime.LocalFileTypeSpecial
return errors.New("[hydrator] could not find media tree")
}
relativeEp, mediaId, ok := mta.getRelativeEpisodeNumber(ep)
if !ok {
diff := ep - maxEp // e.g. 14 - 12 = 2
// Do the same as above
lf.Metadata.Episode = diff
lf.Metadata.AniDBEpisode = "S" + strconv.Itoa(diff) // e.g. S2
lf.Metadata.Type = anime.LocalFileTypeSpecial
return errors.New("[hydrator] could not find relative episode number from media tree")
}
lf.Metadata.Episode = relativeEp
lf.Metadata.AniDBEpisode = strconv.Itoa(relativeEp)
lf.MediaId = mediaId
return nil
}
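// DEVNOTE (illustrative sketch, not part of the original file): when a media tree analysis is
// available, normalization maps an absolute episode number onto a sequel entry. For example
// (values assumed for illustration): if the matched media has 12 episodes and the file is
// "episode 14", getRelativeEpisodeNumber(14) may return (2, <sequel media ID>, true), so the file
// is re-assigned to the sequel with Metadata.Episode = 2 and Metadata.AniDBEpisode = "2".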

View File

@@ -0,0 +1,122 @@
package scanner
import (
"seanime/internal/api/anilist"
"seanime/internal/api/metadata"
"seanime/internal/library/anime"
"seanime/internal/platforms/anilist_platform"
"seanime/internal/util"
"seanime/internal/util/limiter"
"testing"
)
func TestFileHydrator_HydrateMetadata(t *testing.T) {
completeAnimeCache := anilist.NewCompleteAnimeCache()
anilistRateLimiter := limiter.NewAnilistLimiter()
logger := util.NewLogger()
metadataProvider := metadata.GetMockProvider(t)
anilistClient := anilist.TestGetMockAnilistClient()
anilistPlatform := anilist_platform.NewAnilistPlatform(anilistClient, logger)
animeCollection, err := anilistPlatform.GetAnimeCollectionWithRelations(t.Context())
if err != nil {
t.Fatal("expected result, got error:", err.Error())
}
allMedia := animeCollection.GetAllAnime()
tests := []struct {
name string
paths []string
expectedMediaId int
}{
{
name: "should be hydrated with id 131586",
paths: []string{
"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 20v2 (1080p) [30072859].mkv",
"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 21v2 (1080p) [4B1616A5].mkv",
"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 22v2 (1080p) [58BF43B4].mkv",
"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 23v2 (1080p) [D94B4894].mkv",
},
expectedMediaId: 131586, // 86 - Eighty Six Part 2
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
scanLogger, err := NewConsoleScanLogger()
if err != nil {
t.Fatal("expected result, got error:", err.Error())
}
// +---------------------+
// | Local Files |
// +---------------------+
var lfs []*anime.LocalFile
for _, path := range tt.paths {
lf := anime.NewLocalFile(path, "E:/Anime")
lfs = append(lfs, lf)
}
// +---------------------+
// | MediaContainer |
// +---------------------+
mc := NewMediaContainer(&MediaContainerOptions{
AllMedia: allMedia,
ScanLogger: scanLogger,
})
for _, nm := range mc.NormalizedMedia {
t.Logf("media id: %d, title: %s", nm.ID, nm.GetTitleSafe())
}
// +---------------------+
// | Matcher |
// +---------------------+
matcher := &Matcher{
LocalFiles: lfs,
MediaContainer: mc,
CompleteAnimeCache: nil,
Logger: util.NewLogger(),
ScanLogger: scanLogger,
}
err = matcher.MatchLocalFilesWithMedia()
if err != nil {
t.Fatal("expected result, got error:", err.Error())
}
// +---------------------+
// | FileHydrator |
// +---------------------+
fh := &FileHydrator{
LocalFiles: lfs,
AllMedia: mc.NormalizedMedia,
CompleteAnimeCache: completeAnimeCache,
Platform: anilistPlatform,
AnilistRateLimiter: anilistRateLimiter,
MetadataProvider: metadataProvider,
Logger: logger,
ScanLogger: scanLogger,
}
fh.HydrateMetadata()
for _, lf := range fh.LocalFiles {
if lf.MediaId != tt.expectedMediaId {
t.Fatalf("expected media id %d, got %d", tt.expectedMediaId, lf.MediaId)
}
t.Logf("local file: %s,\nmedia id: %d\n", lf.Name, lf.MediaId)
}
})
}
}

View File

@@ -0,0 +1,3 @@
package scanner
// .seaignore

View File

@@ -0,0 +1,28 @@
package scanner
import (
"github.com/rs/zerolog"
lop "github.com/samber/lo/parallel"
"seanime/internal/library/anime"
"seanime/internal/library/filesystem"
)
// GetLocalFilesFromDir creates a new LocalFile for each media file found under dirPath
func GetLocalFilesFromDir(dirPath string, logger *zerolog.Logger) ([]*anime.LocalFile, error) {
paths, err := filesystem.GetMediaFilePathsFromDirS(dirPath)
logger.Trace().
Any("dirPath", dirPath).
Msg("localfile: Retrieving and creating local files")
// Concurrently populate localFiles
localFiles := lop.Map(paths, func(path string, index int) *anime.LocalFile {
return anime.NewLocalFile(path, dirPath)
})
logger.Trace().
Any("count", len(localFiles)).
Msg("localfile: Retrieved local files")
return localFiles, err
}
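// Usage sketch (illustrative; the directory path is hypothetical):
//
//	logger := util.NewLogger()
//	lfs, err := GetLocalFilesFromDir("E:/Anime", logger)
//	// each *anime.LocalFile now carries the parsed title/episode data used by the Matcher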

View File

@@ -0,0 +1,21 @@
package scanner
import (
"github.com/stretchr/testify/assert"
"seanime/internal/util"
"testing"
)
func TestGetLocalFilesFromDir(t *testing.T) {
t.Skip("Skipping test that requires local files")
var dir = "E:/Anime"
logger := util.NewLogger()
localFiles, err := GetLocalFilesFromDir(dir, logger)
if assert.NoError(t, err) {
t.Logf("Found %d local files", len(localFiles))
}
}

View File

@@ -0,0 +1,552 @@
package scanner
import (
"errors"
"fmt"
"math"
"seanime/internal/api/anilist"
"seanime/internal/hook"
"seanime/internal/library/anime"
"seanime/internal/library/summary"
"seanime/internal/util"
"seanime/internal/util/comparison"
"time"
"github.com/adrg/strutil/metrics"
"github.com/rs/zerolog"
"github.com/samber/lo"
lop "github.com/samber/lo/parallel"
"github.com/sourcegraph/conc/pool"
)
type Matcher struct {
LocalFiles []*anime.LocalFile
MediaContainer *MediaContainer
CompleteAnimeCache *anilist.CompleteAnimeCache
Logger *zerolog.Logger
ScanLogger *ScanLogger
ScanSummaryLogger *summary.ScanSummaryLogger // optional
Algorithm string
Threshold float64
}
var (
ErrNoLocalFiles = errors.New("[matcher] no local files")
)
// MatchLocalFilesWithMedia will match each anime.LocalFile with a specific anilist.BaseAnime and modify the LocalFile's `mediaId`
func (m *Matcher) MatchLocalFilesWithMedia() error {
if m.Threshold == 0 {
m.Threshold = 0.5
}
start := time.Now()
if len(m.LocalFiles) == 0 {
if m.ScanLogger != nil {
m.ScanLogger.LogMatcher(zerolog.WarnLevel).Msg("No local files")
}
return ErrNoLocalFiles
}
if len(m.MediaContainer.allMedia) == 0 {
if m.ScanLogger != nil {
m.ScanLogger.LogMatcher(zerolog.WarnLevel).Msg("No media fed into the matcher")
}
return errors.New("[matcher] no media fed into the matcher")
}
m.Logger.Debug().Msg("matcher: Starting matching process")
// Invoke ScanMatchingStarted hook
event := &ScanMatchingStartedEvent{
LocalFiles: m.LocalFiles,
NormalizedMedia: m.MediaContainer.NormalizedMedia,
Algorithm: m.Algorithm,
Threshold: m.Threshold,
}
_ = hook.GlobalHookManager.OnScanMatchingStarted().Trigger(event)
m.LocalFiles = event.LocalFiles
m.MediaContainer.NormalizedMedia = event.NormalizedMedia
m.Algorithm = event.Algorithm
m.Threshold = event.Threshold
if event.DefaultPrevented {
m.Logger.Debug().Msg("matcher: Match stopped by hook")
return nil
}
// Parallelize the matching process
lop.ForEach(m.LocalFiles, func(localFile *anime.LocalFile, _ int) {
m.matchLocalFileWithMedia(localFile)
})
// m.validateMatches()
// Invoke ScanMatchingCompleted hook
completedEvent := &ScanMatchingCompletedEvent{
LocalFiles: m.LocalFiles,
}
_ = hook.GlobalHookManager.OnScanMatchingCompleted().Trigger(completedEvent)
m.LocalFiles = completedEvent.LocalFiles
if m.ScanLogger != nil {
m.ScanLogger.LogMatcher(zerolog.InfoLevel).
Int64("ms", time.Since(start).Milliseconds()).
Int("files", len(m.LocalFiles)).
Int("unmatched", lo.CountBy(m.LocalFiles, func(localFile *anime.LocalFile) bool {
return localFile.MediaId == 0
})).
Msg("Finished matching process")
}
return nil
}
// matchLocalFileWithMedia finds the best match for the local file
// If the best match is above a certain threshold, set the local file's mediaId to the best match's id
// If the best match is below a certain threshold, leave the local file's mediaId to 0
func (m *Matcher) matchLocalFileWithMedia(lf *anime.LocalFile) {
defer util.HandlePanicInModuleThenS("scanner/matcher/matchLocalFileWithMedia", func(stackTrace string) {
lf.MediaId = 0
/*Log*/
if m.ScanLogger != nil {
m.ScanLogger.LogMatcher(zerolog.ErrorLevel).
Str("filename", lf.Name).
Msg("Panic occurred, file un-matched")
}
m.ScanSummaryLogger.LogPanic(lf, stackTrace)
})
// Check if the local file has already been matched
if lf.MediaId != 0 {
if m.ScanLogger != nil {
m.ScanLogger.LogMatcher(zerolog.DebugLevel).
Str("filename", lf.Name).
Msg("File already matched")
}
m.ScanSummaryLogger.LogFileNotMatched(lf, "Already matched")
return
}
// Check if the local file has a title
if lf.GetParsedTitle() == "" {
if m.ScanLogger != nil {
m.ScanLogger.LogMatcher(zerolog.WarnLevel).
Str("filename", lf.Name).
Msg("File has no parsed title")
}
m.ScanSummaryLogger.LogFileNotMatched(lf, "No parsed title found")
return
}
// Create title variations
// Check cache for title variation
titleVariations := lf.GetTitleVariations()
if len(titleVariations) == 0 {
if m.ScanLogger != nil {
m.ScanLogger.LogMatcher(zerolog.WarnLevel).
Str("filename", lf.Name).
Msg("No titles found")
}
m.ScanSummaryLogger.LogFileNotMatched(lf, "No title variations found")
return
}
if m.ScanLogger != nil {
m.ScanLogger.LogMatcher(zerolog.DebugLevel).
Str("filename", lf.Name).
Interface("titleVariations", titleVariations).
Msg("Matching local file")
}
m.ScanSummaryLogger.LogDebug(lf, util.InlineSpewT(titleVariations))
//------------------
var levMatch *comparison.LevenshteinResult
var sdMatch *comparison.SorensenDiceResult
var jaccardMatch *comparison.JaccardResult
if m.Algorithm == "jaccard" {
// Using Jaccard
// Get the matches for each title variation
compResults := lop.Map(titleVariations, func(title *string, _ int) *comparison.JaccardResult {
comps := make([]*comparison.JaccardResult, 0)
if len(m.MediaContainer.engTitles) > 0 {
if eng, found := comparison.FindBestMatchWithJaccard(title, m.MediaContainer.engTitles); found {
comps = append(comps, eng)
}
}
if len(m.MediaContainer.romTitles) > 0 {
if rom, found := comparison.FindBestMatchWithJaccard(title, m.MediaContainer.romTitles); found {
comps = append(comps, rom)
}
}
if len(m.MediaContainer.synonyms) > 0 {
if syn, found := comparison.FindBestMatchWithJaccard(title, m.MediaContainer.synonyms); found {
comps = append(comps, syn)
}
}
var res *comparison.JaccardResult
if len(comps) > 1 {
res = lo.Reduce(comps, func(prev *comparison.JaccardResult, curr *comparison.JaccardResult, _ int) *comparison.JaccardResult {
if prev.Rating > curr.Rating {
return prev
} else {
return curr
}
}, comps[0])
} else if len(comps) == 1 {
return comps[0]
}
return res
})
// Retrieve the match from all the title variations results
jaccardMatch = lo.Reduce(compResults, func(prev *comparison.JaccardResult, curr *comparison.JaccardResult, _ int) *comparison.JaccardResult {
if prev.Rating > curr.Rating {
return prev
} else {
return curr
}
}, compResults[0])
if m.ScanLogger != nil {
m.ScanLogger.LogMatcher(zerolog.DebugLevel).
Str("filename", lf.Name).
Interface("match", jaccardMatch).
Interface("results", compResults).
Msg("Jaccard match")
}
m.ScanSummaryLogger.LogComparison(lf, "Jaccard", *jaccardMatch.Value, "Rating", util.InlineSpewT(jaccardMatch.Rating))
} else if m.Algorithm == "sorensen-dice" {
// Using Sorensen-Dice
// Get the matches for each title variation
compResults := lop.Map(titleVariations, func(title *string, _ int) *comparison.SorensenDiceResult {
comps := make([]*comparison.SorensenDiceResult, 0)
if len(m.MediaContainer.engTitles) > 0 {
if eng, found := comparison.FindBestMatchWithSorensenDice(title, m.MediaContainer.engTitles); found {
comps = append(comps, eng)
}
}
if len(m.MediaContainer.romTitles) > 0 {
if rom, found := comparison.FindBestMatchWithSorensenDice(title, m.MediaContainer.romTitles); found {
comps = append(comps, rom)
}
}
if len(m.MediaContainer.synonyms) > 0 {
if syn, found := comparison.FindBestMatchWithSorensenDice(title, m.MediaContainer.synonyms); found {
comps = append(comps, syn)
}
}
var res *comparison.SorensenDiceResult
if len(comps) > 1 {
res = lo.Reduce(comps, func(prev *comparison.SorensenDiceResult, curr *comparison.SorensenDiceResult, _ int) *comparison.SorensenDiceResult {
if prev.Rating > curr.Rating {
return prev
} else {
return curr
}
}, comps[0])
} else if len(comps) == 1 {
return comps[0]
}
return res
})
// Retrieve the match from all the title variations results
sdMatch = lo.Reduce(compResults, func(prev *comparison.SorensenDiceResult, curr *comparison.SorensenDiceResult, _ int) *comparison.SorensenDiceResult {
if prev.Rating > curr.Rating {
return prev
} else {
return curr
}
}, compResults[0])
//util.Spew(compResults)
if m.ScanLogger != nil {
m.ScanLogger.LogMatcher(zerolog.DebugLevel).
Str("filename", lf.Name).
Interface("match", sdMatch).
Interface("results", compResults).
Msg("Sorensen-Dice match")
}
m.ScanSummaryLogger.LogComparison(lf, "Sorensen-Dice", *sdMatch.Value, "Rating", util.InlineSpewT(sdMatch.Rating))
} else {
// Using Levenshtein
// Get the matches for each title variation
levCompResults := lop.Map(titleVariations, func(title *string, _ int) *comparison.LevenshteinResult {
comps := make([]*comparison.LevenshteinResult, 0)
if len(m.MediaContainer.engTitles) > 0 {
if eng, found := comparison.FindBestMatchWithLevenshtein(title, m.MediaContainer.engTitles); found {
comps = append(comps, eng)
}
}
if len(m.MediaContainer.romTitles) > 0 {
if rom, found := comparison.FindBestMatchWithLevenshtein(title, m.MediaContainer.romTitles); found {
comps = append(comps, rom)
}
}
if len(m.MediaContainer.synonyms) > 0 {
if syn, found := comparison.FindBestMatchWithLevenshtein(title, m.MediaContainer.synonyms); found {
comps = append(comps, syn)
}
}
var res *comparison.LevenshteinResult
if len(comps) > 1 {
res = lo.Reduce(comps, func(prev *comparison.LevenshteinResult, curr *comparison.LevenshteinResult, _ int) *comparison.LevenshteinResult {
if prev.Distance < curr.Distance {
return prev
} else {
return curr
}
}, comps[0])
} else if len(comps) == 1 {
return comps[0]
}
return res
})
levMatch = lo.Reduce(levCompResults, func(prev *comparison.LevenshteinResult, curr *comparison.LevenshteinResult, _ int) *comparison.LevenshteinResult {
if prev.Distance < curr.Distance {
return prev
} else {
return curr
}
}, levCompResults[0])
if m.ScanLogger != nil {
m.ScanLogger.LogMatcher(zerolog.DebugLevel).
Str("filename", lf.Name).
Interface("match", levMatch).
Interface("results", levCompResults).
Int("distance", levMatch.Distance).
Msg("Levenshtein match")
}
m.ScanSummaryLogger.LogComparison(lf, "Levenshtein", *levMatch.Value, "Distance", util.InlineSpewT(levMatch.Distance))
}
//------------------
var mediaMatch *anime.NormalizedMedia
var found bool
finalRating := 0.0
if sdMatch != nil {
finalRating = sdMatch.Rating
mediaMatch, found = m.MediaContainer.GetMediaFromTitleOrSynonym(sdMatch.Value)
} else if jaccardMatch != nil {
finalRating = jaccardMatch.Rating
mediaMatch, found = m.MediaContainer.GetMediaFromTitleOrSynonym(jaccardMatch.Value)
} else {
dice := metrics.NewSorensenDice()
dice.CaseSensitive = false
dice.NgramSize = 1
finalRating = dice.Compare(*levMatch.OriginalValue, *levMatch.Value)
m.ScanSummaryLogger.LogComparison(lf, "Sorensen-Dice", *levMatch.Value, "Final rating", util.InlineSpewT(finalRating))
mediaMatch, found = m.MediaContainer.GetMediaFromTitleOrSynonym(levMatch.Value)
}
// After setting the mediaId, add the hook invocation
// Invoke ScanLocalFileMatched hook
event := &ScanLocalFileMatchedEvent{
LocalFile: lf,
Score: finalRating,
Match: mediaMatch,
Found: found,
}
hook.GlobalHookManager.OnScanLocalFileMatched().Trigger(event)
lf = event.LocalFile
mediaMatch = event.Match
found = event.Found
finalRating = event.Score
// Check if the hook overrode the match
if event.DefaultPrevented {
if m.ScanLogger != nil {
if mediaMatch != nil {
m.ScanLogger.LogMatcher(zerolog.DebugLevel).
Str("filename", lf.Name).
Int("id", mediaMatch.ID).
Msg("Hook overrode match")
} else {
m.ScanLogger.LogMatcher(zerolog.DebugLevel).
Str("filename", lf.Name).
Msg("Hook overrode match, no match found")
}
}
if mediaMatch != nil {
lf.MediaId = mediaMatch.ID
} else {
lf.MediaId = 0
}
return
}
if !found {
if m.ScanLogger != nil {
m.ScanLogger.LogMatcher(zerolog.ErrorLevel).
Str("filename", lf.Name).
Msg("No media found from comparison result")
}
m.ScanSummaryLogger.LogFileNotMatched(lf, "No media found from comparison result")
return
}
if m.ScanLogger != nil {
m.ScanLogger.LogMatcher(zerolog.DebugLevel).
Str("filename", lf.Name).
Str("title", mediaMatch.GetTitleSafe()).
Int("id", mediaMatch.ID).
Msg("Best match found")
}
if finalRating < m.Threshold {
if m.ScanLogger != nil {
m.ScanLogger.LogMatcher(zerolog.DebugLevel).
Str("filename", lf.Name).
Float64("rating", finalRating).
Float64("threshold", m.Threshold).
Msg("Best match Sorensen-Dice rating too low, un-matching file")
}
m.ScanSummaryLogger.LogFailedMatch(lf, "Rating too low, threshold is "+fmt.Sprintf("%f", m.Threshold))
return
}
if m.ScanLogger != nil {
m.ScanLogger.LogMatcher(zerolog.DebugLevel).
Str("filename", lf.Name).
Float64("rating", finalRating).
Float64("threshold", m.Threshold).
Msg("Best match rating high enough, matching file")
}
m.ScanSummaryLogger.LogSuccessfullyMatched(lf, mediaMatch.ID)
lf.MediaId = mediaMatch.ID
}
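// exampleThresholdDecision is a minimal sketch (hypothetical helper, not used by the
// matcher) of the final decision made above: a candidate match is kept only when its
// rating meets the threshold, which defaults to 0.5 in MatchLocalFilesWithMedia.
func exampleThresholdDecision(rating, threshold float64) bool {
	if threshold == 0 {
		threshold = 0.5 // same default applied by MatchLocalFilesWithMedia
	}
	// matchLocalFileWithMedia un-matches the file when finalRating < threshold
	return rating >= threshold
}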
//----------------------------------------------------------------------------------------------------------------------
// validateMatches compares groups of local files' titles with the media titles and un-matches the local files that have a lower rating than the highest rating.
func (m *Matcher) validateMatches() {
if m.ScanLogger != nil {
m.ScanLogger.LogMatcher(zerolog.InfoLevel).Msg("Validating matches")
}
// Group local files by media ID
groups := lop.GroupBy(m.LocalFiles, func(localFile *anime.LocalFile) int {
return localFile.MediaId
})
// Remove the group with unmatched media
delete(groups, 0)
// Un-match files with lower ratings
p := pool.New()
for mId, files := range groups {
p.Go(func() {
if len(files) > 0 {
m.validateMatchGroup(mId, files)
}
})
}
p.Wait()
}
// validateMatchGroup compares the local files' titles under the same media
// with the media titles and un-matches the local files that have a lower rating.
// This is done to try and filter out wrong matches.
func (m *Matcher) validateMatchGroup(mediaId int, lfs []*anime.LocalFile) {
media, found := m.MediaContainer.GetMediaFromId(mediaId)
if !found {
if m.ScanLogger != nil {
m.ScanLogger.LogMatcher(zerolog.ErrorLevel).
Int("mediaId", mediaId).
Msg("Media not found in media container")
}
return
}
titles := media.GetAllTitles()
// Compare all files' parsed title with the media title
// Get the highest rating that will be used to un-match lower rated files
p := pool.NewWithResults[float64]()
for _, lf := range lfs {
p.Go(func() float64 {
t := lf.GetParsedTitle()
if comparison.ValueContainsSpecial(lf.Name) || comparison.ValueContainsNC(lf.Name) {
return 0
}
compRes, ok := comparison.FindBestMatchWithSorensenDice(&t, titles)
if ok {
return compRes.Rating
}
return 0
})
}
fileRatings := p.Wait()
if m.ScanLogger != nil {
m.ScanLogger.LogMatcher(zerolog.DebugLevel).
Int("mediaId", mediaId).
Any("fileRatings", fileRatings).
Msg("File ratings")
}
highestRating := lo.Reduce(fileRatings, func(prev float64, curr float64, _ int) float64 {
if prev > curr {
return prev
} else {
return curr
}
}, 0.0)
// Un-match files that have a lower rating than the ceiling
// UNLESS they are Special or NC
lop.ForEach(lfs, func(lf *anime.LocalFile, _ int) {
if !comparison.ValueContainsSpecial(lf.Name) && !comparison.ValueContainsNC(lf.Name) {
t := lf.GetParsedTitle()
if compRes, ok := comparison.FindBestMatchWithSorensenDice(&t, titles); ok {
// If the local file's rating is lower, un-match it
// Unless the difference is less than 0.7 (very lax since a lot of anime have very long names that can be truncated)
if compRes.Rating < highestRating && math.Abs(compRes.Rating-highestRating) > 0.7 {
lf.MediaId = 0
if m.ScanLogger != nil {
m.ScanLogger.LogMatcher(zerolog.WarnLevel).
Int("mediaId", mediaId).
Str("filename", lf.Name).
Float64("rating", compRes.Rating).
Float64("highestRating", highestRating).
Msg("Rating does not match parameters, un-matching file")
}
m.ScanSummaryLogger.LogUnmatched(lf, fmt.Sprintf("Rating does not match parameters. File rating: %f, highest rating: %f", compRes.Rating, highestRating))
} else {
if m.ScanLogger != nil {
m.ScanLogger.LogMatcher(zerolog.DebugLevel).
Int("mediaId", mediaId).
Str("filename", lf.Name).
Float64("rating", compRes.Rating).
Float64("highestRating", highestRating).
Msg("Rating matches parameters, keeping file matched")
}
m.ScanSummaryLogger.LogMatchValidated(lf, mediaId)
}
}
}
})
}
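// Worked example of the validation rule above (illustrative): with a group's highest
// rating at 0.92, a file rated 0.15 is un-matched because the gap (0.77) exceeds 0.7,
// while a file rated 0.60 is kept (gap 0.32); the lax threshold accounts for long
// titles that are often truncated in file names.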

View File

@@ -0,0 +1,244 @@
package scanner
import (
"context"
"seanime/internal/api/anilist"
"seanime/internal/library/anime"
"seanime/internal/test_utils"
"seanime/internal/util"
"testing"
"github.com/samber/lo"
"github.com/stretchr/testify/assert"
)
// Add more media to this file if needed
// scanner_test_mock_data.json
func TestMatcher_MatchLocalFileWithMedia(t *testing.T) {
anilistClient := anilist.TestGetMockAnilistClient()
animeCollection, err := anilistClient.AnimeCollectionWithRelations(context.Background(), nil)
if err != nil {
t.Fatal(err.Error())
}
allMedia := animeCollection.GetAllAnime()
dir := "E:/Anime"
tests := []struct {
name string
paths []string
expectedMediaId int
}{
{
// These local files are from "86 - Eighty Six Part 2" but should be matched with "86 - Eighty Six Part 1"
// because there is no indication for the part. However, the FileHydrator will fix this issue.
name: "should match with media id 116589",
paths: []string{
"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 20v2 (1080p) [30072859].mkv",
"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 21v2 (1080p) [4B1616A5].mkv",
"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 22v2 (1080p) [58BF43B4].mkv",
"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 23v2 (1080p) [D94B4894].mkv",
},
expectedMediaId: 116589, // 86 - Eighty Six Part 1
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
scanLogger, err := NewConsoleScanLogger()
if err != nil {
t.Fatal("expected result, got error:", err.Error())
}
// +---------------------+
// | Local Files |
// +---------------------+
var lfs []*anime.LocalFile
for _, path := range tt.paths {
lf := anime.NewLocalFile(path, dir)
lfs = append(lfs, lf)
}
// +---------------------+
// | MediaContainer |
// +---------------------+
mc := NewMediaContainer(&MediaContainerOptions{
AllMedia: allMedia,
ScanLogger: scanLogger,
})
// +---------------------+
// | Matcher |
// +---------------------+
matcher := &Matcher{
LocalFiles: lfs,
MediaContainer: mc,
CompleteAnimeCache: nil,
Logger: util.NewLogger(),
ScanLogger: scanLogger,
ScanSummaryLogger: nil,
}
err = matcher.MatchLocalFilesWithMedia()
if assert.NoError(t, err, "Error while matching local files") {
for _, lf := range lfs {
if lf.MediaId != tt.expectedMediaId {
t.Fatalf("expected media id %d, got %d", tt.expectedMediaId, lf.MediaId)
}
t.Logf("local file: %s,\nmedia id: %d\n", lf.Name, lf.MediaId)
}
}
})
}
}
func TestMatcher_MatchLocalFileWithMedia2(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.Anilist())
anilistClient := anilist.NewAnilistClient(test_utils.ConfigData.Provider.AnilistJwt)
animeCollection, err := anilistClient.AnimeCollectionWithRelations(context.Background(), &test_utils.ConfigData.Provider.AnilistUsername)
if err != nil {
t.Fatal(err.Error())
}
dir := "E:/Anime"
tests := []struct {
name string
paths []string
expectedMediaId int
otherMediaIds []int
}{
{
name: "Kono Subarashii Sekai ni Shukufuku wo! - 21202",
paths: []string{
"E:/Anime/Kono Subarashii Sekai ni Shukufuku wo!/Kono Subarashii Sekai ni Shukufuku wo! (01-10) [1080p] (Batch)/[HorribleSubs] Kono Subarashii Sekai ni Shukufuku wo! - 01 [1080p].mkv",
"E:/Anime/Kono Subarashii Sekai ni Shukufuku wo!/Kono Subarashii Sekai ni Shukufuku wo! (01-10) [1080p] (Batch)/[HorribleSubs] Kono Subarashii Sekai ni Shukufuku wo! - 02 [1080p].mkv",
"E:/Anime/Kono Subarashii Sekai ni Shukufuku wo!/Kono Subarashii Sekai ni Shukufuku wo! (01-10) [1080p] (Batch)/[HorribleSubs] Kono Subarashii Sekai ni Shukufuku wo! - 03 [1080p].mkv",
},
expectedMediaId: 21202, // Kono Subarashii Sekai ni Shukufuku wo!
},
{
name: "Kono Subarashii Sekai ni Shukufuku wo! 2 - 21699",
paths: []string{
"E:/Anime/Kono Subarashii Sekai ni Shukufuku wo! 2/KonoSuba.God's.Blessing.On.This.Wonderful.World.S02.1080p.BluRay.10-Bit.Dual-Audio.FLAC2.0.x265-YURASUKA/KonoSuba.God's.Blessing.On.This.Wonderful.World.S02E01.1080p.BluRay.10-Bit.Dual-Audio.FLAC2.0.x265-YURASUKA.mkv",
"E:/Anime/Kono Subarashii Sekai ni Shukufuku wo! 2/KonoSuba.God's.Blessing.On.This.Wonderful.World.S02.1080p.BluRay.10-Bit.Dual-Audio.FLAC2.0.x265-YURASUKA/KonoSuba.God's.Blessing.On.This.Wonderful.World.S02E02.1080p.BluRay.10-Bit.Dual-Audio.FLAC2.0.x265-YURASUKA.mkv",
"E:/Anime/Kono Subarashii Sekai ni Shukufuku wo! 2/KonoSuba.God's.Blessing.On.This.Wonderful.World.S02.1080p.BluRay.10-Bit.Dual-Audio.FLAC2.0.x265-YURASUKA/KonoSuba.God's.Blessing.On.This.Wonderful.World.S02E03.1080p.BluRay.10-Bit.Dual-Audio.FLAC2.0.x265-YURASUKA.mkv",
},
expectedMediaId: 21699,
},
{
name: "Demon Slayer: Kimetsu no Yaiba Entertainment District Arc - 142329",
paths: []string{
"E:/Anime/Kimetsu no Yaiba Yuukaku-hen/[Salieri] Demon Slayer - Kimetsu No Yaiba - S3 - Entertainment District - BD (1080P) (HDR) [Dual-Audio]/[Salieri] Demon Slayer S3 - Kimetsu No Yaiba- Entertainment District - 03 (1080P) (HDR) [Dual-Audio].mkv",
},
expectedMediaId: 142329,
},
{
name: "KnY 145139",
paths: []string{
"E:/Anime/Kimetsu no Yaiba Katanakaji no Sato-hen/Demon Slayer S03 1080p Dual Audio BDRip 10 bits DD x265-EMBER/S03E07-Awful Villain [703A5C5B].mkv",
},
expectedMediaId: 145139,
},
{
name: "MT 108465",
paths: []string{
"E:/Anime/Mushoku Tensei Isekai Ittara Honki Dasu/Mushoku Tensei S01+SP 1080p Dual Audio BDRip 10 bits DDP x265-EMBER/Mushoku Tensei S01P01 1080p Dual Audio BDRip 10 bits DD x265-EMBER/S01E01-Jobless Reincarnation V2 [911C3607].mkv",
},
expectedMediaId: 108465,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
// Add media to collection if it doesn't exist
allMedia := animeCollection.GetAllAnime()
hasExpectedMediaId := false
for _, media := range allMedia {
if media.ID == tt.expectedMediaId {
hasExpectedMediaId = true
break
}
}
if !hasExpectedMediaId {
anilist.TestAddAnimeCollectionWithRelationsEntry(animeCollection, tt.expectedMediaId, anilist.TestModifyAnimeCollectionEntryInput{Status: lo.ToPtr(anilist.MediaListStatusCurrent)}, anilistClient)
allMedia = animeCollection.GetAllAnime()
}
for _, otherMediaId := range tt.otherMediaIds {
hasOtherMediaId := false
for _, media := range allMedia {
if media.ID == otherMediaId {
hasOtherMediaId = true
break
}
}
if !hasOtherMediaId {
anilist.TestAddAnimeCollectionWithRelationsEntry(animeCollection, otherMediaId, anilist.TestModifyAnimeCollectionEntryInput{Status: lo.ToPtr(anilist.MediaListStatusCurrent)}, anilistClient)
allMedia = animeCollection.GetAllAnime()
}
}
scanLogger, err := NewConsoleScanLogger()
if err != nil {
t.Fatal("expected result, got error:", err.Error())
}
// +---------------------+
// | Local Files |
// +---------------------+
var lfs []*anime.LocalFile
for _, path := range tt.paths {
lf := anime.NewLocalFile(path, dir)
lfs = append(lfs, lf)
}
// +---------------------+
// | MediaContainer |
// +---------------------+
mc := NewMediaContainer(&MediaContainerOptions{
AllMedia: allMedia,
ScanLogger: scanLogger,
})
// +---------------------+
// | Matcher |
// +---------------------+
matcher := &Matcher{
LocalFiles: lfs,
MediaContainer: mc,
CompleteAnimeCache: nil,
Logger: util.NewLogger(),
ScanLogger: scanLogger,
ScanSummaryLogger: nil,
}
err = matcher.MatchLocalFilesWithMedia()
if assert.NoError(t, err, "Error while matching local files") {
for _, lf := range lfs {
if lf.MediaId != tt.expectedMediaId {
t.Fatalf("expected media id %d, got %d", tt.expectedMediaId, lf.MediaId)
}
t.Logf("local file: %s,\nmedia id: %d\n", lf.Name, lf.MediaId)
}
}
})
}
}

View File

@@ -0,0 +1,146 @@
package scanner
import (
"github.com/rs/zerolog"
"github.com/samber/lo"
lop "github.com/samber/lo/parallel"
"seanime/internal/api/anilist"
"seanime/internal/library/anime"
"seanime/internal/util/comparison"
"strings"
)
type (
MediaContainerOptions struct {
AllMedia []*anilist.CompleteAnime
ScanLogger *ScanLogger
}
MediaContainer struct {
NormalizedMedia []*anime.NormalizedMedia
ScanLogger *ScanLogger
engTitles []*string
romTitles []*string
synonyms []*string
allMedia []*anilist.CompleteAnime
}
)
// NewMediaContainer will create a list of all English titles, Romaji titles, and synonyms from all anilist.BaseAnime (used by Matcher).
//
// The list will include all anilist.BaseAnime and their relations (prequels, sequels, spin-offs, etc...) as NormalizedMedia.
//
// It also provides helper functions to get a NormalizedMedia from a title or synonym (used by FileHydrator).
func NewMediaContainer(opts *MediaContainerOptions) *MediaContainer {
mc := new(MediaContainer)
mc.ScanLogger = opts.ScanLogger
mc.NormalizedMedia = make([]*anime.NormalizedMedia, 0)
normalizedMediaMap := make(map[int]*anime.NormalizedMedia)
for _, m := range opts.AllMedia {
normalizedMediaMap[m.ID] = anime.NewNormalizedMedia(m.ToBaseAnime())
if m.Relations != nil && m.Relations.Edges != nil && len(m.Relations.Edges) > 0 {
for _, edgeM := range m.Relations.Edges {
if edgeM.Node == nil || edgeM.Node.Format == nil || edgeM.RelationType == nil {
continue
}
if *edgeM.Node.Format != anilist.MediaFormatMovie &&
*edgeM.Node.Format != anilist.MediaFormatOva &&
*edgeM.Node.Format != anilist.MediaFormatSpecial &&
*edgeM.Node.Format != anilist.MediaFormatTv {
continue
}
if *edgeM.RelationType != anilist.MediaRelationPrequel &&
*edgeM.RelationType != anilist.MediaRelationSequel &&
*edgeM.RelationType != anilist.MediaRelationSpinOff &&
*edgeM.RelationType != anilist.MediaRelationAlternative &&
*edgeM.RelationType != anilist.MediaRelationParent {
continue
}
// DEVNOTE: Edges fetched from the AniList AnimeCollection query do not contain NextAiringEpisode
// Make sure we don't overwrite the original media in the map that contains NextAiringEpisode
if _, found := normalizedMediaMap[edgeM.Node.ID]; !found {
normalizedMediaMap[edgeM.Node.ID] = anime.NewNormalizedMedia(edgeM.Node)
}
}
}
}
for _, m := range normalizedMediaMap {
mc.NormalizedMedia = append(mc.NormalizedMedia, m)
}
engTitles := lop.Map(mc.NormalizedMedia, func(m *anime.NormalizedMedia, index int) *string {
if m.Title.English != nil {
return m.Title.English
}
return new(string)
})
romTitles := lop.Map(mc.NormalizedMedia, func(m *anime.NormalizedMedia, index int) *string {
if m.Title.Romaji != nil {
return m.Title.Romaji
}
return new(string)
})
_synonymsArr := lop.Map(mc.NormalizedMedia, func(m *anime.NormalizedMedia, index int) []*string {
if m.Synonyms != nil {
return m.Synonyms
}
return make([]*string, 0)
})
synonyms := lo.Flatten(_synonymsArr)
engTitles = lo.Filter(engTitles, func(s *string, i int) bool { return s != nil && len(*s) > 0 })
romTitles = lo.Filter(romTitles, func(s *string, i int) bool { return s != nil && len(*s) > 0 })
synonyms = lo.Filter(synonyms, func(s *string, i int) bool { return comparison.ValueContainsSeason(*s) })
mc.engTitles = engTitles
mc.romTitles = romTitles
mc.synonyms = synonyms
mc.allMedia = opts.AllMedia
if mc.ScanLogger != nil {
mc.ScanLogger.LogMediaContainer(zerolog.InfoLevel).
Any("inputCount", len(opts.AllMedia)).
Any("mediaCount", len(mc.NormalizedMedia)).
Any("titles", len(mc.engTitles)+len(mc.romTitles)+len(mc.synonyms)).
Msg("Created media container")
}
return mc
}
func (mc *MediaContainer) GetMediaFromTitleOrSynonym(title *string) (*anime.NormalizedMedia, bool) {
if title == nil {
return nil, false
}
t := strings.ToLower(*title)
res, found := lo.Find(mc.NormalizedMedia, func(m *anime.NormalizedMedia) bool {
if m.HasEnglishTitle() && t == strings.ToLower(*m.Title.English) {
return true
}
if m.HasRomajiTitle() && t == strings.ToLower(*m.Title.Romaji) {
return true
}
if m.HasSynonyms() {
for _, syn := range m.Synonyms {
if t == strings.ToLower(*syn) {
return true
}
}
}
return false
})
return res, found
}
func (mc *MediaContainer) GetMediaFromId(id int) (*anime.NormalizedMedia, bool) {
res, found := lo.Find(mc.NormalizedMedia, func(m *anime.NormalizedMedia) bool {
if m.ID == id {
return true
}
return false
})
return res, found
}
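// exampleLookupByTitle is a minimal usage sketch (hypothetical, not used elsewhere):
// build a container from a set of media and resolve one of them by title or synonym.
func exampleLookupByTitle(allMedia []*anilist.CompleteAnime, title string) (int, bool) {
	mc := NewMediaContainer(&MediaContainerOptions{AllMedia: allMedia})
	if m, found := mc.GetMediaFromTitleOrSynonym(&title); found {
		return m.ID, true // AniList ID of the normalized media
	}
	return 0, false
}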

View File

@@ -0,0 +1,352 @@
package scanner
import (
"context"
"errors"
"seanime/internal/api/anilist"
"seanime/internal/api/mal"
"seanime/internal/api/metadata"
"seanime/internal/hook"
"seanime/internal/library/anime"
"seanime/internal/platforms/platform"
"seanime/internal/util"
"seanime/internal/util/limiter"
"seanime/internal/util/parallel"
"time"
"github.com/davecgh/go-spew/spew"
"github.com/rs/zerolog"
"github.com/samber/lo"
lop "github.com/samber/lo/parallel"
)
// MediaFetcher holds all anilist.BaseAnime that will be used for the comparison process
type MediaFetcher struct {
AllMedia []*anilist.CompleteAnime
CollectionMediaIds []int
UnknownMediaIds []int // Media IDs that are not in the user's collection
AnimeCollectionWithRelations *anilist.AnimeCollectionWithRelations
ScanLogger *ScanLogger
}
type MediaFetcherOptions struct {
Enhanced bool
Platform platform.Platform
MetadataProvider metadata.Provider
LocalFiles []*anime.LocalFile
CompleteAnimeCache *anilist.CompleteAnimeCache
Logger *zerolog.Logger
AnilistRateLimiter *limiter.Limiter
DisableAnimeCollection bool
ScanLogger *ScanLogger
}
// NewMediaFetcher creates a new MediaFetcher; calling it kicks off the fetch process.
// When enhancing is false, MediaFetcher.AllMedia will contain every anilist.CompleteAnime from the user's AniList collection.
// When enhancing is true, MediaFetcher.AllMedia will also contain the anilist.CompleteAnime resolved from each unique, parsed anime title and their relations.
func NewMediaFetcher(ctx context.Context, opts *MediaFetcherOptions) (ret *MediaFetcher, retErr error) {
defer util.HandlePanicInModuleWithError("library/scanner/NewMediaFetcher", &retErr)
if opts.Platform == nil ||
opts.LocalFiles == nil ||
opts.CompleteAnimeCache == nil ||
opts.MetadataProvider == nil ||
opts.Logger == nil ||
opts.AnilistRateLimiter == nil {
return nil, errors.New("missing options")
}
mf := new(MediaFetcher)
mf.ScanLogger = opts.ScanLogger
opts.Logger.Debug().
Any("enhanced", opts.Enhanced).
Msg("media fetcher: Creating media fetcher")
if mf.ScanLogger != nil {
mf.ScanLogger.LogMediaFetcher(zerolog.InfoLevel).
Msg("Creating media fetcher")
}
// Invoke ScanMediaFetcherStarted hook
event := &ScanMediaFetcherStartedEvent{
Enhanced: opts.Enhanced,
}
hook.GlobalHookManager.OnScanMediaFetcherStarted().Trigger(event)
opts.Enhanced = event.Enhanced
// +---------------------+
// | All media |
// +---------------------+
// Fetch latest user's AniList collection
animeCollectionWithRelations, err := opts.Platform.GetAnimeCollectionWithRelations(ctx)
if err != nil {
return nil, err
}
mf.AnimeCollectionWithRelations = animeCollectionWithRelations
mf.AllMedia = make([]*anilist.CompleteAnime, 0)
if !opts.DisableAnimeCollection {
// For each collection entry, append the media to AllMedia
for _, list := range animeCollectionWithRelations.GetMediaListCollection().GetLists() {
for _, entry := range list.GetEntries() {
mf.AllMedia = append(mf.AllMedia, entry.GetMedia())
// +---------------------+
// | Cache |
// +---------------------+
// We assume the CompleteAnimeCache is empty. Add media to cache.
opts.CompleteAnimeCache.Set(entry.GetMedia().ID, entry.GetMedia())
}
}
}
if mf.ScanLogger != nil {
mf.ScanLogger.LogMediaFetcher(zerolog.DebugLevel).
Int("count", len(mf.AllMedia)).
Msg("Fetched media from AniList collection")
}
//--------------------------------------------
// Get the media IDs from the collection
mf.CollectionMediaIds = lop.Map(mf.AllMedia, func(m *anilist.CompleteAnime, index int) int {
return m.ID
})
//--------------------------------------------
// +---------------------+
// | Enhanced |
// +---------------------+
// If enhancing is on, scan media from local files and get their relations
if opts.Enhanced {
_, ok := FetchMediaFromLocalFiles(
ctx,
opts.Platform,
opts.LocalFiles,
opts.CompleteAnimeCache, // CompleteAnimeCache will be populated on success
opts.MetadataProvider,
opts.AnilistRateLimiter,
mf.ScanLogger,
)
if ok {
// We assume the CompleteAnimeCache is populated. We overwrite AllMedia with the cache content.
// This is because the cache will contain all media from the user's collection AND scanned ones
mf.AllMedia = make([]*anilist.CompleteAnime, 0)
opts.CompleteAnimeCache.Range(func(key int, value *anilist.CompleteAnime) bool {
mf.AllMedia = append(mf.AllMedia, value)
return true
})
}
}
// +---------------------+
// | Unknown media |
// +---------------------+
// Media that are not in the user's collection
// Get the media that are not in the user's collection
unknownMedia := lo.Filter(mf.AllMedia, func(m *anilist.CompleteAnime, _ int) bool {
return !lo.Contains(mf.CollectionMediaIds, m.ID)
})
// Get the media IDs that are not in the user's collection
mf.UnknownMediaIds = lop.Map(unknownMedia, func(m *anilist.CompleteAnime, _ int) int {
return m.ID
})
if mf.ScanLogger != nil {
mf.ScanLogger.LogMediaFetcher(zerolog.DebugLevel).
Int("unknownMediaCount", len(mf.UnknownMediaIds)).
Int("allMediaCount", len(mf.AllMedia)).
Msg("Finished creating media fetcher")
}
// Invoke ScanMediaFetcherCompleted hook
completedEvent := &ScanMediaFetcherCompletedEvent{
AllMedia: mf.AllMedia,
UnknownMediaIds: mf.UnknownMediaIds,
}
_ = hook.GlobalHookManager.OnScanMediaFetcherCompleted().Trigger(completedEvent)
mf.AllMedia = completedEvent.AllMedia
mf.UnknownMediaIds = completedEvent.UnknownMediaIds
return mf, nil
}
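// In practice (illustrative summary): with Enhanced=false and the anime collection
// enabled, AllMedia is exactly the user's AniList collection; with Enhanced=true,
// AllMedia is the cache content, i.e. the collection plus every media resolved from
// local file titles and their relations, while UnknownMediaIds lists the discovered
// IDs that are not part of the collection.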
//----------------------------------------------------------------------------------------------------------------------
// FetchMediaFromLocalFiles gets media and their relations from local file titles.
// It retrieves unique titles from local files,
// fetches mal.SearchResultAnime from MAL,
// uses these search results to get AniList IDs using metadata.AnimeMetadata mappings,
// queries AniList via platform.GetAnimeWithRelations to retrieve each anilist.CompleteAnime, and fetches their relations using FetchMediaTree.
// It does not return an error if one of the steps fails.
// It returns the scanned media and a boolean indicating whether the process was successful.
func FetchMediaFromLocalFiles(
ctx context.Context,
platform platform.Platform,
localFiles []*anime.LocalFile,
completeAnime *anilist.CompleteAnimeCache,
metadataProvider metadata.Provider,
anilistRateLimiter *limiter.Limiter,
scanLogger *ScanLogger,
) (ret []*anilist.CompleteAnime, ok bool) {
defer util.HandlePanicInModuleThen("library/scanner/FetchMediaFromLocalFiles", func() {
ok = false
})
if scanLogger != nil {
scanLogger.LogMediaFetcher(zerolog.DebugLevel).
Str("module", "Enhanced").
Msg("Fetching media from local files")
}
rateLimiter := limiter.NewLimiter(time.Second, 20)
rateLimiter2 := limiter.NewLimiter(time.Second, 20)
// Get titles
titles := anime.GetUniqueAnimeTitlesFromLocalFiles(localFiles)
if scanLogger != nil {
scanLogger.LogMediaFetcher(zerolog.DebugLevel).
Str("module", "Enhanced").
Str("context", spew.Sprint(titles)).
Msg("Parsed titles from local files")
}
// +---------------------+
// | MyAnimeList |
// +---------------------+
// Get MAL media from titles
malSR := parallel.NewSettledResults[string, *mal.SearchResultAnime](titles)
malSR.AllSettled(func(title string, index int) (*mal.SearchResultAnime, error) {
rateLimiter.Wait()
return mal.AdvancedSearchWithMAL(title)
})
malRes, ok := malSR.GetFulfilledResults()
if !ok {
return nil, false
}
// Get duplicate-free version of MAL media
malMedia := lo.UniqBy(*malRes, func(res *mal.SearchResultAnime) int { return res.ID })
// Get the MAL media IDs
malIds := lop.Map(malMedia, func(n *mal.SearchResultAnime, index int) int { return n.ID })
if scanLogger != nil {
scanLogger.LogMediaFetcher(zerolog.DebugLevel).
Str("module", "Enhanced").
Str("context", spew.Sprint(lo.Map(malMedia, func(n *mal.SearchResultAnime, _ int) string {
return n.Name
}))).
Msg("Fetched MAL media from titles")
}
// +---------------------+
// | Animap |
// +---------------------+
// Get Animap mappings for each MAL ID and store them in `metadataProvider`
// This step is necessary because MAL doesn't provide AniList IDs and some MAL media don't exist on AniList
lop.ForEach(malIds, func(id int, index int) {
rateLimiter2.Wait()
//_, _ = metadataProvider.GetAnimeMetadata(metadata.MalPlatform, id)
_, _ = metadataProvider.GetCache().GetOrSet(metadata.GetAnimeMetadataCacheKey(metadata.MalPlatform, id), func() (*metadata.AnimeMetadata, error) {
res, err := metadataProvider.GetAnimeMetadata(metadata.MalPlatform, id)
return res, err
})
})
// +---------------------+
// | AniList |
// +---------------------+
// Retrieve the AniList IDs from the Animap mappings stored in the cache
anilistIds := make([]int, 0)
metadataProvider.GetCache().Range(func(key string, value *metadata.AnimeMetadata) bool {
if value != nil {
anilistIds = append(anilistIds, value.GetMappings().AnilistId)
}
return true
})
// Fetch all media from the AniList IDs
anilistMedia := make([]*anilist.CompleteAnime, 0)
lop.ForEach(anilistIds, func(id int, index int) {
anilistRateLimiter.Wait()
media, err := platform.GetAnimeWithRelations(ctx, id)
if err == nil {
anilistMedia = append(anilistMedia, media)
if scanLogger != nil {
scanLogger.LogMediaFetcher(zerolog.DebugLevel).
Str("module", "Enhanced").
Str("title", media.GetTitleSafe()).
Msg("Fetched Anilist media from MAL id")
}
} else {
if scanLogger != nil {
scanLogger.LogMediaFetcher(zerolog.WarnLevel).
Str("module", "Enhanced").
Int("id", id).
Msg("Failed to fetch Anilist media from MAL id")
}
}
})
if scanLogger != nil {
scanLogger.LogMediaFetcher(zerolog.DebugLevel).
Str("module", "Enhanced").
Str("context", spew.Sprint(lo.Map(anilistMedia, func(n *anilist.CompleteAnime, _ int) string {
return n.GetTitleSafe()
}))).
Msg("Fetched Anilist media from MAL ids")
}
// +---------------------+
// | MediaTree |
// +---------------------+
// Create a new tree that will hold the fetched relations
// /!\ This is redundant because we already have a cache, but `FetchMediaTree` needs its own tree
tree := anilist.NewCompleteAnimeRelationTree()
start := time.Now()
// For each media, fetch its relations
// The relations are fetched in parallel and added to `completeAnime`
lop.ForEach(anilistMedia, func(m *anilist.CompleteAnime, index int) {
// We ignore errors because we want to continue even if one of the media fails
_ = m.FetchMediaTree(anilist.FetchMediaTreeAll, platform.GetAnilistClient(), anilistRateLimiter, tree, completeAnime)
})
// +---------------------+
// | Cache |
// +---------------------+
// Retrieve all media from the cache
scanned := make([]*anilist.CompleteAnime, 0)
completeAnime.Range(func(key int, value *anilist.CompleteAnime) bool {
scanned = append(scanned, value)
return true
})
if scanLogger != nil {
scanLogger.LogMediaFetcher(zerolog.InfoLevel).
Str("module", "Enhanced").
Int("ms", int(time.Since(start).Milliseconds())).
Int("count", len(scanned)).
Str("context", spew.Sprint(lo.Map(scanned, func(n *anilist.CompleteAnime, _ int) string {
return n.GetTitleSafe()
}))).
Msg("Finished fetching media from local files")
}
return scanned, true
}
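// Pipeline sketch for the enhanced path above (illustrative; arguments elided):
//
//	titles := anime.GetUniqueAnimeTitlesFromLocalFiles(localFiles)      // unique parsed titles
//	mal.AdvancedSearchWithMAL(title)                                    // one MAL search per title
//	metadataProvider.GetAnimeMetadata(metadata.MalPlatform, malID)      // MAL id -> AniList id mapping
//	platform.GetAnimeWithRelations(ctx, anilistID)                      // AniList media
//	media.FetchMediaTree(...)                                           // relations -> completeAnime cache
//
// Every step is rate-limited, and failures on individual titles or IDs are skipped
// rather than aborting the whole scan.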

View File

@@ -0,0 +1,273 @@
package scanner
import (
"seanime/internal/api/anilist"
"seanime/internal/api/metadata"
"seanime/internal/library/anime"
"seanime/internal/platforms/anilist_platform"
"seanime/internal/test_utils"
"seanime/internal/util"
"seanime/internal/util/limiter"
"testing"
"github.com/samber/lo"
"github.com/stretchr/testify/assert"
)
func TestNewMediaFetcher(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.Anilist())
anilistClient := anilist.TestGetMockAnilistClient()
logger := util.NewLogger()
anilistPlatform := anilist_platform.NewAnilistPlatform(anilistClient, logger)
metadataProvider := metadata.GetMockProvider(t)
completeAnimeCache := anilist.NewCompleteAnimeCache()
anilistRateLimiter := limiter.NewAnilistLimiter()
dir := "E:/Anime"
tests := []struct {
name string
paths []string
enhanced bool
disableAnimeCollection bool
}{
{
name: "86 - Eighty Six Part 1 & 2",
paths: []string{
"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 20v2 (1080p) [30072859].mkv",
"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 21v2 (1080p) [4B1616A5].mkv",
"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 22v2 (1080p) [58BF43B4].mkv",
"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 23v2 (1080p) [D94B4894].mkv",
},
enhanced: false,
disableAnimeCollection: false,
},
{
name: "86 - Eighty Six Part 1 & 2",
paths: []string{
"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 20v2 (1080p) [30072859].mkv",
"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 21v2 (1080p) [4B1616A5].mkv",
"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 22v2 (1080p) [58BF43B4].mkv",
"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 23v2 (1080p) [D94B4894].mkv",
},
enhanced: true,
disableAnimeCollection: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
scanLogger, err := NewConsoleScanLogger()
if err != nil {
t.Fatal("expected result, got error:", err.Error())
}
// +---------------------+
// | Local Files |
// +---------------------+
var lfs []*anime.LocalFile
for _, path := range tt.paths {
lf := anime.NewLocalFile(path, dir)
lfs = append(lfs, lf)
}
// +---------------------+
// | MediaFetcher |
// +---------------------+
mf, err := NewMediaFetcher(t.Context(), &MediaFetcherOptions{
Enhanced: tt.enhanced,
Platform: anilistPlatform,
LocalFiles: lfs,
CompleteAnimeCache: completeAnimeCache,
MetadataProvider: metadataProvider,
Logger: util.NewLogger(),
AnilistRateLimiter: anilistRateLimiter,
ScanLogger: scanLogger,
DisableAnimeCollection: tt.disableAnimeCollection,
})
if err != nil {
t.Fatal("expected result, got error:", err.Error())
}
mc := NewMediaContainer(&MediaContainerOptions{
AllMedia: mf.AllMedia,
ScanLogger: scanLogger,
})
for _, m := range mc.NormalizedMedia {
t.Log(m.GetTitleSafe())
}
})
}
}
func TestNewEnhancedMediaFetcher(t *testing.T) {
anilistClient := anilist.TestGetMockAnilistClient()
logger := util.NewLogger()
anilistPlatform := anilist_platform.NewAnilistPlatform(anilistClient, logger)
metaProvider := metadata.GetMockProvider(t)
completeAnimeCache := anilist.NewCompleteAnimeCache()
anilistRateLimiter := limiter.NewAnilistLimiter()
dir := "E:/Anime"
tests := []struct {
name string
paths []string
enhanced bool
}{
{
name: "86 - Eighty Six Part 1 & 2",
paths: []string{
"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 20v2 (1080p) [30072859].mkv",
"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 21v2 (1080p) [4B1616A5].mkv",
"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 22v2 (1080p) [58BF43B4].mkv",
"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 23v2 (1080p) [D94B4894].mkv",
},
enhanced: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
scanLogger, err := NewScanLogger("./logs")
if err != nil {
t.Fatal("expected result, got error:", err.Error())
}
// +---------------------+
// | Local Files |
// +---------------------+
var lfs []*anime.LocalFile
for _, path := range tt.paths {
lf := anime.NewLocalFile(path, dir)
lfs = append(lfs, lf)
}
// +---------------------+
// | MediaFetcher |
// +---------------------+
mf, err := NewMediaFetcher(t.Context(), &MediaFetcherOptions{
Enhanced: tt.enhanced,
Platform: anilistPlatform,
LocalFiles: lfs,
CompleteAnimeCache: completeAnimeCache,
MetadataProvider: metaProvider,
Logger: util.NewLogger(),
AnilistRateLimiter: anilistRateLimiter,
ScanLogger: scanLogger,
})
if err != nil {
t.Fatal("expected result, got error:", err.Error())
}
mc := NewMediaContainer(&MediaContainerOptions{
AllMedia: mf.AllMedia,
ScanLogger: scanLogger,
})
for _, m := range mc.NormalizedMedia {
t.Log(m.GetTitleSafe())
}
})
}
}
func TestFetchMediaFromLocalFiles(t *testing.T) {
anilistClient := anilist.TestGetMockAnilistClient()
logger := util.NewLogger()
anilistPlatform := anilist_platform.NewAnilistPlatform(anilistClient, logger)
metaProvider := metadata.GetMockProvider(t)
completeAnimeCache := anilist.NewCompleteAnimeCache()
anilistRateLimiter := limiter.NewAnilistLimiter()
tests := []struct {
name string
paths []string
expectedMediaId []int
}{
{
name: "86 - Eighty Six Part 1 & 2",
paths: []string{
"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 20v2 (1080p) [30072859].mkv",
"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 21v2 (1080p) [4B1616A5].mkv",
"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 22v2 (1080p) [58BF43B4].mkv",
"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 23v2 (1080p) [D94B4894].mkv",
},
expectedMediaId: []int{116589, 131586}, // 86 - Eighty Six Part 1 & 2
},
}
dir := "E:/Anime"
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
scanLogger, err := NewScanLogger("./logs")
if err != nil {
t.Fatal("expected result, got error:", err.Error())
}
// +---------------------+
// | Local Files |
// +---------------------+
var lfs []*anime.LocalFile
for _, path := range tt.paths {
lf := anime.NewLocalFile(path, dir)
lfs = append(lfs, lf)
}
// +--------------------------+
// | FetchMediaFromLocalFiles |
// +--------------------------+
media, ok := FetchMediaFromLocalFiles(
t.Context(),
anilistPlatform,
lfs,
completeAnimeCache,
metaProvider,
anilistRateLimiter,
scanLogger,
)
if !ok {
t.Fatal("could not fetch media from local files")
}
ids := lo.Map(media, func(k *anilist.CompleteAnime, _ int) int {
return k.ID
})
// Test if all expected media IDs are present
for _, id := range tt.expectedMediaId {
assert.Contains(t, ids, id)
}
t.Log("Media IDs:")
for _, m := range media {
t.Log(m.GetTitleSafe())
}
})
}
}

View File

@@ -0,0 +1,226 @@
package scanner
import (
"errors"
"fmt"
"seanime/internal/api/anilist"
"seanime/internal/api/metadata"
"seanime/internal/util/limiter"
"sort"
"time"
"github.com/samber/lo"
"github.com/sourcegraph/conc/pool"
)
type (
MediaTreeAnalysisOptions struct {
tree *anilist.CompleteAnimeRelationTree
metadataProvider metadata.Provider
rateLimiter *limiter.Limiter
}
MediaTreeAnalysis struct {
branches []*MediaTreeAnalysisBranch
}
MediaTreeAnalysisBranch struct {
media *anilist.CompleteAnime
animeMetadata *metadata.AnimeMetadata
// Alternate 'part' absolute episode number of the first episode.
// Sometimes, the metadata provider may have both a 'true' absolute episode number and a 'part' absolute episode number;
// 'part' absolute episode numbers might be used for the "Part 2" of a season.
minPartAbsoluteEpisodeNumber int
maxPartAbsoluteEpisodeNumber int
minAbsoluteEpisode int
maxAbsoluteEpisode int
totalEpisodeCount int
noAbsoluteEpisodesFound bool
}
)
// NewMediaTreeAnalysis will analyze the media tree and create and store a MediaTreeAnalysisBranch for each media in the tree.
// Each MediaTreeAnalysisBranch will contain the min and max absolute episode number for the media.
// The min and max absolute episode numbers are used to get the relative episode number from an absolute episode number.
func NewMediaTreeAnalysis(opts *MediaTreeAnalysisOptions) (*MediaTreeAnalysis, error) {
relations := make([]*anilist.CompleteAnime, 0)
opts.tree.Range(func(key int, value *anilist.CompleteAnime) bool {
relations = append(relations, value)
return true
})
// Get Animap data for all related media in the tree
// With each Animap media, get the min and max absolute episode number
// Create new MediaTreeAnalysisBranch for each Animap media
p := pool.NewWithResults[*MediaTreeAnalysisBranch]().WithErrors()
for _, rel := range relations {
p.Go(func() (*MediaTreeAnalysisBranch, error) {
opts.rateLimiter.Wait()
animeMetadata, err := opts.metadataProvider.GetAnimeMetadata(metadata.AnilistPlatform, rel.ID)
if err != nil {
return nil, err
}
// Get the first episode
firstEp, ok := animeMetadata.Episodes["1"]
if !ok {
return nil, errors.New("no first episode")
}
// discrepancy: "seasonNumber":1,"episodeNumber":12,"absoluteEpisodeNumber":13,
// this happens when the media has a separate entry but is technically the same season
// when we detect this, we should use the "episodeNumber" as the absoluteEpisodeNumber
// this is a hacky fix, but it works for the cases I've seen so far
usePartEpisodeNumber := firstEp.EpisodeNumber > 1 && firstEp.AbsoluteEpisodeNumber-firstEp.EpisodeNumber > 1
partAbsoluteEpisodeNumber := 0
maxPartAbsoluteEpisodeNumber := 0
if usePartEpisodeNumber {
partAbsoluteEpisodeNumber = firstEp.EpisodeNumber
maxPartAbsoluteEpisodeNumber = partAbsoluteEpisodeNumber + animeMetadata.GetMainEpisodeCount() - 1
}
// If the first episode exists and has a valid absolute episode number, create a new MediaTreeAnalysisBranch
if animeMetadata.Episodes != nil {
return &MediaTreeAnalysisBranch{
media: rel,
animeMetadata: animeMetadata,
minPartAbsoluteEpisodeNumber: partAbsoluteEpisodeNumber,
maxPartAbsoluteEpisodeNumber: maxPartAbsoluteEpisodeNumber,
minAbsoluteEpisode: firstEp.AbsoluteEpisodeNumber,
// The max absolute episode number is the first episode's absolute episode number plus the total episode count minus 1
// We subtract 1 because the first episode's absolute episode number is already included in the total episode count
// e.g., if the first episode's absolute episode number is 13 and the total episode count is 12, the max absolute episode number is 24
maxAbsoluteEpisode: firstEp.AbsoluteEpisodeNumber + (animeMetadata.GetMainEpisodeCount() - 1),
totalEpisodeCount: animeMetadata.GetMainEpisodeCount(),
noAbsoluteEpisodesFound: firstEp.AbsoluteEpisodeNumber == 0,
}, nil
}
return nil, errors.New("could not analyze media tree branch")
})
}
branches, _ := p.Wait()
if branches == nil || len(branches) == 0 {
return nil, errors.New("no branches found")
}
return &MediaTreeAnalysis{branches: branches}, nil
}
// getRelativeEpisodeNumber uses the MediaTreeAnalysis to get the relative episode number for an absolute episode number
func (o *MediaTreeAnalysis) getRelativeEpisodeNumber(abs int) (relativeEp int, mediaId int, ok bool) {
isPartAbsolute := false
// Find the MediaTreeAnalysisBranch that contains the absolute episode number
branch, ok := lo.Find(o.branches, func(n *MediaTreeAnalysisBranch) bool {
// First check if the partAbsoluteEpisodeNumber is set
if n.minPartAbsoluteEpisodeNumber > 0 && n.maxPartAbsoluteEpisodeNumber > 0 {
// If it is, check if the absolute episode number given is the same as the partAbsoluteEpisodeNumber
// If it is, return true
if n.minPartAbsoluteEpisodeNumber <= abs && n.maxPartAbsoluteEpisodeNumber >= abs {
isPartAbsolute = true
return true
}
}
// Else, check if the absolute episode number given is within the min and max absolute episode numbers of the branch
if n.minAbsoluteEpisode <= abs && n.maxAbsoluteEpisode >= abs {
return true
}
return false
})
if !ok {
// Sort branches manually
type branchByFirstEpDate struct {
branch *MediaTreeAnalysisBranch
firstEpDate time.Time
minAbsoluteEpisode int
maxAbsoluteEpisode int
}
branches := make([]*branchByFirstEpDate, 0)
for _, b := range o.branches {
// Get the first episode date
firstEp, ok := b.animeMetadata.Episodes["1"]
if !ok {
continue
}
// parse date
t, err := time.Parse(time.DateOnly, firstEp.AirDate)
if err != nil {
continue
}
branches = append(branches, &branchByFirstEpDate{
branch: b,
firstEpDate: t,
})
}
// Sort branches by first episode date
// If the first episode date is not available, the branch will be placed at the end
sort.Slice(branches, func(i, j int) bool {
return branches[i].firstEpDate.Before(branches[j].firstEpDate)
})
// Hydrate branches with min and max absolute episode numbers
visited := make(map[int]*branchByFirstEpDate)
for idx, b := range branches {
visited[idx] = b
if v, ok := visited[idx-1]; ok {
b.minAbsoluteEpisode = v.maxAbsoluteEpisode + 1
b.maxAbsoluteEpisode = b.minAbsoluteEpisode + b.branch.totalEpisodeCount - 1
continue
}
b.minAbsoluteEpisode = 1
b.maxAbsoluteEpisode = b.minAbsoluteEpisode + b.branch.totalEpisodeCount - 1
}
for _, b := range branches {
if b.minAbsoluteEpisode <= abs && b.maxAbsoluteEpisode >= abs {
b.branch.minAbsoluteEpisode = b.minAbsoluteEpisode
b.branch.maxAbsoluteEpisode = b.maxAbsoluteEpisode
branch = b.branch
relativeEp = abs - (branch.minAbsoluteEpisode - 1)
mediaId = branch.media.ID
ok = true
return
}
}
return 0, 0, false
}
if isPartAbsolute {
// Let's say the media has 12 episodes and the file is "episode 13"
// If the [partAbsoluteEpisodeNumber] is 13, then the [relativeEp] will be 1, we can safely ignore the [absoluteEpisodeNumber]
// e.g. 13 - (13-1) = 1
relativeEp = abs - (branch.minPartAbsoluteEpisodeNumber - 1)
} else {
// Let's say the media has 12 episodes and the file is "episode 38"
// The [minAbsoluteEpisode] will be 38 and the [relativeEp] will be 1
// e.g. 38 - (38-1) = 1
relativeEp = abs - (branch.minAbsoluteEpisode - 1)
}
mediaId = branch.media.ID
return
}
func (o *MediaTreeAnalysis) printBranches() (str string) {
str = "["
for _, branch := range o.branches {
str += fmt.Sprintf("media: '%s', minAbsoluteEpisode: %d, maxAbsoluteEpisode: %d, totalEpisodeCount: %d; ", branch.media.GetTitleSafe(), branch.minAbsoluteEpisode, branch.maxAbsoluteEpisode, branch.totalEpisodeCount)
}
if len(o.branches) > 0 {
str = str[:len(str)-2]
}
str += "]"
return str
}
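// Worked example (illustrative, assuming the metadata provider reports 11 main
// episodes for "86 - Eighty Six" Part 1 and 12 for Part 2): Part 1 covers absolute
// episodes 1-11 and Part 2 covers 12-23, so for a file parsed as absolute episode 23:
//
//	relativeEp = 23 - (12 - 1) = 12, mediaId = Part 2's AniList ID
//
// which is the expectation exercised in media_tree_analysis_test.go.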

View File

@@ -0,0 +1,170 @@
package scanner
import (
"context"
"github.com/davecgh/go-spew/spew"
"github.com/stretchr/testify/assert"
"seanime/internal/api/anilist"
"seanime/internal/api/metadata"
"seanime/internal/test_utils"
"seanime/internal/util/limiter"
"testing"
"time"
)
func TestMediaTreeAnalysis(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.Anilist())
anilistClient := anilist.TestGetMockAnilistClient()
anilistRateLimiter := limiter.NewAnilistLimiter()
tree := anilist.NewCompleteAnimeRelationTree()
metadataProvider := metadata.GetMockProvider(t)
tests := []struct {
name string
mediaId int
absoluteEpisodeNumber int
expectedRelativeEpisodeNumber int
}{
{
name: "Media Tree Analysis for 86 - Eighty Six Part 2",
mediaId: 131586, // 86 - Eighty Six Part 2
absoluteEpisodeNumber: 23,
expectedRelativeEpisodeNumber: 12,
},
{
name: "Oshi no Ko Season 2",
			mediaId:                       150672, // Oshi no Ko Season 2
absoluteEpisodeNumber: 12,
expectedRelativeEpisodeNumber: 1,
},
{
name: "Re:zero",
mediaId: 21355, // Re:Zero kara Hajimeru Isekai Seikatsu
absoluteEpisodeNumber: 51,
expectedRelativeEpisodeNumber: 1,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
mediaF, err := anilistClient.BaseAnimeByID(context.Background(), &tt.mediaId)
if err != nil {
t.Fatal("expected media, got not found")
}
media := mediaF.GetMedia()
// +---------------------+
// | MediaTree |
// +---------------------+
err = media.FetchMediaTree(
anilist.FetchMediaTreeAll,
anilistClient,
anilistRateLimiter,
tree,
anilist.NewCompleteAnimeCache(),
)
if err != nil {
t.Fatal("expected media tree, got error:", err.Error())
}
// +---------------------+
// | MediaTreeAnalysis |
// +---------------------+
mta, err := NewMediaTreeAnalysis(&MediaTreeAnalysisOptions{
tree: tree,
metadataProvider: metadataProvider,
rateLimiter: limiter.NewLimiter(time.Minute, 25),
})
if err != nil {
t.Fatal("expected media tree analysis, got error:", err.Error())
}
// +---------------------+
// | Relative Episode |
// +---------------------+
relEp, _, ok := mta.getRelativeEpisodeNumber(tt.absoluteEpisodeNumber)
if assert.Truef(t, ok, "expected relative episode number %v for absolute episode number %v, nothing found", tt.expectedRelativeEpisodeNumber, tt.absoluteEpisodeNumber) {
assert.Equal(t, tt.expectedRelativeEpisodeNumber, relEp)
}
})
}
}
func TestMediaTreeAnalysis2(t *testing.T) {
anilistClient := anilist.TestGetMockAnilistClient()
anilistRateLimiter := limiter.NewAnilistLimiter()
tree := anilist.NewCompleteAnimeRelationTree()
metadataProvider := metadata.GetMockProvider(t)
tests := []struct {
name string
mediaId int
}{
{
name: "Media Tree Analysis",
mediaId: 375, // Soreyuke! Uchuu Senkan Yamamoto Yohko
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
media, err := anilistClient.BaseAnimeByID(context.Background(), &tt.mediaId)
if err != nil {
t.Fatal("expected media, got error:", err.Error())
}
// +---------------------+
// | MediaTree |
// +---------------------+
err = media.GetMedia().FetchMediaTree(
anilist.FetchMediaTreeAll,
anilistClient,
anilistRateLimiter,
tree,
anilist.NewCompleteAnimeCache(),
)
if err != nil {
t.Fatal("expected media tree, got error:", err.Error())
}
// +---------------------+
// | MediaTreeAnalysis |
// +---------------------+
mta, err := NewMediaTreeAnalysis(&MediaTreeAnalysisOptions{
tree: tree,
metadataProvider: metadataProvider,
rateLimiter: limiter.NewLimiter(time.Minute, 25),
})
if err != nil {
t.Fatal("expected media tree analysis, got error:", err.Error())
}
t.Log(spew.Sdump(mta))
})
}
}

View File

@@ -0,0 +1,421 @@
package scanner
import (
"context"
"errors"
"seanime/internal/api/anilist"
"seanime/internal/api/metadata"
"seanime/internal/events"
"seanime/internal/hook"
"seanime/internal/library/anime"
"seanime/internal/library/filesystem"
"seanime/internal/library/summary"
"seanime/internal/platforms/platform"
"seanime/internal/util"
"seanime/internal/util/limiter"
"sync"
"time"
"github.com/rs/zerolog"
"github.com/samber/lo"
lop "github.com/samber/lo/parallel"
)
type Scanner struct {
DirPath string
OtherDirPaths []string
Enhanced bool
Platform platform.Platform
Logger *zerolog.Logger
WSEventManager events.WSEventManagerInterface
ExistingLocalFiles []*anime.LocalFile
SkipLockedFiles bool
SkipIgnoredFiles bool
ScanSummaryLogger *summary.ScanSummaryLogger
ScanLogger *ScanLogger
MetadataProvider metadata.Provider
MatchingThreshold float64
MatchingAlgorithm string
}
// Scan will scan the directory and return a list of anime.LocalFile.
func (scn *Scanner) Scan(ctx context.Context) (lfs []*anime.LocalFile, err error) {
defer util.HandlePanicWithError(&err)
go func() {
anime.EpisodeCollectionFromLocalFilesCache.Clear()
}()
scn.WSEventManager.SendEvent(events.EventScanProgress, 0)
scn.WSEventManager.SendEvent(events.EventScanStatus, "Retrieving local files...")
completeAnimeCache := anilist.NewCompleteAnimeCache()
// Create a new Anilist rate limiter
anilistRateLimiter := limiter.NewAnilistLimiter()
if scn.ScanSummaryLogger == nil {
scn.ScanSummaryLogger = summary.NewScanSummaryLogger()
}
scn.Logger.Debug().Msg("scanner: Starting scan")
scn.WSEventManager.SendEvent(events.EventScanProgress, 10)
scn.WSEventManager.SendEvent(events.EventScanStatus, "Retrieving local files...")
startTime := time.Now()
// Invoke ScanStarted hook
event := &ScanStartedEvent{
LibraryPath: scn.DirPath,
OtherLibraryPaths: scn.OtherDirPaths,
Enhanced: scn.Enhanced,
SkipLocked: scn.SkipLockedFiles,
SkipIgnored: scn.SkipIgnoredFiles,
LocalFiles: scn.ExistingLocalFiles,
}
_ = hook.GlobalHookManager.OnScanStarted().Trigger(event)
scn.DirPath = event.LibraryPath
scn.OtherDirPaths = event.OtherLibraryPaths
scn.Enhanced = event.Enhanced
scn.SkipLockedFiles = event.SkipLocked
scn.SkipIgnoredFiles = event.SkipIgnored
// Default prevented, return the local files
if event.DefaultPrevented {
// Invoke ScanCompleted hook
completedEvent := &ScanCompletedEvent{
LocalFiles: event.LocalFiles,
Duration: int(time.Since(startTime).Milliseconds()),
}
hook.GlobalHookManager.OnScanCompleted().Trigger(completedEvent)
return completedEvent.LocalFiles, nil
}
// +---------------------+
// | File paths |
// +---------------------+
libraryPaths := append([]string{scn.DirPath}, scn.OtherDirPaths...)
// Create a map of local file paths used to avoid duplicates
retrievedPathMap := make(map[string]struct{})
paths := make([]string, 0)
mu := sync.Mutex{}
logMu := sync.Mutex{}
wg := sync.WaitGroup{}
wg.Add(len(libraryPaths))
// Get local files from all directories
for i, dirPath := range libraryPaths {
go func(dirPath string, i int) {
defer wg.Done()
retrievedPaths, err := filesystem.GetMediaFilePathsFromDirS(dirPath)
if err != nil {
scn.Logger.Error().Msgf("scanner: An error occurred while retrieving local files from directory: %s", err)
return
}
if scn.ScanLogger != nil {
logMu.Lock()
if i == 0 {
scn.ScanLogger.logger.Info().
Any("count", len(paths)).
Msgf("Retrieved file paths from main directory: %s", dirPath)
} else {
scn.ScanLogger.logger.Info().
Any("count", len(retrievedPaths)).
Msgf("Retrieved file paths from other directory: %s", dirPath)
}
logMu.Unlock()
}
			for _, path := range retrievedPaths {
				normPath := util.NormalizePath(path)
				mu.Lock()
				if _, ok := retrievedPathMap[normPath]; !ok {
					retrievedPathMap[normPath] = struct{}{} // mark the path as seen so duplicates across directories are skipped
					paths = append(paths, path)
				}
				mu.Unlock()
			}
}(dirPath, i)
}
wg.Wait()
if scn.ScanLogger != nil {
scn.ScanLogger.logger.Info().
Any("count", len(paths)).
Msg("Retrieved file paths from all directories")
}
// Invoke ScanFilePathsRetrieved hook
fpEvent := &ScanFilePathsRetrievedEvent{
FilePaths: paths,
}
_ = hook.GlobalHookManager.OnScanFilePathsRetrieved().Trigger(fpEvent)
paths = fpEvent.FilePaths
// +---------------------+
// | Local files |
// +---------------------+
localFiles := make([]*anime.LocalFile, 0)
// Get skipped files depending on options
skippedLfs := make(map[string]*anime.LocalFile)
if (scn.SkipLockedFiles || scn.SkipIgnoredFiles) && scn.ExistingLocalFiles != nil {
// Retrieve skipped files from existing local files
for _, lf := range scn.ExistingLocalFiles {
if scn.SkipLockedFiles && lf.IsLocked() {
skippedLfs[lf.GetNormalizedPath()] = lf
} else if scn.SkipIgnoredFiles && lf.IsIgnored() {
skippedLfs[lf.GetNormalizedPath()] = lf
}
}
}
// Create local files from paths (skipping skipped files)
localFiles = lop.Map(paths, func(path string, _ int) *anime.LocalFile {
if _, ok := skippedLfs[util.NormalizePath(path)]; !ok {
// Create a new local file
return anime.NewLocalFileS(path, libraryPaths)
} else {
return nil
}
})
// Remove nil values
localFiles = lo.Filter(localFiles, func(lf *anime.LocalFile, _ int) bool {
return lf != nil
})
// Invoke ScanLocalFilesParsed hook
parsedEvent := &ScanLocalFilesParsedEvent{
LocalFiles: localFiles,
}
_ = hook.GlobalHookManager.OnScanLocalFilesParsed().Trigger(parsedEvent)
localFiles = parsedEvent.LocalFiles
if scn.ScanLogger != nil {
scn.ScanLogger.logger.Debug().
Any("count", len(localFiles)).
Msg("Local files to be scanned")
scn.ScanLogger.logger.Debug().
Any("count", len(skippedLfs)).
Msg("Skipped files")
scn.ScanLogger.logger.Debug().
Msg("===========================================================================================================")
}
for _, lf := range localFiles {
if scn.ScanLogger != nil {
scn.ScanLogger.logger.Trace().
Str("path", lf.Path).
Str("filename", lf.Name).
Interface("parsedData", lf.ParsedData).
Interface("parsedFolderData", lf.ParsedFolderData).
Msg("Parsed local file")
}
}
if scn.ScanLogger != nil {
scn.ScanLogger.logger.Debug().
Msg("===========================================================================================================")
}
// DEVNOTE: Removed library path checking because it causes some issues with symlinks
// +---------------------+
// | No files to scan |
// +---------------------+
// If there are no local files to scan (all files are skipped, or a file was deleted)
if len(localFiles) == 0 {
scn.WSEventManager.SendEvent(events.EventScanProgress, 90)
scn.WSEventManager.SendEvent(events.EventScanStatus, "Verifying file integrity...")
// Add skipped files
if len(skippedLfs) > 0 {
for _, sf := range skippedLfs {
if filesystem.FileExists(sf.Path) { // Verify that the file still exists
localFiles = append(localFiles, sf)
}
}
}
scn.Logger.Debug().Msg("scanner: Scan completed")
scn.WSEventManager.SendEvent(events.EventScanProgress, 100)
scn.WSEventManager.SendEvent(events.EventScanStatus, "Scan completed")
// Invoke ScanCompleted hook
completedEvent := &ScanCompletedEvent{
LocalFiles: localFiles,
Duration: int(time.Since(startTime).Milliseconds()),
}
hook.GlobalHookManager.OnScanCompleted().Trigger(completedEvent)
localFiles = completedEvent.LocalFiles
return localFiles, nil
}
scn.WSEventManager.SendEvent(events.EventScanProgress, 20)
if scn.Enhanced {
scn.WSEventManager.SendEvent(events.EventScanStatus, "Fetching media detected from file titles...")
} else {
scn.WSEventManager.SendEvent(events.EventScanStatus, "Fetching media...")
}
// +---------------------+
// | MediaFetcher |
// +---------------------+
// Fetch media needed for matching
mf, err := NewMediaFetcher(ctx, &MediaFetcherOptions{
Enhanced: scn.Enhanced,
Platform: scn.Platform,
MetadataProvider: scn.MetadataProvider,
LocalFiles: localFiles,
CompleteAnimeCache: completeAnimeCache,
Logger: scn.Logger,
AnilistRateLimiter: anilistRateLimiter,
DisableAnimeCollection: false,
ScanLogger: scn.ScanLogger,
})
if err != nil {
return nil, err
}
scn.WSEventManager.SendEvent(events.EventScanProgress, 40)
scn.WSEventManager.SendEvent(events.EventScanStatus, "Matching local files...")
// +---------------------+
// | MediaContainer |
// +---------------------+
// Create a new container for media
mc := NewMediaContainer(&MediaContainerOptions{
AllMedia: mf.AllMedia,
ScanLogger: scn.ScanLogger,
})
scn.Logger.Debug().
Any("count", len(mc.NormalizedMedia)).
Msg("media container: Media container created")
// +---------------------+
// | Matcher |
// +---------------------+
// Create a new matcher
matcher := &Matcher{
LocalFiles: localFiles,
MediaContainer: mc,
CompleteAnimeCache: completeAnimeCache,
Logger: scn.Logger,
ScanLogger: scn.ScanLogger,
ScanSummaryLogger: scn.ScanSummaryLogger,
Algorithm: scn.MatchingAlgorithm,
Threshold: scn.MatchingThreshold,
}
scn.WSEventManager.SendEvent(events.EventScanProgress, 60)
err = matcher.MatchLocalFilesWithMedia()
if err != nil {
// If the matcher received no local files, return an error
if errors.Is(err, ErrNoLocalFiles) {
scn.Logger.Debug().Msg("scanner: Scan completed")
scn.WSEventManager.SendEvent(events.EventScanProgress, 100)
scn.WSEventManager.SendEvent(events.EventScanStatus, "Scan completed")
}
return nil, err
}
scn.WSEventManager.SendEvent(events.EventScanProgress, 70)
scn.WSEventManager.SendEvent(events.EventScanStatus, "Hydrating metadata...")
// +---------------------+
// | FileHydrator |
// +---------------------+
// Create a new hydrator
hydrator := &FileHydrator{
AllMedia: mc.NormalizedMedia,
LocalFiles: localFiles,
MetadataProvider: scn.MetadataProvider,
Platform: scn.Platform,
CompleteAnimeCache: completeAnimeCache,
AnilistRateLimiter: anilistRateLimiter,
Logger: scn.Logger,
ScanLogger: scn.ScanLogger,
ScanSummaryLogger: scn.ScanSummaryLogger,
}
hydrator.HydrateMetadata()
scn.WSEventManager.SendEvent(events.EventScanProgress, 80)
// +---------------------+
// | Add missing media |
// +---------------------+
// Add non-added media entries to AniList collection
// Max of 4 to avoid rate limit issues
if len(mf.UnknownMediaIds) < 5 {
scn.WSEventManager.SendEvent(events.EventScanStatus, "Adding missing media to AniList...")
if err = scn.Platform.AddMediaToCollection(ctx, mf.UnknownMediaIds); err != nil {
scn.Logger.Warn().Msg("scanner: An error occurred while adding media to planning list: " + err.Error())
}
}
scn.WSEventManager.SendEvent(events.EventScanProgress, 90)
scn.WSEventManager.SendEvent(events.EventScanStatus, "Verifying file integrity...")
// Hydrate the summary logger before merging files
scn.ScanSummaryLogger.HydrateData(localFiles, mc.NormalizedMedia, mf.AnimeCollectionWithRelations)
// +---------------------+
// | Merge files |
// +---------------------+
// Merge skipped files with scanned files
// Only files that exist (this removes deleted/moved files)
if len(skippedLfs) > 0 {
wg := sync.WaitGroup{}
mu := sync.Mutex{}
wg.Add(len(skippedLfs))
for _, skippedLf := range skippedLfs {
go func(skippedLf *anime.LocalFile) {
defer wg.Done()
if filesystem.FileExists(skippedLf.Path) {
mu.Lock()
localFiles = append(localFiles, skippedLf)
mu.Unlock()
}
}(skippedLf)
}
wg.Wait()
}
scn.Logger.Info().Msg("scanner: Scan completed")
scn.WSEventManager.SendEvent(events.EventScanProgress, 100)
scn.WSEventManager.SendEvent(events.EventScanStatus, "Scan completed")
if scn.ScanLogger != nil {
scn.ScanLogger.logger.Info().
Int("count", len(localFiles)).
Int("unknownMediaCount", len(mf.UnknownMediaIds)).
Msg("Scan completed")
}
// Invoke ScanCompleted hook
completedEvent := &ScanCompletedEvent{
LocalFiles: localFiles,
Duration: int(time.Since(startTime).Milliseconds()),
}
hook.GlobalHookManager.OnScanCompleted().Trigger(completedEvent)
localFiles = completedEvent.LocalFiles
return localFiles, nil
}
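// Editor's sketch (not part of the original file): the minimal set of fields a caller
// is expected to set before invoking Scan, mirroring how the tests below construct the
// scanner. The variable names and library path are placeholders; the platform, metadata
// provider and event manager are assumed to come from the application's bootstrap code.
func exampleScan(ctx context.Context, p platform.Platform, mp metadata.Provider, ws events.WSEventManagerInterface, logger *zerolog.Logger) ([]*anime.LocalFile, error) {
	scn := &Scanner{
		DirPath:            "/path/to/library", // main library directory
		Platform:           p,                  // used for matching and collection updates
		MetadataProvider:   mp,                 // used by the FileHydrator to hydrate episode metadata
		Logger:             logger,
		WSEventManager:     ws,  // receives progress/status events during the scan
		ExistingLocalFiles: nil, // pass previously scanned files to honor locked/ignored flags
		SkipLockedFiles:    false,
		SkipIgnoredFiles:   false,
	}
	return scn.Scan(ctx)
}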

View File

@@ -0,0 +1,107 @@
package scanner
import (
"bytes"
"fmt"
"os"
"path/filepath"
"time"
"github.com/rs/zerolog"
)
// ScanLogger is a custom logger struct for scanning operations.
type ScanLogger struct {
logger *zerolog.Logger
logFile *os.File
buffer *bytes.Buffer
}
// NewScanLogger creates a new ScanLogger with a log file named based on the current datetime.
// - outputDir: The directory to save the log file in. This should come from the config.
func NewScanLogger(outputDir string) (*ScanLogger, error) {
// Generate a log file name with the current datetime
logFileName := fmt.Sprintf("%s-scan.log", time.Now().Format("2006-01-02_15-04-05"))
// Create the logs directory if it doesn't exist
if _, err := os.Stat(outputDir); os.IsNotExist(err) {
err := os.Mkdir(outputDir, 0755)
if err != nil {
return nil, err
}
}
// Open the log file for writing
logFile, err := os.OpenFile(filepath.Join(outputDir, logFileName), os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
if err != nil {
return nil, err
}
// Create a buffer for storing log entries
buffer := new(bytes.Buffer)
	// Create a zerolog logger that writes JSON entries to the in-memory buffer
logger := zerolog.New(buffer).With().Logger()
return &ScanLogger{&logger, logFile, buffer}, nil
}
// NewConsoleScanLogger creates a ScanLogger that writes to stdout instead of a file (useful for tests and debugging)
func NewConsoleScanLogger() (*ScanLogger, error) {
output := zerolog.ConsoleWriter{
Out: os.Stdout,
TimeFormat: time.DateTime,
}
	// Create a zerolog logger that writes human-readable output to the console
logger := zerolog.New(output).With().Logger()
return &ScanLogger{logger: &logger, logFile: nil, buffer: nil}, nil
}
func (sl *ScanLogger) LogMediaContainer(level zerolog.Level) *zerolog.Event {
return sl.logger.WithLevel(level).Str("context", "MediaContainer")
}
func (sl *ScanLogger) LogMatcher(level zerolog.Level) *zerolog.Event {
return sl.logger.WithLevel(level).Str("context", "Matcher")
}
func (sl *ScanLogger) LogFileHydrator(level zerolog.Level) *zerolog.Event {
return sl.logger.WithLevel(level).Str("context", "FileHydrator")
}
func (sl *ScanLogger) LogMediaFetcher(level zerolog.Level) *zerolog.Event {
return sl.logger.WithLevel(level).Str("context", "MediaFetcher")
}
// Done flushes the buffer to the log file and closes the file.
func (sl *ScanLogger) Done() error {
if sl.logFile == nil {
return nil
}
// Write buffer contents to the log file
_, err := sl.logFile.Write(sl.buffer.Bytes())
if err != nil {
return err
}
// Sync and close the log file
err = sl.logFile.Sync()
if err != nil {
return err
}
return sl.logFile.Close()
}
// Close flushes the log file to disk. Unlike Done, it does not write the buffer or close the file.
func (sl *ScanLogger) Close() {
	if sl.logFile == nil {
		return
	}
	_ = sl.logFile.Sync()
}
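// Editor's sketch (not part of the original file): typical lifecycle of a file-backed
// ScanLogger. The output directory is a placeholder; in the application it is expected
// to come from the config, as noted on NewScanLogger.
func exampleScanLoggerUsage() error {
	sl, err := NewScanLogger("/path/to/logs")
	if err != nil {
		return err
	}
	// Entries are buffered in memory until Done is called.
	sl.LogMatcher(zerolog.InfoLevel).Str("file", "episode01.mkv").Msg("matched")
	// Flush the buffer to the log file and close it.
	return sl.Done()
}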

View File

@@ -0,0 +1,124 @@
package scanner
import (
"seanime/internal/api/anilist"
"seanime/internal/api/metadata"
"seanime/internal/library/anime"
"seanime/internal/platforms/anilist_platform"
"seanime/internal/util"
"seanime/internal/util/limiter"
"testing"
)
func TestScanLogger(t *testing.T) {
anilistClient := anilist.TestGetMockAnilistClient()
logger := util.NewLogger()
anilistPlatform := anilist_platform.NewAnilistPlatform(anilistClient, logger)
animeCollection, err := anilistPlatform.GetAnimeCollectionWithRelations(t.Context())
if err != nil {
t.Fatal(err.Error())
}
allMedia := animeCollection.GetAllAnime()
metadataProvider := metadata.GetMockProvider(t)
completeAnimeCache := anilist.NewCompleteAnimeCache()
anilistRateLimiter := limiter.NewAnilistLimiter()
tests := []struct {
name string
paths []string
expectedMediaId int
}{
{
name: "should be hydrated with id 131586",
paths: []string{
"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 20v2 (1080p) [30072859].mkv",
"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 21v2 (1080p) [4B1616A5].mkv",
"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 22v2 (1080p) [58BF43B4].mkv",
"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 23v2 (1080p) [D94B4894].mkv",
},
expectedMediaId: 131586, // 86 - Eighty Six Part 2
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
scanLogger, err := NewScanLogger("./logs")
if err != nil {
t.Fatal("expected result, got error:", err.Error())
}
// +---------------------+
// | Local Files |
// +---------------------+
var lfs []*anime.LocalFile
for _, path := range tt.paths {
lf := anime.NewLocalFile(path, "E:/Anime")
lfs = append(lfs, lf)
}
// +---------------------+
// | MediaContainer |
// +---------------------+
mc := NewMediaContainer(&MediaContainerOptions{
AllMedia: allMedia,
ScanLogger: scanLogger,
})
for _, nm := range mc.NormalizedMedia {
t.Logf("media id: %d, title: %s", nm.ID, nm.GetTitleSafe())
}
// +---------------------+
// | Matcher |
// +---------------------+
matcher := &Matcher{
LocalFiles: lfs,
MediaContainer: mc,
CompleteAnimeCache: completeAnimeCache,
Logger: util.NewLogger(),
ScanLogger: scanLogger,
ScanSummaryLogger: nil,
}
err = matcher.MatchLocalFilesWithMedia()
if err != nil {
t.Fatal("expected result, got error:", err.Error())
}
// +---------------------+
// | FileHydrator |
// +---------------------+
fh := FileHydrator{
LocalFiles: lfs,
AllMedia: mc.NormalizedMedia,
CompleteAnimeCache: completeAnimeCache,
Platform: anilistPlatform,
MetadataProvider: metadataProvider,
AnilistRateLimiter: anilistRateLimiter,
Logger: logger,
ScanLogger: scanLogger,
ScanSummaryLogger: nil,
ForceMediaId: 0,
}
fh.HydrateMetadata()
for _, lf := range fh.LocalFiles {
if lf.MediaId != tt.expectedMediaId {
t.Fatalf("expected media id %d, got %d", tt.expectedMediaId, lf.MediaId)
}
t.Logf("local file: %s,\nmedia id: %d\n", lf.Name, lf.MediaId)
}
})
}
}

View File

@@ -0,0 +1,79 @@
package scanner
import (
"seanime/internal/api/anilist"
"seanime/internal/events"
"seanime/internal/library/anime"
"seanime/internal/platforms/anilist_platform"
"seanime/internal/test_utils"
"seanime/internal/util"
"testing"
)
//----------------------------------------------------------------------------------------------------------------------
func TestScanner_Scan(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.Anilist())
anilistClient := anilist.TestGetMockAnilistClient()
logger := util.NewLogger()
anilistPlatform := anilist_platform.NewAnilistPlatform(anilistClient, logger)
wsEventManager := events.NewMockWSEventManager(util.NewLogger())
dir := "E:/Anime"
tests := []struct {
name string
paths []string
}{
{
name: "Scan",
paths: []string{
"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 20v2 (1080p) [30072859].mkv",
"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 21v2 (1080p) [4B1616A5].mkv",
"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 22v2 (1080p) [58BF43B4].mkv",
"E:/Anime/[SubsPlease] 86 - Eighty Six (01-23) (1080p) [Batch]/[SubsPlease] 86 - Eighty Six - 23v2 (1080p) [D94B4894].mkv",
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
existingLfs := make([]*anime.LocalFile, 0)
for _, path := range tt.paths {
lf := anime.NewLocalFile(path, dir)
existingLfs = append(existingLfs, lf)
}
// +---------------------+
// | Scan |
// +---------------------+
scanner := &Scanner{
DirPath: dir,
Enhanced: false,
				Platform:           anilistPlatform,
				MetadataProvider:   metadata.GetMockProvider(t), // the hydration step expects a metadata provider (mirrors TestScanLogger)
Logger: util.NewLogger(),
WSEventManager: wsEventManager,
ExistingLocalFiles: existingLfs,
SkipLockedFiles: false,
SkipIgnoredFiles: false,
ScanLogger: nil,
ScanSummaryLogger: nil,
}
lfs, err := scanner.Scan(t.Context())
if err != nil {
t.Fatal("expected result, got error:", err.Error())
}
for _, lf := range lfs {
t.Log(lf.Name)
}
})
}
}

View File

@@ -0,0 +1,116 @@
package scanner
import (
"os"
"path/filepath"
"seanime/internal/events"
"strings"
"github.com/fsnotify/fsnotify"
"github.com/rs/zerolog"
)
// Watcher is a custom file system event watcher
type Watcher struct {
Watcher *fsnotify.Watcher
Logger *zerolog.Logger
WSEventManager events.WSEventManagerInterface
TotalSize string
}
type NewWatcherOptions struct {
Logger *zerolog.Logger
WSEventManager events.WSEventManagerInterface
}
// NewWatcher creates a new Watcher instance for monitoring a directory and its subdirectories
func NewWatcher(opts *NewWatcherOptions) (*Watcher, error) {
watcher, err := fsnotify.NewWatcher()
if err != nil {
return nil, err
}
return &Watcher{
Watcher: watcher,
Logger: opts.Logger,
WSEventManager: opts.WSEventManager,
}, nil
}
//----------------------------------------------------------------------------------------------------------------------
type WatchLibraryFilesOptions struct {
LibraryPaths []string
}
// InitLibraryFileWatcher starts watching the specified directory and its subdirectories for file system events
func (w *Watcher) InitLibraryFileWatcher(opts *WatchLibraryFilesOptions) error {
// Define a function to add directories and their subdirectories to the watcher
watchDir := func(dir string) error {
err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return nil
}
if info.IsDir() {
return w.Watcher.Add(path)
}
return nil
})
return err
}
// Add the initial directory and its subdirectories to the watcher
for _, path := range opts.LibraryPaths {
if err := watchDir(path); err != nil {
return err
}
}
w.Logger.Info().Msgf("watcher: Watching directories: %+v", opts.LibraryPaths)
return nil
}
func (w *Watcher) StartWatching(
onFileAction func(),
) {
// Start a goroutine to handle file system events
go func() {
for {
select {
case event, ok := <-w.Watcher.Events:
if !ok {
return
}
//if event.Op&fsnotify.Write == fsnotify.Write {
//}
if strings.Contains(event.Name, ".part") || strings.Contains(event.Name, ".tmp") {
continue
}
if event.Op&fsnotify.Create == fsnotify.Create {
w.Logger.Debug().Msgf("watcher: File created: %s", event.Name)
w.WSEventManager.SendEvent(events.LibraryWatcherFileAdded, event.Name)
onFileAction()
}
if event.Op&fsnotify.Remove == fsnotify.Remove {
w.Logger.Debug().Msgf("watcher: File removed: %s", event.Name)
w.WSEventManager.SendEvent(events.LibraryWatcherFileRemoved, event.Name)
onFileAction()
}
case err, ok := <-w.Watcher.Errors:
if !ok {
return
}
w.Logger.Warn().Err(err).Msgf("watcher: Error while watching directory")
}
}
}()
}
func (w *Watcher) StopWatching() {
	if err := w.Watcher.Close(); err != nil {
		w.Logger.Warn().Err(err).Msg("watcher: Failed to close watcher")
		return
	}
	w.Logger.Trace().Msg("watcher: Watcher stopped")
}
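// Editor's sketch (not part of the original file): wiring a Watcher to a library path.
// The logger, event manager and path are placeholders; the onFileAction callback is
// where the application would typically trigger a re-scan.
func exampleWatcherUsage(logger *zerolog.Logger, ws events.WSEventManagerInterface) error {
	w, err := NewWatcher(&NewWatcherOptions{
		Logger:         logger,
		WSEventManager: ws,
	})
	if err != nil {
		return err
	}
	// Register the library directory and all of its subdirectories.
	if err := w.InitLibraryFileWatcher(&WatchLibraryFilesOptions{
		LibraryPaths: []string{"/path/to/library"},
	}); err != nil {
		return err
	}
	// React to create/remove events until StopWatching is called.
	w.StartWatching(func() {
		logger.Info().Msg("watcher: Library changed, a re-scan could be triggered here")
	})
	return nil
}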

View File

@@ -0,0 +1,363 @@
package summary
import (
"fmt"
"seanime/internal/api/anilist"
"seanime/internal/library/anime"
"time"
"github.com/google/uuid"
)
const (
LogFileNotMatched LogType = iota
LogComparison
LogSuccessfullyMatched
LogFailedMatch
LogMatchValidated
LogUnmatched
LogMetadataMediaTreeFetched
LogMetadataMediaTreeFetchFailed
LogMetadataEpisodeNormalized
LogMetadataEpisodeNormalizationFailed
LogMetadataEpisodeZero
LogMetadataNC
LogMetadataSpecial
LogMetadataMain
LogMetadataHydrated
LogPanic
LogDebug
)
type (
LogType int
ScanSummaryLogger struct {
Logs []*ScanSummaryLog
LocalFiles []*anime.LocalFile
AllMedia []*anime.NormalizedMedia
AnimeCollection *anilist.AnimeCollectionWithRelations
}
ScanSummaryLog struct { // Holds a log entry. The log entry will then be used to generate a ScanSummary.
ID string `json:"id"`
FilePath string `json:"filePath"`
Level string `json:"level"`
Message string `json:"message"`
}
ScanSummary struct {
ID string `json:"id"`
Groups []*ScanSummaryGroup `json:"groups"`
UnmatchedFiles []*ScanSummaryFile `json:"unmatchedFiles"`
}
ScanSummaryFile struct {
ID string `json:"id"`
LocalFile *anime.LocalFile `json:"localFile"`
Logs []*ScanSummaryLog `json:"logs"`
}
ScanSummaryGroup struct {
ID string `json:"id"`
Files []*ScanSummaryFile `json:"files"`
MediaId int `json:"mediaId"`
MediaTitle string `json:"mediaTitle"`
MediaImage string `json:"mediaImage"`
MediaIsInCollection bool `json:"mediaIsInCollection"` // Whether the media is in the user's AniList collection
}
ScanSummaryItem struct { // Database item
CreatedAt time.Time `json:"createdAt"`
ScanSummary *ScanSummary `json:"scanSummary"`
}
)
func NewScanSummaryLogger() *ScanSummaryLogger {
return &ScanSummaryLogger{
Logs: make([]*ScanSummaryLog, 0),
}
}
// HydrateData will hydrate the data needed to generate the summary.
func (l *ScanSummaryLogger) HydrateData(lfs []*anime.LocalFile, media []*anime.NormalizedMedia, animeCollection *anilist.AnimeCollectionWithRelations) {
l.LocalFiles = lfs
l.AllMedia = media
l.AnimeCollection = animeCollection
}
func (l *ScanSummaryLogger) GenerateSummary() *ScanSummary {
if l == nil || l.LocalFiles == nil || l.AllMedia == nil || l.AnimeCollection == nil {
return nil
}
summary := &ScanSummary{
ID: uuid.NewString(),
Groups: make([]*ScanSummaryGroup, 0),
UnmatchedFiles: make([]*ScanSummaryFile, 0),
}
groupsMap := make(map[int][]*ScanSummaryFile)
// Generate summary files
for _, lf := range l.LocalFiles {
if lf.MediaId == 0 {
summary.UnmatchedFiles = append(summary.UnmatchedFiles, &ScanSummaryFile{
ID: uuid.NewString(),
LocalFile: lf,
Logs: l.getFileLogs(lf),
})
continue
}
summaryFile := &ScanSummaryFile{
ID: uuid.NewString(),
LocalFile: lf,
Logs: l.getFileLogs(lf),
}
//summary.Files = append(summary.Files, summaryFile)
// Add to group
		// Appending to a nil slice allocates it, so both the first and subsequent files collapse into one statement
		groupsMap[lf.MediaId] = append(groupsMap[lf.MediaId], summaryFile)
}
// Generate summary groups
for mediaId, files := range groupsMap {
mediaTitle := ""
mediaImage := ""
mediaIsInCollection := false
for _, m := range l.AllMedia {
if m.ID == mediaId {
mediaTitle = m.GetPreferredTitle()
mediaImage = ""
if m.GetCoverImage() != nil && m.GetCoverImage().GetLarge() != nil {
mediaImage = *m.GetCoverImage().GetLarge()
}
break
}
}
if _, found := l.AnimeCollection.GetListEntryFromMediaId(mediaId); found {
mediaIsInCollection = true
}
summary.Groups = append(summary.Groups, &ScanSummaryGroup{
ID: uuid.NewString(),
Files: files,
MediaId: mediaId,
MediaTitle: mediaTitle,
MediaImage: mediaImage,
MediaIsInCollection: mediaIsInCollection,
})
}
return summary
}
func (l *ScanSummaryLogger) LogComparison(lf *anime.LocalFile, algo string, bestTitle string, ratingType string, rating string) {
if l == nil {
return
}
msg := fmt.Sprintf("Comparison using %s. Best title: \"%s\". %s: %s", algo, bestTitle, ratingType, rating)
l.logType(LogComparison, lf, msg)
}
func (l *ScanSummaryLogger) LogSuccessfullyMatched(lf *anime.LocalFile, mediaId int) {
if l == nil {
return
}
msg := fmt.Sprintf("Successfully matched to media %d", mediaId)
l.logType(LogSuccessfullyMatched, lf, msg)
}
func (l *ScanSummaryLogger) LogPanic(lf *anime.LocalFile, stackTrace string) {
if l == nil {
return
}
//msg := fmt.Sprintf("Panic occurred, please report this issue on the GitHub repository with the stack trace printed in the terminal")
l.logType(LogPanic, lf, "PANIC! "+stackTrace)
}
func (l *ScanSummaryLogger) LogFailedMatch(lf *anime.LocalFile, reason string) {
if l == nil {
return
}
msg := fmt.Sprintf("Failed to match: %s", reason)
l.logType(LogFailedMatch, lf, msg)
}
func (l *ScanSummaryLogger) LogMatchValidated(lf *anime.LocalFile, mediaId int) {
if l == nil {
return
}
msg := fmt.Sprintf("Match validated for media %d", mediaId)
l.logType(LogMatchValidated, lf, msg)
}
func (l *ScanSummaryLogger) LogUnmatched(lf *anime.LocalFile, reason string) {
if l == nil {
return
}
msg := fmt.Sprintf("Unmatched: %s", reason)
l.logType(LogUnmatched, lf, msg)
}
func (l *ScanSummaryLogger) LogFileNotMatched(lf *anime.LocalFile, reason string) {
if l == nil {
return
}
msg := fmt.Sprintf("Not matched: %s", reason)
l.logType(LogFileNotMatched, lf, msg)
}
func (l *ScanSummaryLogger) LogMetadataMediaTreeFetched(lf *anime.LocalFile, ms int64, branches int) {
if l == nil {
return
}
msg := fmt.Sprintf("Media tree fetched in %dms. Branches: %d", ms, branches)
l.logType(LogMetadataMediaTreeFetched, lf, msg)
}
func (l *ScanSummaryLogger) LogMetadataMediaTreeFetchFailed(lf *anime.LocalFile, err error, ms int64) {
if l == nil {
return
}
msg := fmt.Sprintf("Could not fetch media tree: %s. Took %dms", err.Error(), ms)
l.logType(LogMetadataMediaTreeFetchFailed, lf, msg)
}
func (l *ScanSummaryLogger) LogMetadataEpisodeNormalized(lf *anime.LocalFile, mediaId int, episode int, newEpisode int, newMediaId int, aniDBEpisode string) {
if l == nil {
return
}
msg := fmt.Sprintf("Episode %d normalized to %d. New media ID: %d. AniDB episode: %s", episode, newEpisode, newMediaId, aniDBEpisode)
l.logType(LogMetadataEpisodeNormalized, lf, msg)
}
func (l *ScanSummaryLogger) LogMetadataEpisodeNormalizationFailed(lf *anime.LocalFile, err error, episode int, aniDBEpisode string) {
if l == nil {
return
}
msg := fmt.Sprintf("Episode normalization failed. Reason \"%s\". Episode %d. AniDB episode %s", err.Error(), episode, aniDBEpisode)
l.logType(LogMetadataEpisodeNormalizationFailed, lf, msg)
}
func (l *ScanSummaryLogger) LogMetadataNC(lf *anime.LocalFile) {
if l == nil {
return
}
msg := fmt.Sprintf("Marked as NC file")
l.logType(LogMetadataNC, lf, msg)
}
func (l *ScanSummaryLogger) LogMetadataSpecial(lf *anime.LocalFile, episode int, aniDBEpisode string) {
if l == nil {
return
}
msg := fmt.Sprintf("Marked as Special episode. Episode %d. AniDB episode: %s", episode, aniDBEpisode)
l.logType(LogMetadataSpecial, lf, msg)
}
func (l *ScanSummaryLogger) LogMetadataMain(lf *anime.LocalFile, episode int, aniDBEpisode string) {
if l == nil {
return
}
msg := fmt.Sprintf("Marked as main episode. Episode %d. AniDB episode: %s", episode, aniDBEpisode)
l.logType(LogMetadataMain, lf, msg)
}
func (l *ScanSummaryLogger) LogMetadataEpisodeZero(lf *anime.LocalFile, episode int, aniDBEpisode string) {
if l == nil {
return
}
msg := fmt.Sprintf("Marked as main episode. Episode %d. AniDB episode set to %s assuming AniDB does not include episode 0 in the episode count.", episode, aniDBEpisode)
l.logType(LogMetadataEpisodeZero, lf, msg)
}
func (l *ScanSummaryLogger) LogMetadataHydrated(lf *anime.LocalFile, mediaId int) {
if l == nil {
return
}
msg := fmt.Sprintf("Metadata hydrated for media %d", mediaId)
l.logType(LogMetadataHydrated, lf, msg)
}
func (l *ScanSummaryLogger) LogDebug(lf *anime.LocalFile, message string) {
if l == nil {
return
}
l.log(lf, "info", message)
}
func (l *ScanSummaryLogger) logType(logType LogType, lf *anime.LocalFile, message string) {
if l == nil {
return
}
switch logType {
case LogComparison:
l.log(lf, "info", message)
case LogSuccessfullyMatched:
l.log(lf, "info", message)
case LogFailedMatch:
l.log(lf, "warning", message)
case LogMatchValidated:
l.log(lf, "info", message)
case LogUnmatched:
l.log(lf, "warning", message)
case LogMetadataMediaTreeFetched:
l.log(lf, "info", message)
case LogMetadataMediaTreeFetchFailed:
l.log(lf, "error", message)
case LogMetadataEpisodeNormalized:
l.log(lf, "info", message)
case LogMetadataEpisodeNormalizationFailed:
l.log(lf, "error", message)
case LogMetadataHydrated:
l.log(lf, "info", message)
case LogMetadataNC:
l.log(lf, "info", message)
case LogMetadataSpecial:
l.log(lf, "info", message)
case LogMetadataMain:
l.log(lf, "info", message)
case LogMetadataEpisodeZero:
l.log(lf, "warning", message)
case LogFileNotMatched:
l.log(lf, "warning", message)
case LogPanic:
l.log(lf, "error", message)
case LogDebug:
l.log(lf, "info", message)
}
}
func (l *ScanSummaryLogger) log(lf *anime.LocalFile, level string, message string) {
if l == nil {
return
}
l.Logs = append(l.Logs, &ScanSummaryLog{
ID: uuid.NewString(),
FilePath: lf.Path,
Level: level,
Message: message,
})
}
func (l *ScanSummaryLogger) getFileLogs(lf *anime.LocalFile) []*ScanSummaryLog {
logs := make([]*ScanSummaryLog, 0)
if l == nil {
return logs
}
for _, log := range l.Logs {
if lf.HasSamePath(log.FilePath) {
logs = append(logs, log)
}
}
return logs
}
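// Editor's sketch (not part of the original file): how a scan is expected to feed this
// logger. The slices are assumed to come from a completed scan; only functions defined
// above are used.
func exampleScanSummary(lfs []*anime.LocalFile, media []*anime.NormalizedMedia, collection *anilist.AnimeCollectionWithRelations) *ScanSummary {
	logger := NewScanSummaryLogger()
	for _, lf := range lfs {
		// Record one event per file; matched files are grouped by media ID in the summary.
		logger.LogSuccessfullyMatched(lf, lf.MediaId)
	}
	// Attach the data needed to resolve titles, images and collection membership.
	logger.HydrateData(lfs, media, collection)
	// Returns nil if the logger was never hydrated.
	return logger.GenerateSummary()
}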