node build fixed

ra_ma
2025-09-20 14:08:38 +01:00
parent c6ebbe069d
commit 3d298fa434
1516 changed files with 535727 additions and 2 deletions


@@ -0,0 +1,83 @@
<p align="center">
<img src="../docs/images/logo_2.png" alt="preview" width="150px"/>
</p>
<h2 align="center"><b>Seanime Server</b></h2>
- `api`: Third-party APIs
    - `anilist`: AniList structs and methods
    - `anizip`: Metadata API
    - `filler`: Filler API
    - `listsync`
    - `mal`: MyAnimeList API
    - `mappings`: Mapping API
    - `metadata`: **Metadata module** for anime
    - `tvdb`: TheTVDB API
- `constants`: Version, keys
- `core`
    - `app.go`: **Shared app struct**
    - `config.go`: Configuration
    - `extensions.go`: Load built-in extensions
    - `fiber.go`: HTTP server
    - `watcher.go`: Library watcher
- `cron`: Background tasks
- `database`
    - `db`: **Database module**
    - `db_bridge`: Helper methods to avoid circular dependencies
    - `models`: Database models
- `debrid`: **Debrid module**
    - `debrid`: Structs and interfaces
    - `client`: **Debrid repository** for streaming, download
    - `torbox`
    - `realdebrid`
- `discordrpc`: Discord RPC
    - `client`
    - `ipc`
    - `presence`: **Discord Rich Presence module**
- `events`: **Websocket Event Manager module** and constants
- `extensions`: Structs and interfaces
- `extension_playground`: **Extension Playground module**
- `extension_repo`: **Extension Repository module**
- `handlers`: API handlers
- `library`
    - `anime`: Library structs and methods
    - `autodownloader`: **Auto downloader module**
    - `autoscanner`: **Auto scanner module**
    - `filesystem`: File system methods
    - `playbackmanager`: **Playback Manager module** for progress tracking
    - `scanner`: **Scanner module**
    - `summary`: Scan summary
- `manga`: Manga structs and **Manga Downloader module**
    - `downloader`: Chapter downloader structs and methods
    - `providers`: Online provider structs and methods
- `mediaplayers`
    - `mediaplayer`: **Media Player Repository** module
    - `mpchc`
    - `mpv`
    - `mpvipc`
    - `vlc`
- `mediastream`: **Media Stream Repository** module
    - `transcoder`: Transcoder
    - `videofile`: Media metadata
- `notifier`
- `onlinestream`: **Onlinestream module**
    - `providers`: Stream providers
    - `sources`: Video server sources
- `platforms`
    - `platform`: Platform structs and methods
    - `anilist_platform`
    - `local_platform`
    - `test_utils`: Test methods
- `torrentstream`: **Torrent Stream Repository** module
- `sync`: **Sync/Offline module**
- `test_utils`: Test methods
- `torrent_clients`
    - `torrent_client`: **Torrent Client Repository** module
    - `qbittorrent`
    - `transmission`
- `torrents`
    - `analyzer`: Scan and identify torrent files
    - `animetosho`
    - `nyaa`
    - `seadex`
    - `torrent`: Torrent structs and methods


@@ -0,0 +1,14 @@
model:
  filename: ./models_gen.go
client:
  filename: ./client_gen.go
models:
  DateTime:
    model: github.com/99designs/gqlgen/graphql.Time
endpoint:
  url: https://graphql.anilist.co
query:
  - "./queries/*.graphql"
generate:
  clientV2: true
  clientInterfaceName: "GithubGraphQLClient"


@@ -0,0 +1,407 @@
package anilist
import (
"compress/gzip"
"context"
"errors"
"fmt"
"io"
"net/http"
"seanime/internal/events"
"seanime/internal/util"
"strconv"
"time"
"github.com/Yamashou/gqlgenc/clientv2"
"github.com/Yamashou/gqlgenc/graphqljson"
"github.com/goccy/go-json"
"github.com/rs/zerolog"
)
var (
// ErrNotAuthenticated is returned when trying to access an Anilist API endpoint that requires authentication,
// but the client is not authenticated.
ErrNotAuthenticated = errors.New("not authenticated")
)
type AnilistClient interface {
IsAuthenticated() bool
AnimeCollection(ctx context.Context, userName *string, interceptors ...clientv2.RequestInterceptor) (*AnimeCollection, error)
AnimeCollectionWithRelations(ctx context.Context, userName *string, interceptors ...clientv2.RequestInterceptor) (*AnimeCollectionWithRelations, error)
BaseAnimeByMalID(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*BaseAnimeByMalID, error)
BaseAnimeByID(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*BaseAnimeByID, error)
SearchBaseAnimeByIds(ctx context.Context, ids []*int, page *int, perPage *int, status []*MediaStatus, inCollection *bool, sort []*MediaSort, season *MediaSeason, year *int, genre *string, format *MediaFormat, interceptors ...clientv2.RequestInterceptor) (*SearchBaseAnimeByIds, error)
CompleteAnimeByID(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*CompleteAnimeByID, error)
AnimeDetailsByID(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*AnimeDetailsByID, error)
ListAnime(ctx context.Context, page *int, search *string, perPage *int, sort []*MediaSort, status []*MediaStatus, genres []*string, averageScoreGreater *int, season *MediaSeason, seasonYear *int, format *MediaFormat, isAdult *bool, interceptors ...clientv2.RequestInterceptor) (*ListAnime, error)
ListRecentAnime(ctx context.Context, page *int, perPage *int, airingAtGreater *int, airingAtLesser *int, notYetAired *bool, interceptors ...clientv2.RequestInterceptor) (*ListRecentAnime, error)
UpdateMediaListEntry(ctx context.Context, mediaID *int, status *MediaListStatus, scoreRaw *int, progress *int, startedAt *FuzzyDateInput, completedAt *FuzzyDateInput, interceptors ...clientv2.RequestInterceptor) (*UpdateMediaListEntry, error)
UpdateMediaListEntryProgress(ctx context.Context, mediaID *int, progress *int, status *MediaListStatus, interceptors ...clientv2.RequestInterceptor) (*UpdateMediaListEntryProgress, error)
UpdateMediaListEntryRepeat(ctx context.Context, mediaID *int, repeat *int, interceptors ...clientv2.RequestInterceptor) (*UpdateMediaListEntryRepeat, error)
DeleteEntry(ctx context.Context, mediaListEntryID *int, interceptors ...clientv2.RequestInterceptor) (*DeleteEntry, error)
MangaCollection(ctx context.Context, userName *string, interceptors ...clientv2.RequestInterceptor) (*MangaCollection, error)
SearchBaseManga(ctx context.Context, page *int, perPage *int, sort []*MediaSort, search *string, status []*MediaStatus, interceptors ...clientv2.RequestInterceptor) (*SearchBaseManga, error)
BaseMangaByID(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*BaseMangaByID, error)
MangaDetailsByID(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*MangaDetailsByID, error)
ListManga(ctx context.Context, page *int, search *string, perPage *int, sort []*MediaSort, status []*MediaStatus, genres []*string, averageScoreGreater *int, startDateGreater *string, startDateLesser *string, format *MediaFormat, countryOfOrigin *string, isAdult *bool, interceptors ...clientv2.RequestInterceptor) (*ListManga, error)
ViewerStats(ctx context.Context, interceptors ...clientv2.RequestInterceptor) (*ViewerStats, error)
StudioDetails(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*StudioDetails, error)
GetViewer(ctx context.Context, interceptors ...clientv2.RequestInterceptor) (*GetViewer, error)
AnimeAiringSchedule(ctx context.Context, ids []*int, season *MediaSeason, seasonYear *int, previousSeason *MediaSeason, previousSeasonYear *int, nextSeason *MediaSeason, nextSeasonYear *int, interceptors ...clientv2.RequestInterceptor) (*AnimeAiringSchedule, error)
AnimeAiringScheduleRaw(ctx context.Context, ids []*int, interceptors ...clientv2.RequestInterceptor) (*AnimeAiringScheduleRaw, error)
}
type (
// AnilistClientImpl is a wrapper around the AniList API client.
AnilistClientImpl struct {
Client *Client
logger *zerolog.Logger
token string // The token used for authentication with the AniList API
}
)
// NewAnilistClient creates a new AnilistClientImpl with the given token.
// The token is used for authorization when making requests to the AniList API.
func NewAnilistClient(token string) *AnilistClientImpl {
ac := &AnilistClientImpl{
token: token,
Client: &Client{
Client: clientv2.NewClient(http.DefaultClient, "https://graphql.anilist.co", nil,
func(ctx context.Context, req *http.Request, gqlInfo *clientv2.GQLRequestInfo, res interface{}, next clientv2.RequestInterceptorFunc) error {
req.Header.Set("Content-Type", "application/json")
req.Header.Set("Accept", "application/json")
if len(token) > 0 {
req.Header.Set("Authorization", "Bearer "+token)
}
return next(ctx, req, gqlInfo, res)
}),
},
logger: util.NewLogger(),
}
ac.Client.Client.CustomDo = ac.customDoFunc
return ac
}
func (ac *AnilistClientImpl) IsAuthenticated() bool {
if ac.Client == nil || ac.Client.Client == nil {
return false
}
if len(ac.token) == 0 {
return false
}
// If the token is not empty, we are authenticated
return true
}
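// exampleClientUsage is an illustrative sketch, not part of the original file: it shows how the
// client is constructed and how ErrNotAuthenticated surfaces. The token and media ID below are
// placeholder values.
func exampleClientUsage() {
	ac := NewAnilistClient("") // an empty token yields an unauthenticated, read-only client
	id := 1
	if _, err := ac.BaseAnimeByID(context.Background(), &id); err != nil {
		// public queries work without a token; handle API/network errors here
	}
	if _, err := ac.AnimeCollection(context.Background(), nil); errors.Is(err, ErrNotAuthenticated) {
		// authenticated endpoints are rejected locally before any request is made
	}
}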
////////////////////////////////
// Authenticated
////////////////////////////////
func (ac *AnilistClientImpl) UpdateMediaListEntry(ctx context.Context, mediaID *int, status *MediaListStatus, scoreRaw *int, progress *int, startedAt *FuzzyDateInput, completedAt *FuzzyDateInput, interceptors ...clientv2.RequestInterceptor) (*UpdateMediaListEntry, error) {
if !ac.IsAuthenticated() {
return nil, ErrNotAuthenticated
}
ac.logger.Debug().Int("mediaId", *mediaID).Msg("anilist: Updating media list entry")
return ac.Client.UpdateMediaListEntry(ctx, mediaID, status, scoreRaw, progress, startedAt, completedAt, interceptors...)
}
func (ac *AnilistClientImpl) UpdateMediaListEntryProgress(ctx context.Context, mediaID *int, progress *int, status *MediaListStatus, interceptors ...clientv2.RequestInterceptor) (*UpdateMediaListEntryProgress, error) {
if !ac.IsAuthenticated() {
return nil, ErrNotAuthenticated
}
ac.logger.Debug().Int("mediaId", *mediaID).Msg("anilist: Updating media list entry progress")
return ac.Client.UpdateMediaListEntryProgress(ctx, mediaID, progress, status, interceptors...)
}
func (ac *AnilistClientImpl) UpdateMediaListEntryRepeat(ctx context.Context, mediaID *int, repeat *int, interceptors ...clientv2.RequestInterceptor) (*UpdateMediaListEntryRepeat, error) {
if !ac.IsAuthenticated() {
return nil, ErrNotAuthenticated
}
ac.logger.Debug().Int("mediaId", *mediaID).Msg("anilist: Updating media list entry repeat")
return ac.Client.UpdateMediaListEntryRepeat(ctx, mediaID, repeat, interceptors...)
}
func (ac *AnilistClientImpl) DeleteEntry(ctx context.Context, mediaListEntryID *int, interceptors ...clientv2.RequestInterceptor) (*DeleteEntry, error) {
if !ac.IsAuthenticated() {
return nil, ErrNotAuthenticated
}
ac.logger.Debug().Int("entryId", *mediaListEntryID).Msg("anilist: Deleting media list entry")
return ac.Client.DeleteEntry(ctx, mediaListEntryID, interceptors...)
}
func (ac *AnilistClientImpl) AnimeCollection(ctx context.Context, userName *string, interceptors ...clientv2.RequestInterceptor) (*AnimeCollection, error) {
if !ac.IsAuthenticated() {
return nil, ErrNotAuthenticated
}
ac.logger.Debug().Msg("anilist: Fetching anime collection")
return ac.Client.AnimeCollection(ctx, userName, interceptors...)
}
func (ac *AnilistClientImpl) AnimeCollectionWithRelations(ctx context.Context, userName *string, interceptors ...clientv2.RequestInterceptor) (*AnimeCollectionWithRelations, error) {
if !ac.IsAuthenticated() {
return nil, ErrNotAuthenticated
}
ac.logger.Debug().Msg("anilist: Fetching anime collection with relations")
return ac.Client.AnimeCollectionWithRelations(ctx, userName, interceptors...)
}
func (ac *AnilistClientImpl) GetViewer(ctx context.Context, interceptors ...clientv2.RequestInterceptor) (*GetViewer, error) {
if !ac.IsAuthenticated() {
return nil, ErrNotAuthenticated
}
ac.logger.Debug().Msg("anilist: Fetching viewer")
return ac.Client.GetViewer(ctx, interceptors...)
}
func (ac *AnilistClientImpl) MangaCollection(ctx context.Context, userName *string, interceptors ...clientv2.RequestInterceptor) (*MangaCollection, error) {
if !ac.IsAuthenticated() {
return nil, ErrNotAuthenticated
}
ac.logger.Debug().Msg("anilist: Fetching manga collection")
return ac.Client.MangaCollection(ctx, userName, interceptors...)
}
func (ac *AnilistClientImpl) ViewerStats(ctx context.Context, interceptors ...clientv2.RequestInterceptor) (*ViewerStats, error) {
if !ac.IsAuthenticated() {
return nil, ErrNotAuthenticated
}
ac.logger.Debug().Msg("anilist: Fetching stats")
return ac.Client.ViewerStats(ctx, interceptors...)
}
////////////////////////////////
// Not authenticated
////////////////////////////////
func (ac *AnilistClientImpl) BaseAnimeByMalID(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*BaseAnimeByMalID, error) {
return ac.Client.BaseAnimeByMalID(ctx, id, interceptors...)
}
func (ac *AnilistClientImpl) BaseAnimeByID(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*BaseAnimeByID, error) {
ac.logger.Debug().Int("mediaId", *id).Msg("anilist: Fetching anime")
return ac.Client.BaseAnimeByID(ctx, id, interceptors...)
}
func (ac *AnilistClientImpl) AnimeDetailsByID(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*AnimeDetailsByID, error) {
ac.logger.Debug().Int("mediaId", *id).Msg("anilist: Fetching anime details")
return ac.Client.AnimeDetailsByID(ctx, id, interceptors...)
}
func (ac *AnilistClientImpl) CompleteAnimeByID(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*CompleteAnimeByID, error) {
ac.logger.Debug().Int("mediaId", *id).Msg("anilist: Fetching complete media")
return ac.Client.CompleteAnimeByID(ctx, id, interceptors...)
}
func (ac *AnilistClientImpl) ListAnime(ctx context.Context, page *int, search *string, perPage *int, sort []*MediaSort, status []*MediaStatus, genres []*string, averageScoreGreater *int, season *MediaSeason, seasonYear *int, format *MediaFormat, isAdult *bool, interceptors ...clientv2.RequestInterceptor) (*ListAnime, error) {
ac.logger.Debug().Msg("anilist: Fetching media list")
return ac.Client.ListAnime(ctx, page, search, perPage, sort, status, genres, averageScoreGreater, season, seasonYear, format, isAdult, interceptors...)
}
func (ac *AnilistClientImpl) ListRecentAnime(ctx context.Context, page *int, perPage *int, airingAtGreater *int, airingAtLesser *int, notYetAired *bool, interceptors ...clientv2.RequestInterceptor) (*ListRecentAnime, error) {
ac.logger.Debug().Msg("anilist: Fetching recent media list")
return ac.Client.ListRecentAnime(ctx, page, perPage, airingAtGreater, airingAtLesser, notYetAired, interceptors...)
}
func (ac *AnilistClientImpl) SearchBaseManga(ctx context.Context, page *int, perPage *int, sort []*MediaSort, search *string, status []*MediaStatus, interceptors ...clientv2.RequestInterceptor) (*SearchBaseManga, error) {
ac.logger.Debug().Msg("anilist: Searching manga")
return ac.Client.SearchBaseManga(ctx, page, perPage, sort, search, status, interceptors...)
}
func (ac *AnilistClientImpl) BaseMangaByID(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*BaseMangaByID, error) {
ac.logger.Debug().Int("mediaId", *id).Msg("anilist: Fetching manga")
return ac.Client.BaseMangaByID(ctx, id, interceptors...)
}
func (ac *AnilistClientImpl) MangaDetailsByID(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*MangaDetailsByID, error) {
ac.logger.Debug().Int("mediaId", *id).Msg("anilist: Fetching manga details")
return ac.Client.MangaDetailsByID(ctx, id, interceptors...)
}
func (ac *AnilistClientImpl) ListManga(ctx context.Context, page *int, search *string, perPage *int, sort []*MediaSort, status []*MediaStatus, genres []*string, averageScoreGreater *int, startDateGreater *string, startDateLesser *string, format *MediaFormat, countryOfOrigin *string, isAdult *bool, interceptors ...clientv2.RequestInterceptor) (*ListManga, error) {
ac.logger.Debug().Msg("anilist: Fetching manga list")
return ac.Client.ListManga(ctx, page, search, perPage, sort, status, genres, averageScoreGreater, startDateGreater, startDateLesser, format, countryOfOrigin, isAdult, interceptors...)
}
func (ac *AnilistClientImpl) StudioDetails(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*StudioDetails, error) {
ac.logger.Debug().Int("studioId", *id).Msg("anilist: Fetching studio details")
return ac.Client.StudioDetails(ctx, id, interceptors...)
}
func (ac *AnilistClientImpl) SearchBaseAnimeByIds(ctx context.Context, ids []*int, page *int, perPage *int, status []*MediaStatus, inCollection *bool, sort []*MediaSort, season *MediaSeason, year *int, genre *string, format *MediaFormat, interceptors ...clientv2.RequestInterceptor) (*SearchBaseAnimeByIds, error) {
ac.logger.Debug().Msg("anilist: Searching anime by ids")
return ac.Client.SearchBaseAnimeByIds(ctx, ids, page, perPage, status, inCollection, sort, season, year, genre, format, interceptors...)
}
func (ac *AnilistClientImpl) AnimeAiringSchedule(ctx context.Context, ids []*int, season *MediaSeason, seasonYear *int, previousSeason *MediaSeason, previousSeasonYear *int, nextSeason *MediaSeason, nextSeasonYear *int, interceptors ...clientv2.RequestInterceptor) (*AnimeAiringSchedule, error) {
ac.logger.Debug().Msg("anilist: Fetching schedule")
return ac.Client.AnimeAiringSchedule(ctx, ids, season, seasonYear, previousSeason, previousSeasonYear, nextSeason, nextSeasonYear, interceptors...)
}
func (ac *AnilistClientImpl) AnimeAiringScheduleRaw(ctx context.Context, ids []*int, interceptors ...clientv2.RequestInterceptor) (*AnimeAiringScheduleRaw, error) {
ac.logger.Debug().Msg("anilist: Fetching schedule")
return ac.Client.AnimeAiringScheduleRaw(ctx, ids, interceptors...)
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
var sentRateLimitWarningTime = time.Now().Add(-10 * time.Second)
// customDoFunc is a custom request interceptor function that handles rate limiting and retries.
func (ac *AnilistClientImpl) customDoFunc(ctx context.Context, req *http.Request, gqlInfo *clientv2.GQLRequestInfo, res interface{}) (err error) {
var rlRemainingStr string
reqTime := time.Now()
defer func() {
timeSince := time.Since(reqTime)
formattedDur := timeSince.Truncate(time.Millisecond).String()
if err != nil {
ac.logger.Error().Str("duration", formattedDur).Str("rlr", rlRemainingStr).Err(err).Msg("anilist: Failed Request")
} else {
if timeSince > 900*time.Millisecond {
ac.logger.Warn().Str("rtt", formattedDur).Str("rlr", rlRemainingStr).Msg("anilist: Successful Request (slow)")
} else {
ac.logger.Info().Str("rtt", formattedDur).Str("rlr", rlRemainingStr).Msg("anilist: Successful Request")
}
}
}()
client := http.DefaultClient
var resp *http.Response
retryCount := 2
for i := 0; i < retryCount; i++ {
// Reset response body for retry
if resp != nil && resp.Body != nil {
resp.Body.Close()
}
// Recreate the request body if it was read in a previous attempt
if req.GetBody != nil {
newBody, err := req.GetBody()
if err != nil {
return fmt.Errorf("failed to get request body: %w", err)
}
req.Body = newBody
}
resp, err = client.Do(req)
if err != nil {
return fmt.Errorf("request failed: %w", err)
}
rlRemainingStr = resp.Header.Get("X-Ratelimit-Remaining")
rlRetryAfterStr := resp.Header.Get("Retry-After")
//println("Remaining:", rlRemainingStr, " | RetryAfter:", rlRetryAfterStr)
// If we have a rate limit, sleep for the time
rlRetryAfter, err := strconv.Atoi(rlRetryAfterStr)
if err == nil {
ac.logger.Warn().Msgf("anilist: Rate limited, retrying in %d seconds", rlRetryAfter+1)
if time.Since(sentRateLimitWarningTime) > 10*time.Second {
events.GlobalWSEventManager.SendEvent(events.WarningToast, "anilist: Rate limited, retrying in "+strconv.Itoa(rlRetryAfter+1)+" seconds")
sentRateLimitWarningTime = time.Now()
}
select {
case <-time.After(time.Duration(rlRetryAfter+1) * time.Second):
continue
}
}
if rlRemainingStr == "" {
select {
case <-time.After(5 * time.Second):
continue
}
}
break
}
defer resp.Body.Close()
if resp.Header.Get("Content-Encoding") == "gzip" {
resp.Body, err = gzip.NewReader(resp.Body)
if err != nil {
return fmt.Errorf("gzip decode failed: %w", err)
}
}
var body []byte
body, err = io.ReadAll(resp.Body)
if err != nil {
return fmt.Errorf("failed to read response body: %w", err)
}
err = parseResponse(body, resp.StatusCode, res)
return
}
func parseResponse(body []byte, httpCode int, result interface{}) error {
errResponse := &clientv2.ErrorResponse{}
isKOCode := httpCode < 200 || 299 < httpCode
if isKOCode {
errResponse.NetworkError = &clientv2.HTTPError{
Code: httpCode,
Message: fmt.Sprintf("Response body %s", string(body)),
}
}
// some servers return a graphql error with a non OK http code, try anyway to parse the body
if err := unmarshal(body, result); err != nil {
var gqlErr *clientv2.GqlErrorList
if errors.As(err, &gqlErr) {
errResponse.GqlErrors = &gqlErr.Errors
} else if !isKOCode {
return err
}
}
if errResponse.HasErrors() {
return errResponse
}
return nil
}
// response is a GraphQL layer response from a handler.
type response struct {
Data json.RawMessage `json:"data"`
Errors json.RawMessage `json:"errors"`
}
func unmarshal(data []byte, res interface{}) error {
ParseDataWhenErrors := false
resp := response{}
if err := json.Unmarshal(data, &resp); err != nil {
return fmt.Errorf("failed to decode data %s: %w", string(data), err)
}
var err error
if resp.Errors != nil && len(resp.Errors) > 0 {
// try to parse standard graphql error
err = &clientv2.GqlErrorList{}
if e := json.Unmarshal(data, err); e != nil {
return fmt.Errorf("faild to parse graphql errors. Response content %s - %w", string(data), e)
}
// if ParseDataWhenErrors is true, try to parse data as well
if !ParseDataWhenErrors {
return err
}
}
if errData := graphqljson.UnmarshalData(resp.Data, res); errData != nil {
// if ParseDataWhenErrors is true, and we failed to unmarshal data, return the actual error
if ParseDataWhenErrors {
return err
}
return fmt.Errorf("failed to decode data into response %s: %w", string(data), errData)
}
return err
}

File diff suppressed because it is too large


@@ -0,0 +1,569 @@
package anilist
import (
"context"
"log"
"os"
"seanime/internal/test_utils"
"seanime/internal/util"
"github.com/Yamashou/gqlgenc/clientv2"
"github.com/goccy/go-json"
"github.com/rs/zerolog"
)
// This file contains helper functions for testing the anilist package
func TestGetMockAnilistClient() AnilistClient {
return NewMockAnilistClient()
}
// MockAnilistClientImpl is a mock implementation of the AnilistClient, used for tests.
// It uses the real implementation of the AnilistClient to make requests, then populates a cache with the results.
// This is to avoid making repeated requests to the AniList API during tests while still having realistic data.
type MockAnilistClientImpl struct {
realAnilistClient AnilistClient
logger *zerolog.Logger
}
func NewMockAnilistClient() *MockAnilistClientImpl {
return &MockAnilistClientImpl{
realAnilistClient: NewAnilistClient(test_utils.ConfigData.Provider.AnilistJwt),
logger: util.NewLogger(),
}
}
func (ac *MockAnilistClientImpl) IsAuthenticated() bool {
return ac.realAnilistClient.IsAuthenticated()
}
func (ac *MockAnilistClientImpl) BaseAnimeByMalID(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*BaseAnimeByMalID, error) {
file, err := os.Open(test_utils.GetTestDataPath("BaseAnimeByMalID"))
defer file.Close()
if err != nil {
if os.IsNotExist(err) {
ac.logger.Warn().Msgf("MockAnilistClientImpl: CACHE MISS [BaseAnimeByMalID]: %d", *id)
ret, err := ac.realAnilistClient.BaseAnimeByMalID(context.Background(), id)
if err != nil {
return nil, err
}
data, err := json.Marshal([]*BaseAnimeByMalID{ret})
if err != nil {
log.Fatal(err)
}
err = os.WriteFile(test_utils.GetTestDataPath("BaseAnimeByMalID"), data, 0644)
if err != nil {
log.Fatal(err)
}
return ret, nil
}
}
var media []*BaseAnimeByMalID
err = json.NewDecoder(file).Decode(&media)
if err != nil {
log.Fatal(err)
}
var ret *BaseAnimeByMalID
for _, m := range media {
if m.GetMedia().ID == *id {
ret = m
break
}
}
if ret == nil {
ac.logger.Warn().Msgf("MockAnilistClientImpl: CACHE MISS [BaseAnimeByMalID]: %d", *id)
ret, err := ac.realAnilistClient.BaseAnimeByMalID(context.Background(), id)
if err != nil {
return nil, err
}
media = append(media, ret)
data, err := json.Marshal(media)
if err != nil {
log.Fatal(err)
}
err = os.WriteFile(test_utils.GetTestDataPath("BaseAnimeByMalID"), data, 0644)
if err != nil {
log.Fatal(err)
}
return ret, nil
}
ac.logger.Trace().Msgf("MockAnilistClientImpl: CACHE HIT [BaseAnimeByMalID]: %d", *id)
return ret, nil
}
func (ac *MockAnilistClientImpl) BaseAnimeByID(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*BaseAnimeByID, error) {
file, err := os.Open(test_utils.GetTestDataPath("BaseAnimeByID"))
defer file.Close()
if err != nil {
if os.IsNotExist(err) {
ac.logger.Warn().Msgf("MockAnilistClientImpl: CACHE MISS [BaseAnimeByID]: %d", *id)
baseAnime, err := ac.realAnilistClient.BaseAnimeByID(context.Background(), id)
if err != nil {
return nil, err
}
data, err := json.Marshal([]*BaseAnimeByID{baseAnime})
if err != nil {
log.Fatal(err)
}
err = os.WriteFile(test_utils.GetTestDataPath("BaseAnimeByID"), data, 0644)
if err != nil {
log.Fatal(err)
}
return baseAnime, nil
}
}
var media []*BaseAnimeByID
err = json.NewDecoder(file).Decode(&media)
if err != nil {
log.Fatal(err)
}
var baseAnime *BaseAnimeByID
for _, m := range media {
if m.GetMedia().ID == *id {
baseAnime = m
break
}
}
if baseAnime == nil {
ac.logger.Warn().Msgf("MockAnilistClientImpl: CACHE MISS [BaseAnimeByID]: %d", *id)
baseAnime, err := ac.realAnilistClient.BaseAnimeByID(context.Background(), id)
if err != nil {
return nil, err
}
media = append(media, baseAnime)
data, err := json.Marshal(media)
if err != nil {
log.Fatal(err)
}
err = os.WriteFile(test_utils.GetTestDataPath("BaseAnimeByID"), data, 0644)
if err != nil {
log.Fatal(err)
}
return baseAnime, nil
}
ac.logger.Trace().Msgf("MockAnilistClientImpl: CACHE HIT [BaseAnimeByID]: %d", *id)
return baseAnime, nil
}
// AnimeCollection
// - Set userName to nil to use the boilerplate AnimeCollection
// - Set userName to a specific username to fetch and cache
func (ac *MockAnilistClientImpl) AnimeCollection(ctx context.Context, userName *string, interceptors ...clientv2.RequestInterceptor) (*AnimeCollection, error) {
if userName == nil {
file, err := os.Open(test_utils.GetDataPath("BoilerplateAnimeCollection"))
defer file.Close()
var ret *AnimeCollection
err = json.NewDecoder(file).Decode(&ret)
if err != nil {
log.Fatal(err)
}
ac.logger.Trace().Msgf("MockAnilistClientImpl: Using [BoilerplateAnimeCollection]")
return ret, nil
}
file, err := os.Open(test_utils.GetTestDataPath("AnimeCollection"))
defer file.Close()
if err != nil {
if os.IsNotExist(err) {
ac.logger.Warn().Msgf("MockAnilistClientImpl: CACHE MISS [AnimeCollection]: %s", *userName)
ret, err := ac.realAnilistClient.AnimeCollection(context.Background(), userName)
if err != nil {
return nil, err
}
data, err := json.Marshal(ret)
if err != nil {
log.Fatal(err)
}
err = os.WriteFile(test_utils.GetTestDataPath("AnimeCollection"), data, 0644)
if err != nil {
log.Fatal(err)
}
return ret, nil
}
}
var ret *AnimeCollection
err = json.NewDecoder(file).Decode(&ret)
if err != nil {
log.Fatal(err)
}
if ret == nil {
ac.logger.Warn().Msgf("MockAnilistClientImpl: CACHE MISS [AnimeCollection]: %s", *userName)
ret, err := ac.realAnilistClient.AnimeCollection(context.Background(), userName)
if err != nil {
return nil, err
}
data, err := json.Marshal(ret)
if err != nil {
log.Fatal(err)
}
err = os.WriteFile(test_utils.GetTestDataPath("AnimeCollection"), data, 0644)
if err != nil {
log.Fatal(err)
}
return ret, nil
}
ac.logger.Trace().Msgf("MockAnilistClientImpl: CACHE HIT [AnimeCollection]: %s", *userName)
return ret, nil
}
func (ac *MockAnilistClientImpl) AnimeCollectionWithRelations(ctx context.Context, userName *string, interceptors ...clientv2.RequestInterceptor) (*AnimeCollectionWithRelations, error) {
if userName == nil {
file, err := os.Open(test_utils.GetDataPath("BoilerplateAnimeCollectionWithRelations"))
defer file.Close()
var ret *AnimeCollectionWithRelations
err = json.NewDecoder(file).Decode(&ret)
if err != nil {
log.Fatal(err)
}
ac.logger.Trace().Msgf("MockAnilistClientImpl: Using [BoilerplateAnimeCollectionWithRelations]")
return ret, nil
}
file, err := os.Open(test_utils.GetTestDataPath("AnimeCollectionWithRelations"))
defer file.Close()
if err != nil {
if os.IsNotExist(err) {
ac.logger.Warn().Msgf("MockAnilistClientImpl: CACHE MISS [AnimeCollectionWithRelations]: %s", *userName)
ret, err := ac.realAnilistClient.AnimeCollectionWithRelations(context.Background(), userName)
if err != nil {
return nil, err
}
data, err := json.Marshal(ret)
if err != nil {
log.Fatal(err)
}
err = os.WriteFile(test_utils.GetTestDataPath("AnimeCollectionWithRelations"), data, 0644)
if err != nil {
log.Fatal(err)
}
return ret, nil
}
}
var ret *AnimeCollectionWithRelations
err = json.NewDecoder(file).Decode(&ret)
if err != nil {
log.Fatal(err)
}
if ret == nil {
ac.logger.Warn().Msgf("MockAnilistClientImpl: CACHE MISS [AnimeCollectionWithRelations]: %s", *userName)
ret, err := ac.realAnilistClient.AnimeCollectionWithRelations(context.Background(), userName)
if err != nil {
return nil, err
}
data, err := json.Marshal(ret)
if err != nil {
log.Fatal(err)
}
err = os.WriteFile(test_utils.GetTestDataPath("AnimeCollectionWithRelations"), data, 0644)
if err != nil {
log.Fatal(err)
}
return ret, nil
}
ac.logger.Trace().Msgf("MockAnilistClientImpl: CACHE HIT [AnimeCollectionWithRelations]: %s", *userName)
return ret, nil
}
type TestModifyAnimeCollectionEntryInput struct {
Status *MediaListStatus
Progress *int
Score *float64
AiredEpisodes *int
NextAiringEpisode *BaseAnime_NextAiringEpisode
}
// TestModifyAnimeCollectionEntry will modify an entry in the fetched anime collection.
// This is used to fine-tune the anime collection for testing purposes.
//
// Example: Setting a specific progress in case the origin anime collection has no progress
func TestModifyAnimeCollectionEntry(ac *AnimeCollection, mId int, input TestModifyAnimeCollectionEntryInput) *AnimeCollection {
if ac == nil {
panic("AnimeCollection is nil")
}
lists := ac.GetMediaListCollection().GetLists()
removedFromList := false
var rEntry *AnimeCollection_MediaListCollection_Lists_Entries
// Move the entry to the correct list
if input.Status != nil {
for _, list := range lists {
if list.Status == nil || list.Entries == nil {
continue
}
entries := list.GetEntries()
for idx, entry := range entries {
if entry.GetMedia().ID == mId {
// Remove from current list if status differs
if *list.Status != *input.Status {
removedFromList = true
rEntry = entry
// Ensure we're not going out of bounds
if idx >= 0 && idx < len(entries) {
// Safely remove the entry by re-slicing
list.Entries = append(entries[:idx], entries[idx+1:]...)
}
break
}
}
}
}
// Add the entry to the correct list if it was removed
if removedFromList && rEntry != nil {
for _, list := range lists {
if list.Status == nil {
continue
}
if *list.Status == *input.Status {
if list.Entries == nil {
list.Entries = make([]*AnimeCollection_MediaListCollection_Lists_Entries, 0)
}
// Add the removed entry to the new list
list.Entries = append(list.Entries, rEntry)
break
}
}
}
}
// Update the entry details
out:
for _, list := range lists {
entries := list.GetEntries()
for _, entry := range entries {
if entry.GetMedia().ID == mId {
if input.Status != nil {
entry.Status = input.Status
}
if input.Progress != nil {
entry.Progress = input.Progress
}
if input.Score != nil {
entry.Score = input.Score
}
if input.AiredEpisodes != nil {
entry.Media.Episodes = input.AiredEpisodes
}
if input.NextAiringEpisode != nil {
entry.Media.NextAiringEpisode = input.NextAiringEpisode
}
break out
}
}
}
return ac
}
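// exampleModifyAnimeCollectionEntry is an illustrative sketch, not part of the original file: it
// forces a known status/progress on a cached collection entry before assertions, as the doc
// comment above suggests. The media ID and values are placeholders, and MediaListStatusCurrent
// is assumed to exist in the generated enum.
func exampleModifyAnimeCollectionEntry() {
	anilistClient := TestGetMockAnilistClient()
	ac, _ := anilistClient.AnimeCollection(context.Background(), nil) // nil userName -> boilerplate collection
	status := MediaListStatusCurrent
	progress := 5
	ac = TestModifyAnimeCollectionEntry(ac, 1, TestModifyAnimeCollectionEntryInput{
		Status:   &status,
		Progress: &progress,
	})
	_ = ac
}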
func TestAddAnimeCollectionEntry(ac *AnimeCollection, mId int, input TestModifyAnimeCollectionEntryInput, realClient AnilistClient) *AnimeCollection {
if ac == nil {
panic("AnimeCollection is nil")
}
// Fetch the anime details
baseAnime, err := realClient.BaseAnimeByID(context.Background(), &mId)
if err != nil {
log.Fatal(err)
}
anime := baseAnime.GetMedia()
if input.NextAiringEpisode != nil {
anime.NextAiringEpisode = input.NextAiringEpisode
}
if input.AiredEpisodes != nil {
anime.Episodes = input.AiredEpisodes
}
lists := ac.GetMediaListCollection().GetLists()
// Add the entry to the correct list
if input.Status != nil {
for _, list := range lists {
if list.Status == nil {
continue
}
if *list.Status == *input.Status {
if list.Entries == nil {
list.Entries = make([]*AnimeCollection_MediaListCollection_Lists_Entries, 0)
}
list.Entries = append(list.Entries, &AnimeCollection_MediaListCollection_Lists_Entries{
Media: baseAnime.GetMedia(),
Status: input.Status,
Progress: input.Progress,
Score: input.Score,
})
break
}
}
}
return ac
}
func TestAddAnimeCollectionWithRelationsEntry(ac *AnimeCollectionWithRelations, mId int, input TestModifyAnimeCollectionEntryInput, realClient AnilistClient) *AnimeCollectionWithRelations {
if ac == nil {
panic("AnimeCollection is nil")
}
// Fetch the anime details
baseAnime, err := realClient.CompleteAnimeByID(context.Background(), &mId)
if err != nil {
log.Fatal(err)
}
anime := baseAnime.GetMedia()
//if input.NextAiringEpisode != nil {
// anime.NextAiringEpisode = input.NextAiringEpisode
//}
if input.AiredEpisodes != nil {
anime.Episodes = input.AiredEpisodes
}
lists := ac.GetMediaListCollection().GetLists()
// Add the entry to the correct list
if input.Status != nil {
for _, list := range lists {
if list.Status == nil {
continue
}
if *list.Status == *input.Status {
if list.Entries == nil {
list.Entries = make([]*AnimeCollectionWithRelations_MediaListCollection_Lists_Entries, 0)
}
list.Entries = append(list.Entries, &AnimeCollectionWithRelations_MediaListCollection_Lists_Entries{
Media: baseAnime.GetMedia(),
Status: input.Status,
Progress: input.Progress,
Score: input.Score,
})
break
}
}
}
return ac
}
//
// WILL NOT IMPLEMENT
//
func (ac *MockAnilistClientImpl) UpdateMediaListEntry(ctx context.Context, mediaID *int, status *MediaListStatus, scoreRaw *int, progress *int, startedAt *FuzzyDateInput, completedAt *FuzzyDateInput, interceptors ...clientv2.RequestInterceptor) (*UpdateMediaListEntry, error) {
ac.logger.Debug().Int("mediaId", *mediaID).Msg("anilist: Updating media list entry")
return &UpdateMediaListEntry{}, nil
}
func (ac *MockAnilistClientImpl) UpdateMediaListEntryProgress(ctx context.Context, mediaID *int, progress *int, status *MediaListStatus, interceptors ...clientv2.RequestInterceptor) (*UpdateMediaListEntryProgress, error) {
ac.logger.Debug().Int("mediaId", *mediaID).Msg("anilist: Updating media list entry progress")
return &UpdateMediaListEntryProgress{}, nil
}
func (ac *MockAnilistClientImpl) UpdateMediaListEntryRepeat(ctx context.Context, mediaID *int, repeat *int, interceptors ...clientv2.RequestInterceptor) (*UpdateMediaListEntryRepeat, error) {
ac.logger.Debug().Int("mediaId", *mediaID).Msg("anilist: Updating media list entry repeat")
return &UpdateMediaListEntryRepeat{}, nil
}
func (ac *MockAnilistClientImpl) DeleteEntry(ctx context.Context, mediaListEntryID *int, interceptors ...clientv2.RequestInterceptor) (*DeleteEntry, error) {
ac.logger.Debug().Int("entryId", *mediaListEntryID).Msg("anilist: Deleting media list entry")
return &DeleteEntry{}, nil
}
func (ac *MockAnilistClientImpl) AnimeDetailsByID(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*AnimeDetailsByID, error) {
ac.logger.Debug().Int("mediaId", *id).Msg("anilist: Fetching anime details")
return ac.realAnilistClient.AnimeDetailsByID(ctx, id, interceptors...)
}
func (ac *MockAnilistClientImpl) CompleteAnimeByID(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*CompleteAnimeByID, error) {
ac.logger.Debug().Int("mediaId", *id).Msg("anilist: Fetching complete media")
return ac.realAnilistClient.CompleteAnimeByID(ctx, id, interceptors...)
}
func (ac *MockAnilistClientImpl) ListAnime(ctx context.Context, page *int, search *string, perPage *int, sort []*MediaSort, status []*MediaStatus, genres []*string, averageScoreGreater *int, season *MediaSeason, seasonYear *int, format *MediaFormat, isAdult *bool, interceptors ...clientv2.RequestInterceptor) (*ListAnime, error) {
ac.logger.Debug().Msg("anilist: Fetching media list")
return ac.realAnilistClient.ListAnime(ctx, page, search, perPage, sort, status, genres, averageScoreGreater, season, seasonYear, format, isAdult, interceptors...)
}
func (ac *MockAnilistClientImpl) ListRecentAnime(ctx context.Context, page *int, perPage *int, airingAtGreater *int, airingAtLesser *int, notYetAired *bool, interceptors ...clientv2.RequestInterceptor) (*ListRecentAnime, error) {
ac.logger.Debug().Msg("anilist: Fetching recent media list")
return ac.realAnilistClient.ListRecentAnime(ctx, page, perPage, airingAtGreater, airingAtLesser, notYetAired, interceptors...)
}
func (ac *MockAnilistClientImpl) GetViewer(ctx context.Context, interceptors ...clientv2.RequestInterceptor) (*GetViewer, error) {
ac.logger.Debug().Msg("anilist: Fetching viewer")
return ac.realAnilistClient.GetViewer(ctx, interceptors...)
}
func (ac *MockAnilistClientImpl) MangaCollection(ctx context.Context, userName *string, interceptors ...clientv2.RequestInterceptor) (*MangaCollection, error) {
ac.logger.Debug().Msg("anilist: Fetching manga collection")
return ac.realAnilistClient.MangaCollection(ctx, userName, interceptors...)
}
func (ac *MockAnilistClientImpl) SearchBaseManga(ctx context.Context, page *int, perPage *int, sort []*MediaSort, search *string, status []*MediaStatus, interceptors ...clientv2.RequestInterceptor) (*SearchBaseManga, error) {
ac.logger.Debug().Msg("anilist: Searching manga")
return ac.realAnilistClient.SearchBaseManga(ctx, page, perPage, sort, search, status, interceptors...)
}
func (ac *MockAnilistClientImpl) BaseMangaByID(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*BaseMangaByID, error) {
ac.logger.Debug().Int("mediaId", *id).Msg("anilist: Fetching manga")
return ac.realAnilistClient.BaseMangaByID(ctx, id, interceptors...)
}
func (ac *MockAnilistClientImpl) MangaDetailsByID(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*MangaDetailsByID, error) {
ac.logger.Debug().Int("mediaId", *id).Msg("anilist: Fetching manga details")
return ac.realAnilistClient.MangaDetailsByID(ctx, id, interceptors...)
}
func (ac *MockAnilistClientImpl) ListManga(ctx context.Context, page *int, search *string, perPage *int, sort []*MediaSort, status []*MediaStatus, genres []*string, averageScoreGreater *int, startDateGreater *string, startDateLesser *string, format *MediaFormat, countryOfOrigin *string, isAdult *bool, interceptors ...clientv2.RequestInterceptor) (*ListManga, error) {
ac.logger.Debug().Msg("anilist: Fetching manga list")
return ac.realAnilistClient.ListManga(ctx, page, search, perPage, sort, status, genres, averageScoreGreater, startDateGreater, startDateLesser, format, countryOfOrigin, isAdult, interceptors...)
}
func (ac *MockAnilistClientImpl) StudioDetails(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*StudioDetails, error) {
ac.logger.Debug().Int("studioId", *id).Msg("anilist: Fetching studio details")
return ac.realAnilistClient.StudioDetails(ctx, id, interceptors...)
}
func (ac *MockAnilistClientImpl) ViewerStats(ctx context.Context, interceptors ...clientv2.RequestInterceptor) (*ViewerStats, error) {
ac.logger.Debug().Msg("anilist: Fetching stats")
return ac.realAnilistClient.ViewerStats(ctx, interceptors...)
}
func (ac *MockAnilistClientImpl) SearchBaseAnimeByIds(ctx context.Context, ids []*int, page *int, perPage *int, status []*MediaStatus, inCollection *bool, sort []*MediaSort, season *MediaSeason, year *int, genre *string, format *MediaFormat, interceptors ...clientv2.RequestInterceptor) (*SearchBaseAnimeByIds, error) {
ac.logger.Debug().Msg("anilist: Searching anime by ids")
return ac.realAnilistClient.SearchBaseAnimeByIds(ctx, ids, page, perPage, status, inCollection, sort, season, year, genre, format, interceptors...)
}
func (ac *MockAnilistClientImpl) AnimeAiringSchedule(ctx context.Context, ids []*int, season *MediaSeason, seasonYear *int, previousSeason *MediaSeason, previousSeasonYear *int, nextSeason *MediaSeason, nextSeasonYear *int, interceptors ...clientv2.RequestInterceptor) (*AnimeAiringSchedule, error) {
ac.logger.Debug().Msg("anilist: Fetching schedule")
return ac.realAnilistClient.AnimeAiringSchedule(ctx, ids, season, seasonYear, previousSeason, previousSeasonYear, nextSeason, nextSeasonYear, interceptors...)
}
func (ac *MockAnilistClientImpl) AnimeAiringScheduleRaw(ctx context.Context, ids []*int, interceptors ...clientv2.RequestInterceptor) (*AnimeAiringScheduleRaw, error) {
ac.logger.Debug().Msg("anilist: Fetching schedule")
return ac.realAnilistClient.AnimeAiringScheduleRaw(ctx, ids, interceptors...)
}


@@ -0,0 +1,73 @@
package anilist
import (
"context"
"github.com/goccy/go-json"
"github.com/samber/lo"
"github.com/stretchr/testify/assert"
"os"
"seanime/internal/test_utils"
"testing"
)
// USE CASE: Generate a boilerplate Anilist AnimeCollection for testing purposes and save it to 'test/data/BoilerplateAnimeCollection'.
// The generated AnimeCollection will have all entries in the 'Planning' status.
// The generated AnimeCollection will be used to test various Anilist API methods.
// You can use TestModifyAnimeCollectionEntry to modify the generated AnimeCollection before using it in a test.
// - DO NOT RUN IF YOU DON'T PLAN TO GENERATE A NEW 'test/data/BoilerplateAnimeCollection'
func TestGenerateBoilerplateAnimeCollection(t *testing.T) {
t.Skip("This test is not meant to be run")
test_utils.InitTestProvider(t, test_utils.Anilist())
anilistClient := TestGetMockAnilistClient()
ac, err := anilistClient.AnimeCollection(context.Background(), &test_utils.ConfigData.Provider.AnilistUsername)
if assert.NoError(t, err) {
lists := ac.GetMediaListCollection().GetLists()
entriesToAddToPlanning := make([]*AnimeListEntry, 0)
if assert.NoError(t, err) {
for _, list := range lists {
if list.Status != nil {
if list.GetStatus().String() != string(MediaListStatusPlanning) {
entries := list.GetEntries()
for _, entry := range entries {
entry.Progress = lo.ToPtr(0)
entry.Score = lo.ToPtr(0.0)
entry.Status = lo.ToPtr(MediaListStatusPlanning)
entriesToAddToPlanning = append(entriesToAddToPlanning, entry)
}
list.Entries = make([]*AnimeListEntry, 0)
}
}
}
newLists := make([]*AnimeCollection_MediaListCollection_Lists, 0)
for _, list := range lists {
if list.Status == nil {
continue
}
if *list.GetStatus() == MediaListStatusPlanning {
list.Entries = append(list.Entries, entriesToAddToPlanning...)
newLists = append(newLists, list)
} else {
newLists = append(newLists, list)
}
}
ac.MediaListCollection.Lists = newLists
data, err := json.Marshal(ac)
if assert.NoError(t, err) {
err = os.WriteFile(test_utils.GetDataPath("BoilerplateAnimeCollection"), data, 0644)
assert.NoError(t, err)
}
}
}
}


@@ -0,0 +1,237 @@
package anilist
import (
"context"
"github.com/davecgh/go-spew/spew"
"github.com/samber/lo"
"github.com/stretchr/testify/assert"
"seanime/internal/test_utils"
"seanime/internal/util"
"testing"
)
//func TestHiddenFromStatus(t *testing.T) {
// test_utils.InitTestProvider(t, test_utils.Anilist())
//
// token := test_utils.ConfigData.Provider.AnilistJwt
// logger := util.NewLogger()
// //anilistClient := NewAnilistClient(test_utils.ConfigData.Provider.AnilistJwt)
//
// variables := map[string]interface{}{}
//
// variables["userName"] = test_utils.ConfigData.Provider.AnilistUsername
// variables["type"] = "ANIME"
//
// requestBody, err := json.Marshal(map[string]interface{}{
// "query": testQuery,
// "variables": variables,
// })
// require.NoError(t, err)
//
// data, err := customQuery(requestBody, logger, token)
// require.NoError(t, err)
//
// var mediaLists []*MediaList
//
// type retData struct {
// Page Page
// PageInfo PageInfo
// }
//
// var ret retData
// m, err := json.Marshal(data)
// require.NoError(t, err)
// if err := json.Unmarshal(m, &ret); err != nil {
// t.Fatalf("Failed to unmarshal data: %v", err)
// }
//
// mediaLists = append(mediaLists, ret.Page.MediaList...)
//
// util.Spew(ret.Page.PageInfo)
//
// var currentPage = 1
// var hasNextPage = false
// if ret.Page.PageInfo != nil && ret.Page.PageInfo.HasNextPage != nil {
// hasNextPage = *ret.Page.PageInfo.HasNextPage
// }
// for hasNextPage {
// currentPage++
// variables["page"] = currentPage
// requestBody, err = json.Marshal(map[string]interface{}{
// "query": testQuery,
// "variables": variables,
// })
// require.NoError(t, err)
// data, err = customQuery(requestBody, logger, token)
// require.NoError(t, err)
// m, err = json.Marshal(data)
// require.NoError(t, err)
// if err := json.Unmarshal(m, &ret); err != nil {
// t.Fatalf("Failed to unmarshal data: %v", err)
// }
// util.Spew(ret.Page.PageInfo)
// if ret.Page.PageInfo != nil && ret.Page.PageInfo.HasNextPage != nil {
// hasNextPage = *ret.Page.PageInfo.HasNextPage
// }
// mediaLists = append(mediaLists, ret.Page.MediaList...)
// }
//
// //res, err := anilistClient.AnimeCollection(context.Background(), &test_utils.ConfigData.Provider.AnilistUsername)
// //assert.NoError(t, err)
//
// for _, mediaList := range mediaLists {
// util.Spew(mediaList.Media.ID)
// if mediaList.Media.ID == 151514 {
// util.Spew(mediaList)
// }
// }
//
//}
//
//const testQuery = `query ($page: Int, $userName: String, $type: MediaType) {
// Page (page: $page, perPage: 100) {
// pageInfo {
// hasNextPage
// total
// perPage
// currentPage
// lastPage
// }
// mediaList (type: $type, userName: $userName) {
// status
// startedAt {
// year
// month
// day
// }
// completedAt {
// year
// month
// day
// }
// repeat
// score(format: POINT_100)
// progress
// progressVolumes
// notes
// media {
// siteUrl
// id
// idMal
// episodes
// chapters
// volumes
// status
// averageScore
// coverImage{
// large
// extraLarge
// }
// bannerImage
// title {
// userPreferred
// }
// }
// }
// }
// }`
func TestGetAnimeById(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.Anilist())
anilistClient := TestGetMockAnilistClient()
tests := []struct {
name string
mediaId int
}{
{
name: "Cowboy Bebop",
mediaId: 1,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
res, err := anilistClient.BaseAnimeByID(context.Background(), &tt.mediaId)
assert.NoError(t, err)
assert.NotNil(t, res)
})
}
}
func TestListAnime(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.Anilist())
tests := []struct {
name string
Page *int
Search *string
PerPage *int
Sort []*MediaSort
Status []*MediaStatus
Genres []*string
AverageScoreGreater *int
Season *MediaSeason
SeasonYear *int
Format *MediaFormat
IsAdult *bool
}{
{
name: "Popular",
Page: lo.ToPtr(1),
Search: nil,
PerPage: lo.ToPtr(20),
Sort: []*MediaSort{lo.ToPtr(MediaSortTrendingDesc)},
Status: nil,
Genres: nil,
AverageScoreGreater: nil,
Season: nil,
SeasonYear: nil,
Format: nil,
IsAdult: nil,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
cacheKey := ListAnimeCacheKey(
tt.Page,
tt.Search,
tt.PerPage,
tt.Sort,
tt.Status,
tt.Genres,
tt.AverageScoreGreater,
tt.Season,
tt.SeasonYear,
tt.Format,
tt.IsAdult,
)
t.Log(cacheKey)
res, err := ListAnimeM(
tt.Page,
tt.Search,
tt.PerPage,
tt.Sort,
tt.Status,
tt.Genres,
tt.AverageScoreGreater,
tt.Season,
tt.SeasonYear,
tt.Format,
tt.IsAdult,
util.NewLogger(),
"",
)
assert.NoError(t, err)
assert.Equal(t, *tt.PerPage, len(res.GetPage().GetMedia()))
spew.Dump(res)
})
}
}


@@ -0,0 +1,248 @@
package anilist
import (
"time"
"github.com/goccy/go-json"
)
type (
AnimeListEntry = AnimeCollection_MediaListCollection_Lists_Entries
AnimeList = AnimeCollection_MediaListCollection_Lists
EntryDate struct {
Year *int `json:"year,omitempty"`
Month *int `json:"month,omitempty"`
Day *int `json:"day,omitempty"`
}
)
func (ac *AnimeCollection) GetListEntryFromAnimeId(id int) (*AnimeListEntry, bool) {
if ac == nil || ac.MediaListCollection == nil {
return nil, false
}
var entry *AnimeCollection_MediaListCollection_Lists_Entries
for _, l := range ac.MediaListCollection.Lists {
if l.Entries == nil || len(l.Entries) == 0 {
continue
}
for _, e := range l.Entries {
if e.Media.ID == id {
entry = e
break
}
}
}
if entry == nil {
return nil, false
}
return entry, true
}
func (ac *AnimeCollection) GetAllAnime() []*BaseAnime {
if ac == nil {
return make([]*BaseAnime, 0)
}
var ret []*BaseAnime
addedId := make(map[int]bool)
for _, l := range ac.MediaListCollection.Lists {
if l.Entries == nil || len(l.Entries) == 0 {
continue
}
for _, e := range l.Entries {
if _, ok := addedId[e.Media.ID]; !ok {
ret = append(ret, e.Media)
addedId[e.Media.ID] = true
}
}
}
return ret
}
func (ac *AnimeCollection) FindAnime(mediaId int) (*BaseAnime, bool) {
if ac == nil {
return nil, false
}
for _, l := range ac.MediaListCollection.Lists {
if l.Entries == nil || len(l.Entries) == 0 {
continue
}
for _, e := range l.Entries {
if e.Media.ID == mediaId {
return e.Media, true
}
}
}
return nil, false
}
func (ac *AnimeCollectionWithRelations) GetListEntryFromMediaId(id int) (*AnimeCollectionWithRelations_MediaListCollection_Lists_Entries, bool) {
if ac == nil || ac.MediaListCollection == nil {
return nil, false
}
var entry *AnimeCollectionWithRelations_MediaListCollection_Lists_Entries
for _, l := range ac.MediaListCollection.Lists {
if l.Entries == nil || len(l.Entries) == 0 {
continue
}
for _, e := range l.Entries {
if e.Media.ID == id {
entry = e
break
}
}
}
if entry == nil {
return nil, false
}
return entry, true
}
func (ac *AnimeCollectionWithRelations) GetAllAnime() []*CompleteAnime {
var ret []*CompleteAnime
addedId := make(map[int]bool)
for _, l := range ac.MediaListCollection.Lists {
if l.Entries == nil || len(l.Entries) == 0 {
continue
}
for _, e := range l.Entries {
if _, ok := addedId[e.Media.ID]; !ok {
ret = append(ret, e.Media)
addedId[e.Media.ID] = true
}
}
}
return ret
}
func (ac *AnimeCollectionWithRelations) FindAnime(mediaId int) (*CompleteAnime, bool) {
for _, l := range ac.MediaListCollection.Lists {
if l.Entries == nil || len(l.Entries) == 0 {
continue
}
for _, e := range l.Entries {
if e.Media.ID == mediaId {
return e.Media, true
}
}
}
return nil, false
}
type IFuzzyDate interface {
GetYear() *int
GetMonth() *int
GetDay() *int
}
func FuzzyDateToString(d IFuzzyDate) string {
if d == nil {
return ""
}
return fuzzyDateToString(d.GetYear(), d.GetMonth(), d.GetDay())
}
func ToEntryStartDate(d *AnimeCollection_MediaListCollection_Lists_Entries_StartedAt) string {
if d == nil {
return ""
}
return fuzzyDateToString(d.GetYear(), d.GetMonth(), d.GetDay())
}
func ToEntryCompletionDate(d *AnimeCollection_MediaListCollection_Lists_Entries_CompletedAt) string {
if d == nil {
return ""
}
return fuzzyDateToString(d.GetYear(), d.GetMonth(), d.GetDay())
}
func fuzzyDateToString(year *int, month *int, day *int) string {
_year := 0
if year != nil {
_year = *year
}
if _year == 0 {
return ""
}
_month := 0
if month != nil {
_month = *month
}
_day := 0
if day != nil {
_day = *day
}
return time.Date(_year, time.Month(_month), _day, 0, 0, 0, 0, time.UTC).Format(time.RFC3339)
}
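// exampleFuzzyDate is an illustrative sketch, not part of the original file: the fuzzy-date
// helpers render an AniList fuzzy date as an RFC 3339 timestamp at midnight UTC, and return an
// empty string when the year is unknown.
func exampleFuzzyDate() {
	year, month, day := 2024, 1, 2
	_ = fuzzyDateToString(&year, &month, &day) // "2024-01-02T00:00:00Z"
	_ = fuzzyDateToString(nil, &month, &day)   // "" because the year is missing
}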
// AddEntryToList adds an entry to the appropriate list based on the provided status.
// If no list exists with the given status, a new list is created.
func (mc *AnimeCollection_MediaListCollection) AddEntryToList(entry *AnimeCollection_MediaListCollection_Lists_Entries, status MediaListStatus) {
if mc == nil || entry == nil {
return
}
// Initialize Lists slice if nil
if mc.Lists == nil {
mc.Lists = make([]*AnimeCollection_MediaListCollection_Lists, 0)
}
// Find existing list with the target status
for _, list := range mc.Lists {
if list.Status != nil && *list.Status == status {
// Found the list, add the entry
if list.Entries == nil {
list.Entries = make([]*AnimeCollection_MediaListCollection_Lists_Entries, 0)
}
list.Entries = append(list.Entries, entry)
return
}
}
// No list found with the target status, create a new one
newList := &AnimeCollection_MediaListCollection_Lists{
Status: &status,
Entries: []*AnimeCollection_MediaListCollection_Lists_Entries{entry},
}
mc.Lists = append(mc.Lists, newList)
}
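// exampleCollectionHelpers is an illustrative sketch, not part of the original file: it shows
// typical collection lookups and the list-insertion helper above. The media ID is a placeholder
// and MediaListStatusCurrent is assumed to exist in the generated enum.
func exampleCollectionHelpers(ac *AnimeCollection, entry *AnimeCollection_MediaListCollection_Lists_Entries) {
	if media, found := ac.FindAnime(1); found {
		_ = media.ID // the anime exists somewhere in the collection
	}
	if listEntry, found := ac.GetListEntryFromAnimeId(1); found {
		_ = listEntry.Progress // status, score, progress, etc. live on the list entry
	}
	// Insert an entry into the "current" list, creating that list if it does not exist yet.
	ac.MediaListCollection.AddEntryToList(entry, MediaListStatusCurrent)
}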
func (ac *AnimeCollection) Copy() *AnimeCollection {
if ac == nil {
return nil
}
marshaled, err := json.Marshal(ac)
if err != nil {
return nil
}
var copy AnimeCollection
err = json.Unmarshal(marshaled, &copy)
if err != nil {
return nil
}
return &copy
}
func (ac *AnimeList) CopyT() *AnimeCollection_MediaListCollection_Lists {
if ac == nil {
return nil
}
marshaled, err := json.Marshal(ac)
if err != nil {
return nil
}
var copy AnimeCollection_MediaListCollection_Lists
err = json.Unmarshal(marshaled, &copy)
if err != nil {
return nil
}
return &copy
}


@@ -0,0 +1,115 @@
package anilist
import (
"fmt"
"github.com/goccy/go-json"
"seanime/internal/util"
"strconv"
)
func FetchBaseAnimeMap(ids []int) (ret map[int]*BaseAnime, err error) {
query := fmt.Sprintf(CompoundBaseAnimeDocument, newCompoundQuery(ids))
requestBody, err := json.Marshal(map[string]interface{}{
"query": query,
"variables": nil,
})
if err != nil {
return nil, err
}
data, err := customQuery(requestBody, util.NewLogger())
if err != nil {
return nil, err
}
var res map[string]*BaseAnime
dataB, err := json.Marshal(data)
if err != nil {
return nil, err
}
err = json.Unmarshal(dataB, &res)
if err != nil {
return nil, err
}
ret = make(map[int]*BaseAnime)
for k, v := range res {
id, err := strconv.Atoi(k[1:])
if err != nil {
return nil, err
}
ret[id] = v
}
return ret, nil
}
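// exampleFetchBaseAnimeMap is an illustrative sketch, not part of the original file: several
// anime are fetched in a single request, one aliased sub-query (t1, t21, ...) per ID, and the
// result is keyed by AniList media ID. The IDs are placeholders.
func exampleFetchBaseAnimeMap() {
	animeByID, err := FetchBaseAnimeMap([]int{1, 21})
	if err != nil {
		return
	}
	if anime, ok := animeByID[1]; ok {
		_ = anime.Title
	}
}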
func newCompoundQuery(ids []int) string {
var query string
for _, id := range ids {
query += fmt.Sprintf(`
t%d: Media(id: %d) {
...baseAnime
}
`, id, id)
}
return query
}
const CompoundBaseAnimeDocument = `query CompoundQueryTest {
%s
}
fragment baseAnime on Media {
id
idMal
siteUrl
status(version: 2)
season
type
format
bannerImage
episodes
synonyms
isAdult
countryOfOrigin
meanScore
description
genres
duration
trailer {
id
site
thumbnail
}
title {
userPreferred
romaji
english
native
}
coverImage {
extraLarge
large
medium
color
}
startDate {
year
month
day
}
endDate {
year
month
day
}
nextAiringEpisode {
airingAt
timeUntilAiring
episode
}
}`


@@ -0,0 +1,95 @@
package anilist
import (
"fmt"
"github.com/davecgh/go-spew/spew"
"github.com/goccy/go-json"
"github.com/stretchr/testify/require"
"seanime/internal/test_utils"
"seanime/internal/util"
"testing"
)
func TestCompoundQuery(t *testing.T) {
test_utils.InitTestProvider(t)
var ids = []int{171457, 21}
query := fmt.Sprintf(compoundQueryFormatTest, newCompoundQuery(ids))
t.Log(query)
requestBody, err := json.Marshal(map[string]interface{}{
"query": query,
"variables": nil,
})
require.NoError(t, err)
data, err := customQuery(requestBody, util.NewLogger())
require.NoError(t, err)
var res map[string]*BaseAnime
dataB, err := json.Marshal(data)
require.NoError(t, err)
err = json.Unmarshal(dataB, &res)
require.NoError(t, err)
spew.Dump(res)
}
const compoundQueryFormatTest = `query CompoundQueryTest {
%s
}
fragment baseAnime on Media {
id
idMal
siteUrl
status(version: 2)
season
type
format
bannerImage
episodes
synonyms
isAdult
countryOfOrigin
meanScore
description
genres
duration
trailer {
id
site
thumbnail
}
title {
userPreferred
romaji
english
native
}
coverImage {
extraLarge
large
medium
color
}
startDate {
year
month
day
}
endDate {
year
month
day
}
nextAiringEpisode {
airingAt
timeUntilAiring
episode
}
}`


@@ -0,0 +1,140 @@
package anilist
import (
"bytes"
"compress/gzip"
"errors"
"fmt"
"net/http"
"seanime/internal/util"
"strconv"
"time"
"github.com/goccy/go-json"
"github.com/rs/zerolog"
)
func CustomQuery(body map[string]interface{}, logger *zerolog.Logger, token string) (data interface{}, err error) {
bodyBytes, err := json.Marshal(body)
if err != nil {
return nil, err
}
return customQuery(bodyBytes, logger, token)
}
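// exampleCustomQuery is an illustrative sketch, not part of the original file: it issues an
// arbitrary GraphQL query and reads the decoded "data" object. The query string, variables,
// and token are placeholders.
func exampleCustomQuery(logger *zerolog.Logger, token string) {
	body := map[string]interface{}{
		"query":     `query ($id: Int) { Media(id: $id) { id } }`,
		"variables": map[string]interface{}{"id": 1},
	}
	data, err := CustomQuery(body, logger, token)
	if err != nil {
		return
	}
	if m, ok := data.(map[string]interface{}); ok {
		_ = m["Media"]
	}
}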
func customQuery(body []byte, logger *zerolog.Logger, token ...string) (data interface{}, err error) {
var rlRemainingStr string
reqTime := time.Now()
defer func() {
timeSince := time.Since(reqTime)
formattedDur := timeSince.Truncate(time.Millisecond).String()
if err != nil {
logger.Error().Str("duration", formattedDur).Str("rlr", rlRemainingStr).Err(err).Msg("anilist: Failed Request")
} else {
if timeSince > 600*time.Millisecond {
logger.Warn().Str("rtt", formattedDur).Str("rlr", rlRemainingStr).Msg("anilist: Long Request")
} else {
logger.Trace().Str("rtt", formattedDur).Str("rlr", rlRemainingStr).Msg("anilist: Successful Request")
}
}
}()
defer util.HandlePanicInModuleThen("api/anilist/custom_query", func() {
err = errors.New("panic in customQuery")
})
client := http.DefaultClient
var req *http.Request
req, err = http.NewRequest("POST", "https://graphql.anilist.co", bytes.NewBuffer(body))
if err != nil {
return nil, err
}
req.Header.Set("Content-Type", "application/json")
req.Header.Set("Accept", "application/json")
if len(token) > 0 && token[0] != "" {
req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token[0]))
}
// Send request
retryCount := 2
var resp *http.Response
for i := 0; i < retryCount; i++ {
// Close the previous attempt's response body before retrying
if resp != nil && resp.Body != nil {
resp.Body.Close()
}
// Recreate the request body if it was read in a previous attempt
if req.GetBody != nil {
newBody, err := req.GetBody()
if err != nil {
return nil, fmt.Errorf("failed to get request body: %w", err)
}
req.Body = newBody
}
resp, err = client.Do(req)
if err != nil {
return nil, fmt.Errorf("request failed: %w", err)
}
rlRemainingStr = resp.Header.Get("X-Ratelimit-Remaining")
rlRetryAfterStr := resp.Header.Get("Retry-After")
rlRetryAfter, err := strconv.Atoi(rlRetryAfterStr)
if err == nil {
logger.Warn().Msgf("anilist: Rate limited, retrying in %d seconds", rlRetryAfter+1)
time.Sleep(time.Duration(rlRetryAfter+1) * time.Second)
continue
}
if rlRemainingStr == "" {
time.Sleep(5 * time.Second)
continue
}
break
}
defer resp.Body.Close()
if resp.Header.Get("Content-Encoding") == "gzip" {
resp.Body, err = gzip.NewReader(resp.Body)
if err != nil {
return nil, fmt.Errorf("gzip decode failed: %w", err)
}
}
var res interface{}
err = json.NewDecoder(resp.Body).Decode(&res)
if err != nil {
return nil, fmt.Errorf("failed to decode response: %w", err)
}
var ok bool
reqErrors, ok := res.(map[string]interface{})["errors"].([]interface{})
if ok && len(reqErrors) > 0 {
firstError, foundErr := reqErrors[0].(map[string]interface{})
if foundErr {
return nil, errors.New(firstError["message"].(string))
}
}
data, ok = res.(map[string]interface{})["data"]
if !ok {
return nil, errors.New("failed to parse data")
}
return data, nil
}
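// exampleCustomQuery is an illustrative sketch of running an arbitrary GraphQL document
// through CustomQuery. The query text and variables are placeholders chosen for the example;
// an empty token sends the request unauthenticated.
func exampleCustomQuery(logger *zerolog.Logger, token string) (interface{}, error) {
    body := map[string]interface{}{
        "query":     `query ($id: Int) { Media(id: $id, type: ANIME) { id } }`,
        "variables": map[string]interface{}{"id": 1},
    }
    return CustomQuery(body, logger, token)
}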


@@ -0,0 +1,27 @@
package anilist
//import (
//)
//
//func TestFuzzyDate(t *testing.T) {
//
// date := "2006-01-02T15:04:05Z"
//
// parsedDate, err := time.Parse(time.RFC3339, date)
// if err != nil {
// t.Fatal(err)
// }
//
// year := parsedDate.Year()
// month := int(parsedDate.Month())
// day := parsedDate.Day()
// t.Logf("Year: %d, Month: %d, Day: %d", year, month, day)
//
//}
//
//func TestDateTransformation(t *testing.T) {
//
// t.Logf(time.Date(2024, time.Month(1), 1, 0, 0, 0, 0, time.Local).UTC().Format(time.RFC3339))
//
//}


@@ -0,0 +1,50 @@
package anilist
import (
"context"
"errors"
"github.com/rs/zerolog"
"seanime/internal/util/limiter"
"sync"
)
func (c *Client) AddMediaToPlanning(mIds []int, rateLimiter *limiter.Limiter, logger *zerolog.Logger) error {
if len(mIds) == 0 {
logger.Debug().Msg("anilist: No media added to planning list")
return nil
}
if rateLimiter == nil {
return errors.New("anilist: no rate limiter provided")
}
status := MediaListStatusPlanning
scoreRaw := 0
progress := 0
wg := sync.WaitGroup{}
for _, _id := range mIds {
wg.Add(1)
go func(id int) {
defer wg.Done()
rateLimiter.Wait()
_, err := c.UpdateMediaListEntry(
context.Background(),
&id,
&status,
&scoreRaw,
&progress,
nil,
nil,
)
if err != nil {
logger.Error().Msg("anilist: An error occurred while adding media to planning list: " + err.Error())
}
}(_id)
}
wg.Wait()
logger.Debug().Any("count", len(mIds)).Msg("anilist: Media added to planning list")
return nil
}
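// exampleAddMediaToPlanning is an illustrative sketch of a typical call: a few AniList media
// IDs are added to the planning list behind a shared rate limiter. The IDs are arbitrary and
// limiter.NewAnilistLimiter is assumed to be the package's standard AniList limiter constructor.
func exampleAddMediaToPlanning(c *Client, logger *zerolog.Logger) error {
    return c.AddMediaToPlanning([]int{21, 171457}, limiter.NewAnilistLimiter(), logger)
}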


@@ -0,0 +1,19 @@
package anilist
import "seanime/internal/hook_resolver"
// ListMissedSequelsRequestedEvent is triggered when the list of missed sequels is requested.
// Prevent default to skip the default behavior and return your own data.
type ListMissedSequelsRequestedEvent struct {
hook_resolver.Event
AnimeCollectionWithRelations *AnimeCollectionWithRelations `json:"animeCollectionWithRelations"`
Variables map[string]interface{} `json:"variables"`
Query string `json:"query"`
// Empty data object, will be used if the hook prevents the default behavior
List []*BaseAnime `json:"list"`
}
type ListMissedSequelsEvent struct {
hook_resolver.Event
List []*BaseAnime `json:"list"`
}
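// exampleSkipMissedSequels is an illustrative sketch of a hook handler that short-circuits the
// default behavior: it supplies a precomputed list and marks the default as prevented. It assumes
// DefaultPrevented is the exported field consulted by ListMissedSequels; handler registration with
// the hook manager is omitted.
func exampleSkipMissedSequels(e *ListMissedSequelsRequestedEvent, precomputed []*BaseAnime) {
    e.List = precomputed
    e.DefaultPrevented = true
}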


@@ -0,0 +1,529 @@
package anilist
import (
"fmt"
"seanime/internal/hook"
"github.com/goccy/go-json"
"github.com/rs/zerolog"
"github.com/samber/lo"
)
func ListMissedSequels(
animeCollectionWithRelations *AnimeCollectionWithRelations,
logger *zerolog.Logger,
token string,
) (ret []*BaseAnime, err error) {
defer func() {
if r := recover(); r != nil {
err = fmt.Errorf("panic: %v", r)
}
}()
variables := map[string]interface{}{}
variables["page"] = 1
variables["perPage"] = 50
ids := make(map[int]struct{})
for _, list := range animeCollectionWithRelations.GetMediaListCollection().GetLists() {
if list.Status == nil || !(*list.Status == MediaListStatusCompleted || *list.Status == MediaListStatusRepeating || *list.Status == MediaListStatusPaused) || list.Entries == nil {
continue
}
for _, entry := range list.Entries {
if _, ok := ids[entry.GetMedia().GetID()]; !ok {
edges := entry.GetMedia().GetRelations().GetEdges()
var sequel *BaseAnime
for _, edge := range edges {
if edge.GetRelationType() != nil && *edge.GetRelationType() == MediaRelationSequel {
sequel = edge.GetNode()
break
}
}
if sequel == nil {
continue
}
// Check if sequel is already in the list
_, found := animeCollectionWithRelations.FindAnime(sequel.GetID())
if found {
continue
}
if *sequel.GetStatus() == MediaStatusFinished || *sequel.GetStatus() == MediaStatusReleasing {
ids[sequel.GetID()] = struct{}{}
}
}
}
}
idsSlice := make([]int, 0, len(ids))
for id := range ids {
idsSlice = append(idsSlice, id)
}
if len(idsSlice) == 0 {
return []*BaseAnime{}, nil
}
if len(idsSlice) > 10 {
idsSlice = idsSlice[:10]
}
variables["ids"] = idsSlice
variables["inCollection"] = false
variables["sort"] = MediaSortStartDateDesc
// Event
reqEvent := &ListMissedSequelsRequestedEvent{
AnimeCollectionWithRelations: animeCollectionWithRelations,
Variables: variables,
List: make([]*BaseAnime, 0),
Query: SearchBaseAnimeByIdsDocument,
}
err = hook.GlobalHookManager.OnListMissedSequelsRequested().Trigger(reqEvent)
if err != nil {
return nil, err
}
// If the hook prevented the default behavior, return the data
if reqEvent.DefaultPrevented {
return reqEvent.List, nil
}
requestBody, err := json.Marshal(map[string]interface{}{
"query": reqEvent.Query,
"variables": reqEvent.Variables,
})
if err != nil {
return nil, err
}
data, err := customQuery(requestBody, logger, token)
if err != nil {
return nil, err
}
m, err := json.Marshal(data)
if err != nil {
return nil, err
}
var searchRes *SearchBaseAnimeByIds
if err := json.Unmarshal(m, &searchRes); err != nil {
return nil, err
}
if searchRes == nil || searchRes.Page == nil || searchRes.Page.Media == nil {
return nil, fmt.Errorf("no data found")
}
// Event
event := &ListMissedSequelsEvent{
List: searchRes.Page.Media,
}
err = hook.GlobalHookManager.OnListMissedSequels().Trigger(event)
if err != nil {
return nil, err
}
return event.List, nil
}
func ListAnimeM(
Page *int,
Search *string,
PerPage *int,
Sort []*MediaSort,
Status []*MediaStatus,
Genres []*string,
AverageScoreGreater *int,
Season *MediaSeason,
SeasonYear *int,
Format *MediaFormat,
IsAdult *bool,
logger *zerolog.Logger,
token string,
) (*ListAnime, error) {
variables := map[string]interface{}{}
if Page != nil {
variables["page"] = *Page
}
if Search != nil {
variables["search"] = *Search
}
if PerPage != nil {
variables["perPage"] = *PerPage
}
if Sort != nil {
variables["sort"] = Sort
}
if Status != nil {
variables["status"] = Status
}
if Genres != nil {
variables["genres"] = Genres
}
if AverageScoreGreater != nil {
variables["averageScore_greater"] = *AverageScoreGreater
}
if Season != nil {
variables["season"] = *Season
}
if SeasonYear != nil {
variables["seasonYear"] = *SeasonYear
}
if Format != nil {
variables["format"] = *Format
}
if IsAdult != nil {
variables["isAdult"] = *IsAdult
}
requestBody, err := json.Marshal(map[string]interface{}{
"query": ListAnimeDocument,
"variables": variables,
})
if err != nil {
return nil, err
}
data, err := customQuery(requestBody, logger, token)
if err != nil {
return nil, err
}
var listMediaF ListAnime
m, err := json.Marshal(data)
if err != nil {
return nil, err
}
if err := json.Unmarshal(m, &listMediaF); err != nil {
return nil, err
}
return &listMediaF, nil
}
func ListMangaM(
Page *int,
Search *string,
PerPage *int,
Sort []*MediaSort,
Status []*MediaStatus,
Genres []*string,
AverageScoreGreater *int,
Year *int,
Format *MediaFormat,
CountryOfOrigin *string,
IsAdult *bool,
logger *zerolog.Logger,
token string,
) (*ListManga, error) {
variables := map[string]interface{}{}
if Page != nil {
variables["page"] = *Page
}
if Search != nil {
variables["search"] = *Search
}
if PerPage != nil {
variables["perPage"] = *PerPage
}
if Sort != nil {
variables["sort"] = Sort
}
if Status != nil {
variables["status"] = Status
}
if Genres != nil {
variables["genres"] = Genres
}
if AverageScoreGreater != nil {
variables["averageScore_greater"] = *AverageScoreGreater * 10
}
if Year != nil {
variables["startDate_greater"] = lo.ToPtr(fmt.Sprintf("%d0000", *Year))
variables["startDate_lesser"] = lo.ToPtr(fmt.Sprintf("%d0000", *Year+1))
}
if Format != nil {
variables["format"] = *Format
}
if CountryOfOrigin != nil {
variables["countryOfOrigin"] = *CountryOfOrigin
}
if IsAdult != nil {
variables["isAdult"] = *IsAdult
}
requestBody, err := json.Marshal(map[string]interface{}{
"query": ListMangaDocument,
"variables": variables,
})
if err != nil {
return nil, err
}
data, err := customQuery(requestBody, logger, token)
if err != nil {
return nil, err
}
var listMediaF ListManga
m, err := json.Marshal(data)
if err != nil {
return nil, err
}
if err := json.Unmarshal(m, &listMediaF); err != nil {
return nil, err
}
return &listMediaF, nil
}
func ListRecentAiringAnimeM(
Page *int,
Search *string,
PerPage *int,
AiringAtGreater *int,
AiringAtLesser *int,
NotYetAired *bool,
Sort []*AiringSort,
logger *zerolog.Logger,
token string,
) (*ListRecentAnime, error) {
variables := map[string]interface{}{}
if Page != nil {
variables["page"] = *Page
}
if Search != nil {
variables["search"] = *Search
}
if PerPage != nil {
variables["perPage"] = *PerPage
}
if AiringAtGreater != nil {
variables["airingAt_greater"] = *AiringAtGreater
}
if AiringAtLesser != nil {
variables["airingAt_lesser"] = *AiringAtLesser
}
if NotYetAired != nil {
variables["notYetAired"] = *NotYetAired
}
if Sort != nil {
variables["sort"] = Sort
} else {
variables["sort"] = []*AiringSort{lo.ToPtr(AiringSortTimeDesc)}
}
requestBody, err := json.Marshal(map[string]interface{}{
"query": ListRecentAiringAnimeQuery,
"variables": variables,
})
if err != nil {
return nil, err
}
data, err := customQuery(requestBody, logger, token)
if err != nil {
return nil, err
}
var listMediaF ListRecentAnime
m, err := json.Marshal(data)
if err != nil {
return nil, err
}
if err := json.Unmarshal(m, &listMediaF); err != nil {
return nil, err
}
return &listMediaF, nil
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
func ListAnimeCacheKey(
Page *int,
Search *string,
PerPage *int,
Sort []*MediaSort,
Status []*MediaStatus,
Genres []*string,
AverageScoreGreater *int,
Season *MediaSeason,
SeasonYear *int,
Format *MediaFormat,
IsAdult *bool,
) string {
key := "ListAnime"
if Page != nil {
key += fmt.Sprintf("_%d", *Page)
}
if Search != nil {
key += fmt.Sprintf("_%s", *Search)
}
if PerPage != nil {
key += fmt.Sprintf("_%d", *PerPage)
}
if Sort != nil {
key += fmt.Sprintf("_%v", Sort)
}
if Status != nil {
key += fmt.Sprintf("_%v", Status)
}
if Genres != nil {
key += fmt.Sprintf("_%v", Genres)
}
if AverageScoreGreater != nil {
key += fmt.Sprintf("_%d", *AverageScoreGreater)
}
if Season != nil {
key += fmt.Sprintf("_%s", *Season)
}
if SeasonYear != nil {
key += fmt.Sprintf("_%d", *SeasonYear)
}
if Format != nil {
key += fmt.Sprintf("_%s", *Format)
}
if IsAdult != nil {
key += fmt.Sprintf("_%t", *IsAdult)
}
return key
}
func ListMangaCacheKey(
Page *int,
Search *string,
PerPage *int,
Sort []*MediaSort,
Status []*MediaStatus,
Genres []*string,
AverageScoreGreater *int,
Season *MediaSeason,
SeasonYear *int,
Format *MediaFormat,
CountryOfOrigin *string,
IsAdult *bool,
) string {
key := "ListAnime"
if Page != nil {
key += fmt.Sprintf("_%d", *Page)
}
if Search != nil {
key += fmt.Sprintf("_%s", *Search)
}
if PerPage != nil {
key += fmt.Sprintf("_%d", *PerPage)
}
if Sort != nil {
key += fmt.Sprintf("_%v", Sort)
}
if Status != nil {
key += fmt.Sprintf("_%v", Status)
}
if Genres != nil {
key += fmt.Sprintf("_%v", Genres)
}
if AverageScoreGreater != nil {
key += fmt.Sprintf("_%d", *AverageScoreGreater)
}
if Season != nil {
key += fmt.Sprintf("_%s", *Season)
}
if SeasonYear != nil {
key += fmt.Sprintf("_%d", *SeasonYear)
}
if Format != nil {
key += fmt.Sprintf("_%s", *Format)
}
if CountryOfOrigin != nil {
key += fmt.Sprintf("_%s", *CountryOfOrigin)
}
if IsAdult != nil {
key += fmt.Sprintf("_%t", *IsAdult)
}
return key
}
const ListRecentAiringAnimeQuery = `query ListRecentAnime ($page: Int, $perPage: Int, $airingAt_greater: Int, $airingAt_lesser: Int, $sort: [AiringSort], $notYetAired: Boolean = false) {
Page(page: $page, perPage: $perPage) {
pageInfo {
hasNextPage
total
perPage
currentPage
lastPage
}
airingSchedules(notYetAired: $notYetAired, sort: $sort, airingAt_greater: $airingAt_greater, airingAt_lesser: $airingAt_lesser) {
id
airingAt
episode
timeUntilAiring
media {
... baseAnime
}
}
}
}
fragment baseAnime on Media {
id
idMal
siteUrl
status(version: 2)
season
type
format
bannerImage
episodes
synonyms
isAdult
countryOfOrigin
meanScore
description
genres
duration
trailer {
id
site
thumbnail
}
title {
userPreferred
romaji
english
native
}
coverImage {
extraLarge
large
medium
color
}
startDate {
year
month
day
}
endDate {
year
month
day
}
nextAiringEpisode {
airingAt
timeUntilAiring
episode
}
}
`
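// exampleListCurrentSeasonAnime is an illustrative sketch of calling ListAnimeM. The page size,
// season and year are arbitrary placeholders; nil arguments leave the corresponding filters unset,
// and an empty token performs the query unauthenticated.
func exampleListCurrentSeasonAnime(logger *zerolog.Logger, token string) (*ListAnime, error) {
    page := 1
    perPage := 20
    season := MediaSeasonSpring
    year := 2024
    sort := []*MediaSort{lo.ToPtr(MediaSortStartDateDesc)}
    return ListAnimeM(&page, nil, &perPage, sort, nil, nil, nil, &season, &year, nil, nil, logger, token)
}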


@@ -0,0 +1,123 @@
package anilist
type MangaList = MangaCollection_MediaListCollection_Lists
type MangaListEntry = MangaCollection_MediaListCollection_Lists_Entries
func (ac *MangaCollection) GetListEntryFromMangaId(id int) (*MangaListEntry, bool) {
if ac == nil || ac.MediaListCollection == nil {
return nil, false
}
var entry *MangaCollection_MediaListCollection_Lists_Entries
for _, l := range ac.MediaListCollection.Lists {
if l.Entries == nil || len(l.Entries) == 0 {
continue
}
for _, e := range l.Entries {
if e.Media.ID == id {
entry = e
break
}
}
}
if entry == nil {
return nil, false
}
return entry, true
}
func (m *BaseManga) GetTitleSafe() string {
if m.GetTitle().GetEnglish() != nil {
return *m.GetTitle().GetEnglish()
}
if m.GetTitle().GetRomaji() != nil {
return *m.GetTitle().GetRomaji()
}
return "N/A"
}
func (m *BaseManga) GetRomajiTitleSafe() string {
if m.GetTitle().GetRomaji() != nil {
return *m.GetTitle().GetRomaji()
}
if m.GetTitle().GetEnglish() != nil {
return *m.GetTitle().GetEnglish()
}
return "N/A"
}
func (m *BaseManga) GetPreferredTitle() string {
if m.GetTitle().GetUserPreferred() != nil {
return *m.GetTitle().GetUserPreferred()
}
return m.GetTitleSafe()
}
func (m *BaseManga) GetCoverImageSafe() string {
if m.GetCoverImage().GetExtraLarge() != nil {
return *m.GetCoverImage().GetExtraLarge()
}
if m.GetCoverImage().GetLarge() != nil {
return *m.GetCoverImage().GetLarge()
}
if m.GetBannerImage() != nil {
return *m.GetBannerImage()
}
return ""
}
func (m *BaseManga) GetBannerImageSafe() string {
if m.GetBannerImage() != nil {
return *m.GetBannerImage()
}
return m.GetCoverImageSafe()
}
func (m *BaseManga) GetAllTitles() []*string {
titles := make([]*string, 0)
if m.HasRomajiTitle() {
titles = append(titles, m.Title.Romaji)
}
if m.HasEnglishTitle() {
titles = append(titles, m.Title.English)
}
if m.HasSynonyms() && len(m.Synonyms) > 1 {
titles = append(titles, m.Synonyms...)
}
return titles
}
func (m *BaseManga) GetMainTitlesDeref() []string {
titles := make([]string, 0)
if m.HasRomajiTitle() {
titles = append(titles, *m.Title.Romaji)
}
if m.HasEnglishTitle() {
titles = append(titles, *m.Title.English)
}
return titles
}
func (m *BaseManga) HasEnglishTitle() bool {
return m.Title.English != nil
}
func (m *BaseManga) HasRomajiTitle() bool {
return m.Title.Romaji != nil
}
func (m *BaseManga) HasSynonyms() bool {
return m.Synonyms != nil
}
func (m *BaseManga) GetStartYearSafe() int {
if m.GetStartDate() != nil && m.GetStartDate().GetYear() != nil {
return *m.GetStartDate().GetYear()
}
return 0
}
func (m *MangaListEntry) GetRepeatSafe() int {
if m.Repeat == nil {
return 0
}
return *m.Repeat
}
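// exampleMangaRepeatCount is an illustrative sketch: it looks up a manga list entry by AniList ID
// and reads how many times the user has reread it. The ID is arbitrary.
func exampleMangaRepeatCount(mc *MangaCollection, id int) int {
    entry, ok := mc.GetListEntryFromMangaId(id)
    if !ok {
        return 0
    }
    return entry.GetRepeatSafe()
}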


@@ -0,0 +1,25 @@
package anilist
import (
"seanime/internal/util/result"
)
type BaseAnimeCache struct {
*result.Cache[int, *BaseAnime]
}
// NewBaseAnimeCache returns a new result.Cache[int, *BaseAnime].
// It is used to temporarily store the results of FetchMediaTree calls.
func NewBaseAnimeCache() *BaseAnimeCache {
return &BaseAnimeCache{result.NewCache[int, *BaseAnime]()}
}
type CompleteAnimeCache struct {
*result.Cache[int, *CompleteAnime]
}
// NewCompleteAnimeCache returns a new result.Cache[int, *CompleteAnime].
// It is used to temporarily store the results of FetchMediaTree calls.
func NewCompleteAnimeCache() *CompleteAnimeCache {
return &CompleteAnimeCache{result.NewCache[int, *CompleteAnime]()}
}
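// exampleCacheBaseAnime is an illustrative sketch of the cache in use: a fetched anime is stored
// under its AniList ID and read back later. Set and Get come from the embedded result.Cache.
func exampleCacheBaseAnime(cache *BaseAnimeCache, media *BaseAnime) (*BaseAnime, bool) {
    cache.Set(media.ID, media)
    return cache.Get(media.ID)
}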


@@ -0,0 +1,574 @@
package anilist
import (
"seanime/internal/util/comparison"
"github.com/samber/lo"
)
func (m *BaseAnime) GetTitleSafe() string {
if m.GetTitle().GetEnglish() != nil {
return *m.GetTitle().GetEnglish()
}
if m.GetTitle().GetRomaji() != nil {
return *m.GetTitle().GetRomaji()
}
return ""
}
func (m *BaseAnime) GetEnglishTitleSafe() string {
if m.GetTitle().GetEnglish() != nil {
return *m.GetTitle().GetEnglish()
}
return ""
}
func (m *BaseAnime) GetRomajiTitleSafe() string {
if m.GetTitle().GetRomaji() != nil {
return *m.GetTitle().GetRomaji()
}
if m.GetTitle().GetEnglish() != nil {
return *m.GetTitle().GetEnglish()
}
return ""
}
func (m *BaseAnime) GetPreferredTitle() string {
if m.GetTitle().GetUserPreferred() != nil {
return *m.GetTitle().GetUserPreferred()
}
return m.GetTitleSafe()
}
func (m *BaseAnime) GetCoverImageSafe() string {
if m.GetCoverImage().GetExtraLarge() != nil {
return *m.GetCoverImage().GetExtraLarge()
}
if m.GetCoverImage().GetLarge() != nil {
return *m.GetCoverImage().GetLarge()
}
if m.GetBannerImage() != nil {
return *m.GetBannerImage()
}
return ""
}
func (m *BaseAnime) GetBannerImageSafe() string {
if m.GetBannerImage() != nil {
return *m.GetBannerImage()
}
return m.GetCoverImageSafe()
}
func (m *BaseAnime) IsMovieOrSingleEpisode() bool {
if m == nil {
return false
}
if m.GetTotalEpisodeCount() == 1 {
return true
}
return false
}
func (m *BaseAnime) GetSynonymsDeref() []string {
if m.Synonyms == nil {
return nil
}
return lo.Map(m.Synonyms, func(s *string, i int) string { return *s })
}
func (m *BaseAnime) GetSynonymsContainingSeason() []string {
if m.Synonyms == nil {
return nil
}
return lo.Filter(lo.Map(m.Synonyms, func(s *string, i int) string { return *s }), func(s string, i int) bool { return comparison.ValueContainsSeason(s) })
}
func (m *BaseAnime) GetStartYearSafe() int {
if m == nil || m.StartDate == nil || m.StartDate.Year == nil {
return 0
}
return *m.StartDate.Year
}
func (m *BaseAnime) IsMovie() bool {
if m == nil {
return false
}
if m.Format == nil {
return false
}
return *m.Format == MediaFormatMovie
}
func (m *BaseAnime) IsFinished() bool {
if m == nil {
return false
}
if m.Status == nil {
return false
}
return *m.Status == MediaStatusFinished
}
func (m *BaseAnime) GetAllTitles() []*string {
titles := make([]*string, 0)
if m.HasRomajiTitle() {
titles = append(titles, m.Title.Romaji)
}
if m.HasEnglishTitle() {
titles = append(titles, m.Title.English)
}
if m.HasSynonyms() && len(m.Synonyms) > 1 {
titles = append(titles, lo.Filter(m.Synonyms, func(s *string, i int) bool { return comparison.ValueContainsSeason(*s) })...)
}
return titles
}
func (m *BaseAnime) GetAllTitlesDeref() []string {
titles := make([]string, 0)
if m.HasRomajiTitle() {
titles = append(titles, *m.Title.Romaji)
}
if m.HasEnglishTitle() {
titles = append(titles, *m.Title.English)
}
if m.HasSynonyms() && len(m.Synonyms) > 1 {
syn := lo.Filter(m.Synonyms, func(s *string, i int) bool { return comparison.ValueContainsSeason(*s) })
for _, s := range syn {
titles = append(titles, *s)
}
}
return titles
}
func (m *BaseAnime) GetMainTitles() []*string {
titles := make([]*string, 0)
if m.HasRomajiTitle() {
titles = append(titles, m.Title.Romaji)
}
if m.HasEnglishTitle() {
titles = append(titles, m.Title.English)
}
return titles
}
func (m *BaseAnime) GetMainTitlesDeref() []string {
titles := make([]string, 0)
if m.HasRomajiTitle() {
titles = append(titles, *m.Title.Romaji)
}
if m.HasEnglishTitle() {
titles = append(titles, *m.Title.English)
}
return titles
}
// GetCurrentEpisodeCount returns the current episode number for that media and -1 if it doesn't have one.
// i.e. -1 is returned if the media has no episodes AND the next airing episode is not set.
func (m *BaseAnime) GetCurrentEpisodeCount() int {
ceil := -1
if m.Episodes != nil {
ceil = *m.Episodes
}
if m.NextAiringEpisode != nil {
if m.NextAiringEpisode.Episode > 0 {
ceil = m.NextAiringEpisode.Episode - 1
}
}
return ceil
}
func (m *BaseAnime) GetCurrentEpisodeCountOrNil() *int {
n := m.GetCurrentEpisodeCount()
if n == -1 {
return nil
}
return &n
}
// GetTotalEpisodeCount returns the total episode number for that media and -1 if it doesn't have one
func (m *BaseAnime) GetTotalEpisodeCount() int {
ceil := -1
if m.Episodes != nil {
ceil = *m.Episodes
}
return ceil
}
// GetTotalEpisodeCountOrNil returns the total episode count for that media, or nil if it isn't set
func (m *BaseAnime) GetTotalEpisodeCountOrNil() *int {
return m.Episodes
}
// GetPossibleSeasonNumber returns the possible season number for that media and -1 if it doesn't have one.
// It looks at the synonyms and returns the highest season number found.
func (m *BaseAnime) GetPossibleSeasonNumber() int {
if m == nil || m.Synonyms == nil || len(m.Synonyms) == 0 {
return -1
}
titles := lo.Filter(m.Synonyms, func(s *string, i int) bool { return comparison.ValueContainsSeason(*s) })
if m.HasEnglishTitle() {
titles = append(titles, m.Title.English)
}
if m.HasRomajiTitle() {
titles = append(titles, m.Title.Romaji)
}
seasons := lo.Map(titles, func(s *string, i int) int { return comparison.ExtractSeasonNumber(*s) })
return lo.Max(seasons)
}
func (m *BaseAnime) HasEnglishTitle() bool {
return m.Title.English != nil
}
func (m *BaseAnime) HasRomajiTitle() bool {
return m.Title.Romaji != nil
}
func (m *BaseAnime) HasSynonyms() bool {
return m.Synonyms != nil
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
func (m *CompleteAnime) GetTitleSafe() string {
if m.GetTitle().GetEnglish() != nil {
return *m.GetTitle().GetEnglish()
}
if m.GetTitle().GetRomaji() != nil {
return *m.GetTitle().GetRomaji()
}
return "N/A"
}
func (m *CompleteAnime) GetRomajiTitleSafe() string {
if m.GetTitle().GetRomaji() != nil {
return *m.GetTitle().GetRomaji()
}
if m.GetTitle().GetEnglish() != nil {
return *m.GetTitle().GetEnglish()
}
return "N/A"
}
func (m *CompleteAnime) GetPreferredTitle() string {
if m.GetTitle().GetUserPreferred() != nil {
return *m.GetTitle().GetUserPreferred()
}
return m.GetTitleSafe()
}
func (m *CompleteAnime) GetCoverImageSafe() string {
if m.GetCoverImage().GetExtraLarge() != nil {
return *m.GetCoverImage().GetExtraLarge()
}
if m.GetCoverImage().GetLarge() != nil {
return *m.GetCoverImage().GetLarge()
}
if m.GetBannerImage() != nil {
return *m.GetBannerImage()
}
return ""
}
func (m *CompleteAnime) GetBannerImageSafe() string {
if m.GetBannerImage() != nil {
return *m.GetBannerImage()
}
return m.GetCoverImageSafe()
}
func (m *CompleteAnime) IsMovieOrSingleEpisode() bool {
if m == nil {
return false
}
if m.GetTotalEpisodeCount() == 1 {
return true
}
return false
}
func (m *CompleteAnime) IsMovie() bool {
if m == nil {
return false
}
if m.Format == nil {
return false
}
return *m.Format == MediaFormatMovie
}
func (m *CompleteAnime) IsFinished() bool {
if m == nil {
return false
}
if m.Status == nil {
return false
}
return *m.Status == MediaStatusFinished
}
func (m *CompleteAnime) GetAllTitles() []*string {
titles := make([]*string, 0)
if m.HasRomajiTitle() {
titles = append(titles, m.Title.Romaji)
}
if m.HasEnglishTitle() {
titles = append(titles, m.Title.English)
}
if m.HasSynonyms() && len(m.Synonyms) > 1 {
titles = append(titles, lo.Filter(m.Synonyms, func(s *string, i int) bool { return comparison.ValueContainsSeason(*s) })...)
}
return titles
}
func (m *CompleteAnime) GetAllTitlesDeref() []string {
titles := make([]string, 0)
if m.HasRomajiTitle() {
titles = append(titles, *m.Title.Romaji)
}
if m.HasEnglishTitle() {
titles = append(titles, *m.Title.English)
}
if m.HasSynonyms() && len(m.Synonyms) > 1 {
syn := lo.Filter(m.Synonyms, func(s *string, i int) bool { return comparison.ValueContainsSeason(*s) })
for _, s := range syn {
titles = append(titles, *s)
}
}
return titles
}
// GetCurrentEpisodeCount returns the current episode number for that media and -1 if it doesn't have one.
// i.e. -1 is returned if the media has no episodes AND the next airing episode is not set.
func (m *CompleteAnime) GetCurrentEpisodeCount() int {
ceil := -1
if m.Episodes != nil {
ceil = *m.Episodes
}
if m.NextAiringEpisode != nil {
if m.NextAiringEpisode.Episode > 0 {
ceil = m.NextAiringEpisode.Episode - 1
}
}
return ceil
}
// GetTotalEpisodeCount returns the total episode number for that media and -1 if it doesn't have one
func (m *CompleteAnime) GetTotalEpisodeCount() int {
ceil := -1
if m.Episodes != nil {
ceil = *m.Episodes
}
return ceil
}
// GetPossibleSeasonNumber returns the possible season number for that media and -1 if it doesn't have one.
// It looks at the synonyms and returns the highest season number found.
func (m *CompleteAnime) GetPossibleSeasonNumber() int {
if m == nil || m.Synonyms == nil || len(m.Synonyms) == 0 {
return -1
}
titles := lo.Filter(m.Synonyms, func(s *string, i int) bool { return comparison.ValueContainsSeason(*s) })
if m.HasEnglishTitle() {
titles = append(titles, m.Title.English)
}
if m.HasRomajiTitle() {
titles = append(titles, m.Title.Romaji)
}
seasons := lo.Map(titles, func(s *string, i int) int { return comparison.ExtractSeasonNumber(*s) })
return lo.Max(seasons)
}
func (m *CompleteAnime) HasEnglishTitle() bool {
return m.Title.English != nil
}
func (m *CompleteAnime) HasRomajiTitle() bool {
return m.Title.Romaji != nil
}
func (m *CompleteAnime) HasSynonyms() bool {
return m.Synonyms != nil
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
var EdgeNarrowFormats = []MediaFormat{MediaFormatTv, MediaFormatTvShort}
var EdgeBroaderFormats = []MediaFormat{MediaFormatTv, MediaFormatTvShort, MediaFormatOna, MediaFormatOva, MediaFormatMovie, MediaFormatSpecial}
func (m *CompleteAnime) FindEdge(relation string, formats []MediaFormat) (*BaseAnime, bool) {
if m.GetRelations() == nil {
return nil, false
}
edges := m.GetRelations().GetEdges()
for _, edge := range edges {
if edge.GetRelationType().String() == relation {
for _, fm := range formats {
if fm.String() == edge.GetNode().GetFormat().String() {
return edge.GetNode(), true
}
}
}
}
return nil, false
}
func (e *CompleteAnime_Relations_Edges) IsBroadRelationFormat() bool {
if e.GetNode() == nil {
return false
}
if e.GetNode().GetFormat() == nil {
return false
}
for _, fm := range EdgeBroaderFormats {
if fm.String() == e.GetNode().GetFormat().String() {
return true
}
}
return false
}
func (e *CompleteAnime_Relations_Edges) IsNarrowRelationFormat() bool {
if e.GetNode() == nil {
return false
}
if e.GetNode().GetFormat() == nil {
return false
}
for _, fm := range EdgeNarrowFormats {
if fm.String() == e.GetNode().GetFormat().String() {
return true
}
}
return false
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
func (m *CompleteAnime) ToBaseAnime() *BaseAnime {
if m == nil {
return nil
}
var trailer *BaseAnime_Trailer
if m.GetTrailer() != nil {
trailer = &BaseAnime_Trailer{
ID: m.GetTrailer().GetID(),
Site: m.GetTrailer().GetSite(),
Thumbnail: m.GetTrailer().GetThumbnail(),
}
}
var nextAiringEpisode *BaseAnime_NextAiringEpisode
if m.GetNextAiringEpisode() != nil {
nextAiringEpisode = &BaseAnime_NextAiringEpisode{
AiringAt: m.GetNextAiringEpisode().GetAiringAt(),
TimeUntilAiring: m.GetNextAiringEpisode().GetTimeUntilAiring(),
Episode: m.GetNextAiringEpisode().GetEpisode(),
}
}
var startDate *BaseAnime_StartDate
if m.GetStartDate() != nil {
startDate = &BaseAnime_StartDate{
Year: m.GetStartDate().GetYear(),
Month: m.GetStartDate().GetMonth(),
Day: m.GetStartDate().GetDay(),
}
}
var endDate *BaseAnime_EndDate
if m.GetEndDate() != nil {
endDate = &BaseAnime_EndDate{
Year: m.GetEndDate().GetYear(),
Month: m.GetEndDate().GetMonth(),
Day: m.GetEndDate().GetDay(),
}
}
return &BaseAnime{
ID: m.GetID(),
IDMal: m.GetIDMal(),
SiteURL: m.GetSiteURL(),
Format: m.GetFormat(),
Episodes: m.GetEpisodes(),
Status: m.GetStatus(),
Synonyms: m.GetSynonyms(),
BannerImage: m.GetBannerImage(),
Season: m.GetSeason(),
SeasonYear: m.GetSeasonYear(),
Type: m.GetType(),
IsAdult: m.GetIsAdult(),
CountryOfOrigin: m.GetCountryOfOrigin(),
Genres: m.GetGenres(),
Duration: m.GetDuration(),
Description: m.GetDescription(),
MeanScore: m.GetMeanScore(),
Trailer: trailer,
Title: &BaseAnime_Title{
UserPreferred: m.GetTitle().GetUserPreferred(),
Romaji: m.GetTitle().GetRomaji(),
English: m.GetTitle().GetEnglish(),
Native: m.GetTitle().GetNative(),
},
CoverImage: &BaseAnime_CoverImage{
ExtraLarge: m.GetCoverImage().GetExtraLarge(),
Large: m.GetCoverImage().GetLarge(),
Medium: m.GetCoverImage().GetMedium(),
Color: m.GetCoverImage().GetColor(),
},
StartDate: startDate,
EndDate: endDate,
NextAiringEpisode: nextAiringEpisode,
}
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
func (m *AnimeListEntry) GetProgressSafe() int {
if m == nil {
return 0
}
if m.Progress == nil {
return 0
}
return *m.Progress
}
func (m *AnimeListEntry) GetScoreSafe() float64 {
if m == nil {
return 0
}
if m.Score == nil {
return 0
}
return *m.Score
}
func (m *AnimeListEntry) GetRepeatSafe() int {
if m == nil {
return 0
}
if m.Repeat == nil {
return 0
}
return *m.Repeat
}
func (m *AnimeListEntry) GetStatusSafe() MediaListStatus {
if m == nil {
return ""
}
if m.Status == nil {
return ""
}
return *m.Status
}
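// exampleCurrentEpisodeCount is an illustrative sketch of how the episode helpers behave for a
// currently airing show: with 24 planned episodes and episode 13 airing next, GetCurrentEpisodeCount
// reports 12. The numbers are arbitrary.
func exampleCurrentEpisodeCount() int {
    episodes := 24
    m := &BaseAnime{
        Episodes:          &episodes,
        NextAiringEpisode: &BaseAnime_NextAiringEpisode{Episode: 13},
    }
    return m.GetCurrentEpisodeCount() // 12
}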


@@ -0,0 +1,155 @@
package anilist
import (
"context"
"github.com/samber/lo"
"seanime/internal/util"
"seanime/internal/util/limiter"
"seanime/internal/util/result"
"sync"
)
type (
CompleteAnimeRelationTree struct {
*result.Map[int, *CompleteAnime]
}
FetchMediaTreeRelation = string
)
const (
FetchMediaTreeSequels FetchMediaTreeRelation = "sequels"
FetchMediaTreePrequels FetchMediaTreeRelation = "prequels"
FetchMediaTreeAll FetchMediaTreeRelation = "all"
)
// NewCompleteAnimeRelationTree returns a new result.Map[int, *CompleteAnime].
// It is used to store the results of FetchMediaTree calls.
func NewCompleteAnimeRelationTree() *CompleteAnimeRelationTree {
return &CompleteAnimeRelationTree{result.NewResultMap[int, *CompleteAnime]()}
}
func (m *BaseAnime) FetchMediaTree(rel FetchMediaTreeRelation, anilistClient AnilistClient, rl *limiter.Limiter, tree *CompleteAnimeRelationTree, cache *CompleteAnimeCache) (err error) {
if m == nil {
return nil
}
defer util.HandlePanicInModuleWithError("anilist/BaseAnime.FetchMediaTree", &err)
rl.Wait()
res, err := anilistClient.CompleteAnimeByID(context.Background(), &m.ID)
if err != nil {
return err
}
return res.GetMedia().FetchMediaTree(rel, anilistClient, rl, tree, cache)
}
// FetchMediaTree populates the CompleteAnimeRelationTree with the given media's sequels and prequels.
// It takes a CompleteAnimeCache to store fetched media and avoid duplicate fetches,
// and a limiter.Limiter to throttle requests made to the AniList API.
func (m *CompleteAnime) FetchMediaTree(rel FetchMediaTreeRelation, anilistClient AnilistClient, rl *limiter.Limiter, tree *CompleteAnimeRelationTree, cache *CompleteAnimeCache) (err error) {
if m == nil {
return nil
}
defer util.HandlePanicInModuleWithError("anilist/CompleteAnime.FetchMediaTree", &err)
if tree.Has(m.ID) {
cache.Set(m.ID, m)
return nil
}
cache.Set(m.ID, m)
tree.Set(m.ID, m)
if m.Relations == nil {
return nil
}
// Get all edges
edges := m.GetRelations().GetEdges()
// Filter edges
edges = lo.Filter(edges, func(_edge *CompleteAnime_Relations_Edges, _ int) bool {
return (*_edge.RelationType == MediaRelationSequel || *_edge.RelationType == MediaRelationPrequel) &&
*_edge.GetNode().Status != MediaStatusNotYetReleased &&
_edge.IsBroadRelationFormat() && !tree.Has(_edge.GetNode().ID)
})
if len(edges) == 0 {
return nil
}
doneCh := make(chan struct{})
processEdges(edges, rel, anilistClient, rl, tree, cache, doneCh)
// Block until all edges have been processed
<-doneCh
return nil
}
// processEdges fetches the next node(s) for each edge in parallel.
func processEdges(edges []*CompleteAnime_Relations_Edges, rel FetchMediaTreeRelation, anilistClient AnilistClient, rl *limiter.Limiter, tree *CompleteAnimeRelationTree, cache *CompleteAnimeCache, doneCh chan struct{}) {
var wg sync.WaitGroup
wg.Add(len(edges))
for i, item := range edges {
go func(edge *CompleteAnime_Relations_Edges, _ int) {
defer wg.Done()
if edge == nil {
return
}
processEdge(edge, rel, anilistClient, rl, tree, cache)
}(item, i)
}
wg.Wait()
go func() {
close(doneCh)
}()
}
func processEdge(edge *CompleteAnime_Relations_Edges, rel FetchMediaTreeRelation, anilistClient AnilistClient, rl *limiter.Limiter, tree *CompleteAnimeRelationTree, cache *CompleteAnimeCache) {
defer util.HandlePanicInModuleThen("anilist/processEdge", func() {})
cacheV, ok := cache.Get(edge.GetNode().ID)
edgeCompleteAnime := cacheV
if !ok {
rl.Wait()
// Fetch the next node
res, err := anilistClient.CompleteAnimeByID(context.Background(), &edge.GetNode().ID)
if err == nil {
edgeCompleteAnime = res.GetMedia()
cache.Set(edgeCompleteAnime.ID, edgeCompleteAnime)
}
}
if edgeCompleteAnime == nil {
return
}
// Get the relation type to fetch for the next node
edgeRel := getEdgeRelation(edge, rel)
// Fetch the next node(s)
err := edgeCompleteAnime.FetchMediaTree(edgeRel, anilistClient, rl, tree, cache)
if err != nil {
return
}
}
// getEdgeRelation returns the relation to fetch for the next node based on the current edge and the relation to fetch.
// If the relation to fetch is FetchMediaTreeAll, it will return FetchMediaTreePrequels for prequels and FetchMediaTreeSequels for sequels.
//
// For example, if the current node is a sequel and the relation to fetch is FetchMediaTreeAll, it will return FetchMediaTreeSequels so that
// only sequels are fetched for the next node.
func getEdgeRelation(edge *CompleteAnime_Relations_Edges, rel FetchMediaTreeRelation) FetchMediaTreeRelation {
if rel == FetchMediaTreeAll {
if *edge.RelationType == MediaRelationPrequel {
return FetchMediaTreePrequels
}
if *edge.RelationType == MediaRelationSequel {
return FetchMediaTreeSequels
}
}
return rel
}


@@ -0,0 +1,82 @@
package anilist
import (
"context"
"github.com/davecgh/go-spew/spew"
"github.com/stretchr/testify/assert"
"seanime/internal/test_utils"
"seanime/internal/util/limiter"
"testing"
)
func TestBaseAnime_FetchMediaTree_BaseAnime(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.Anilist())
anilistClient := TestGetMockAnilistClient()
lim := limiter.NewAnilistLimiter()
completeAnimeCache := NewCompleteAnimeCache()
tests := []struct {
name string
mediaId int
edgeIds []int
}{
{
name: "Bungo Stray Dogs",
mediaId: 103223,
edgeIds: []int{
21311, // BSD1
21679, // BSD2
103223, // BSD3
141249, // BSD4
163263, // BSD5
},
},
{
name: "Re:Zero",
mediaId: 21355,
edgeIds: []int{
21355, // Re:Zero 1
108632, // Re:Zero 2
119661, // Re:Zero 2 Part 2
163134, // Re:Zero 3
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
mediaF, err := anilistClient.CompleteAnimeByID(context.Background(), &tt.mediaId)
if assert.NoError(t, err) {
media := mediaF.GetMedia()
tree := NewCompleteAnimeRelationTree()
err = media.FetchMediaTree(
FetchMediaTreeAll,
anilistClient,
lim,
tree,
completeAnimeCache,
)
if assert.NoError(t, err) {
for _, treeId := range tt.edgeIds {
a, found := tree.Get(treeId)
assert.Truef(t, found, "expected tree to contain %d", treeId)
spew.Dump(a.GetTitleSafe())
}
}
}
})
}
}

File diff suppressed because it is too large


@@ -0,0 +1,456 @@
query AnimeCollection ($userName: String) {
MediaListCollection(userName: $userName, forceSingleCompletedList: true, type: ANIME) {
lists {
status
name
isCustomList
entries {
id
score(format: POINT_100)
progress
status
notes
repeat
private
startedAt {
year
month
day
}
completedAt {
year
month
day
}
media {
...baseAnime
}
}
}
}
}
query AnimeCollectionWithRelations ($userName: String) {
MediaListCollection(userName: $userName, forceSingleCompletedList: true, type: ANIME) {
lists {
status
name
isCustomList
entries {
id
score(format: POINT_100)
progress
status
notes
repeat
private
startedAt {
year
month
day
}
completedAt {
year
month
day
}
media {
...completeAnime
}
}
}
}
}
query BaseAnimeByMalId ($id: Int) {
Media(idMal: $id, type: ANIME) {
...baseAnime
}
}
query BaseAnimeById ($id: Int) {
Media(id: $id, type: ANIME) {
...baseAnime
}
}
query SearchBaseAnimeByIds ($ids: [Int], $page: Int, $perPage: Int, $status: [MediaStatus], $inCollection: Boolean, $sort: [MediaSort], $season: MediaSeason, $year: Int, $genre: String, $format: MediaFormat) {
Page(page: $page, perPage: $perPage) {
pageInfo {
hasNextPage
},
media(id_in: $ids, type: ANIME, status_in: $status, onList: $inCollection, sort: $sort, season: $season, seasonYear: $year, genre: $genre, format: $format) {
...baseAnime
}
}
}
query CompleteAnimeById ($id: Int) {
Media(id: $id, type: ANIME) {
...completeAnime
}
}
# For view (will be cached)
query AnimeDetailsById ($id: Int) {
Media(id: $id, type: ANIME) {
siteUrl
id
duration
genres
averageScore
popularity
meanScore
description
trailer {
id
site
thumbnail
}
startDate {
year
month
day
}
endDate {
year
month
day
}
studios(isMain: true) {
nodes {
name
id
}
}
characters(sort: [ROLE]) {
edges {
id
role
name
node {
...baseCharacter
}
}
}
staff(sort: [RELEVANCE]) {
edges {
role
node {
name {
full
}
id
}
}
}
rankings {
context
type
rank
year
format
allTime
season
}
recommendations(page: 1, perPage: 8, sort: RATING_DESC) {
edges {
node {
mediaRecommendation {
id
idMal
siteUrl
status(version: 2)
isAdult
season
type
format
meanScore
description
episodes
trailer {
id
site
thumbnail
}
startDate {
year
month
day
}
coverImage {
extraLarge
large
medium
color
}
bannerImage
title {
romaji
english
native
userPreferred
}
}
}
}
}
relations {
edges {
relationType(version: 2)
node {
...baseAnime
}
}
}
}
}
query ListAnime(
$page: Int
$search: String
$perPage: Int
$sort: [MediaSort]
$status: [MediaStatus]
$genres: [String]
$averageScore_greater: Int
$season: MediaSeason
$seasonYear: Int
$format: MediaFormat
$isAdult: Boolean
) {
Page(page: $page, perPage: $perPage) {
pageInfo {
hasNextPage
total
perPage
currentPage
lastPage
}
media(
type: ANIME
search: $search
sort: $sort
status_in: $status
isAdult: $isAdult
format: $format
genre_in: $genres
averageScore_greater: $averageScore_greater
season: $season
seasonYear: $seasonYear
format_not: MUSIC
) {
...baseAnime
}
}
}
query ListRecentAnime ($page: Int, $perPage: Int, $airingAt_greater: Int, $airingAt_lesser: Int, $notYetAired: Boolean = false) {
Page(page: $page, perPage: $perPage) {
pageInfo {
hasNextPage
total
perPage
currentPage
lastPage
}
airingSchedules(notYetAired: $notYetAired, sort: TIME_DESC, airingAt_greater: $airingAt_greater, airingAt_lesser: $airingAt_lesser) {
id
airingAt
episode
timeUntilAiring
media {
... baseAnime
}
}
}
}
fragment baseAnime on Media {
id
idMal
siteUrl
status(version: 2)
season
type
format
seasonYear
bannerImage
episodes
synonyms
isAdult
countryOfOrigin
meanScore
description
genres
duration
trailer {
id
site
thumbnail
}
title {
userPreferred
romaji
english
native
}
coverImage {
extraLarge
large
medium
color
}
startDate {
year
month
day
}
endDate {
year
month
day
}
nextAiringEpisode {
airingAt
timeUntilAiring
episode
}
}
fragment completeAnime on Media {
id
idMal
siteUrl
status(version: 2)
season
seasonYear
type
format
bannerImage
episodes
synonyms
isAdult
countryOfOrigin
meanScore
description
genres
duration
trailer {
id
site
thumbnail
}
title {
userPreferred
romaji
english
native
}
coverImage {
extraLarge
large
medium
color
}
startDate {
year
month
day
}
endDate {
year
month
day
}
nextAiringEpisode {
airingAt
timeUntilAiring
episode
}
relations {
edges {
relationType(version: 2)
node {
...baseAnime
}
}
}
}
fragment baseCharacter on Character {
id
isFavourite
gender
age
dateOfBirth {
year
month
day
}
name {
full
native
alternative
}
image {
large
}
description
siteUrl
}
query AnimeAiringSchedule($ids: [Int],$season: MediaSeason, $seasonYear: Int, $previousSeason: MediaSeason, $previousSeasonYear: Int, $nextSeason: MediaSeason, $nextSeasonYear: Int) {
ongoing: Page {
media(id_in: $ids, type: ANIME, season: $season, seasonYear: $seasonYear, onList: true) {
...animeSchedule
}
}
ongoingNext: Page(page: 2) {
media(id_in: $ids, type: ANIME, season: $season, seasonYear: $seasonYear, onList: true) {
...animeSchedule
}
}
upcoming: Page {
media(id_in: $ids, type: ANIME, season: $nextSeason, seasonYear: $nextSeasonYear, sort: [START_DATE], onList: true) {
...animeSchedule
}
}
upcomingNext: Page(page: 2) {
media(id_in: $ids, type: ANIME, season: $nextSeason, seasonYear: $nextSeasonYear, sort: [START_DATE], onList: true) {
...animeSchedule
}
}
preceding: Page {
media(id_in: $ids, type: ANIME, season: $previousSeason, seasonYear: $previousSeasonYear, onList: true) {
...animeSchedule
}
}
}
query AnimeAiringScheduleRaw($ids: [Int]) {
Page {
media(id_in: $ids, type: ANIME, onList: true) {
...animeSchedule
}
}
}
fragment animeSchedule on Media {
id,
idMal
previous: airingSchedule(notYetAired: false, perPage: 30) {
nodes {
airingAt
timeUntilAiring
episode
}
},
upcoming: airingSchedule(notYetAired: true, perPage: 30) {
nodes {
airingAt
timeUntilAiring
episode
}
}
}


@@ -0,0 +1,56 @@
mutation UpdateMediaListEntry (
$mediaId: Int
$status: MediaListStatus
$scoreRaw: Int
$progress: Int
$startedAt: FuzzyDateInput
$completedAt: FuzzyDateInput
) {
SaveMediaListEntry(
mediaId: $mediaId
status: $status
scoreRaw: $scoreRaw
progress: $progress
startedAt: $startedAt
completedAt: $completedAt
) {
id
}
}
mutation UpdateMediaListEntryProgress (
$mediaId: Int
$progress: Int
$status: MediaListStatus
) {
SaveMediaListEntry(
mediaId: $mediaId
progress: $progress
status: $status
) {
id
}
}
mutation DeleteEntry (
$mediaListEntryId: Int
) {
DeleteMediaListEntry(
id: $mediaListEntryId
) {
deleted
}
}
mutation UpdateMediaListEntryRepeat (
$mediaId: Int
$repeat: Int
) {
SaveMediaListEntry(
mediaId: $mediaId
repeat: $repeat
) {
id
}
}


@@ -0,0 +1,200 @@
query MangaCollection ($userName: String) {
MediaListCollection(userName: $userName, forceSingleCompletedList: true, type: MANGA) {
lists {
status
name
isCustomList
entries {
id
score(format: POINT_100)
progress
status
notes
repeat
private
startedAt {
year
month
day
}
completedAt {
year
month
day
}
media {
...baseManga
}
}
}
}
}
query SearchBaseManga($page: Int, $perPage: Int, $sort: [MediaSort], $search: String, $status: [MediaStatus]){
Page(page: $page, perPage: $perPage){
pageInfo{
hasNextPage
},
media(type: MANGA, search: $search, sort: $sort, status_in: $status, format_not: NOVEL){
...baseManga
}
}
}
query BaseMangaById ($id: Int) {
Media(id: $id, type: MANGA) {
...baseManga
}
}
# For view (will be cached)
query MangaDetailsById ($id: Int) {
Media(id: $id, type: MANGA) {
siteUrl
id
duration
genres
rankings {
context
type
rank
year
format
allTime
season
}
characters(sort: [ROLE]) {
edges {
id
role
name
node {
...baseCharacter
}
}
}
recommendations(page: 1, perPage: 8, sort: RATING_DESC) {
edges {
node {
mediaRecommendation {
id
idMal
siteUrl
status(version: 2)
season
type
format
bannerImage
chapters
volumes
synonyms
isAdult
countryOfOrigin
meanScore
description
title {
userPreferred
romaji
english
native
}
coverImage {
extraLarge
large
medium
color
}
startDate {
year
month
day
}
endDate {
year
month
day
}
}
}
}
}
relations {
edges {
relationType(version: 2)
node {
...baseManga
}
}
}
}
}
query ListManga(
$page: Int
$search: String
$perPage: Int
$sort: [MediaSort]
$status: [MediaStatus]
$genres: [String]
$averageScore_greater: Int
$startDate_greater: FuzzyDateInt
$startDate_lesser: FuzzyDateInt
$format: MediaFormat
$countryOfOrigin: CountryCode
$isAdult: Boolean
) {
Page(page: $page, perPage: $perPage){
pageInfo{
hasNextPage
total
perPage
currentPage
lastPage
},
media(type: MANGA, isAdult: $isAdult, countryOfOrigin: $countryOfOrigin, search: $search, sort: $sort, status_in: $status, format: $format, genre_in: $genres, averageScore_greater: $averageScore_greater, startDate_greater: $startDate_greater, startDate_lesser: $startDate_lesser, format_not: NOVEL){
...baseManga
}
}
}
fragment baseManga on Media {
id
idMal
siteUrl
status(version: 2)
season
type
format
bannerImage
chapters
volumes
synonyms
isAdult
countryOfOrigin
meanScore
description
genres
title {
userPreferred
romaji
english
native
}
coverImage {
extraLarge
large
medium
color
}
startDate {
year
month
day
}
endDate {
year
month
day
}
}


@@ -0,0 +1,126 @@
query ViewerStats {
Viewer {
statistics {
anime {
count
minutesWatched
episodesWatched
meanScore
formats {
...UserFormatStats
}
genres {
...UserGenreStats
}
statuses {
...UserStatusStats
}
studios {
...UserStudioStats
}
scores {
...UserScoreStats
}
startYears {
...UserStartYearStats
}
releaseYears {
...UserReleaseYearStats
}
}
manga {
count
chaptersRead
meanScore
formats {
...UserFormatStats
}
genres {
...UserGenreStats
}
statuses {
...UserStatusStats
}
studios {
...UserStudioStats
}
scores {
...UserScoreStats
}
startYears {
...UserStartYearStats
}
releaseYears {
...UserReleaseYearStats
}
}
}
}
}
fragment UserFormatStats on UserFormatStatistic {
format
meanScore
count
minutesWatched
mediaIds
chaptersRead
}
fragment UserGenreStats on UserGenreStatistic {
genre
meanScore
count
minutesWatched
mediaIds
chaptersRead
}
fragment UserStatusStats on UserStatusStatistic {
status
meanScore
count
minutesWatched
mediaIds
chaptersRead
}
fragment UserScoreStats on UserScoreStatistic {
score
meanScore
count
minutesWatched
mediaIds
chaptersRead
}
fragment UserStudioStats on UserStudioStatistic {
studio {
id
name
isAnimationStudio
}
meanScore
count
minutesWatched
mediaIds
chaptersRead
}
fragment UserStartYearStats on UserStartYearStatistic {
startYear
meanScore
count
minutesWatched
mediaIds
chaptersRead
}
fragment UserReleaseYearStats on UserReleaseYearStatistic {
releaseYear
meanScore
count
minutesWatched
mediaIds
chaptersRead
}


@@ -0,0 +1,12 @@
query StudioDetails($id: Int) {
Studio(id: $id) {
id
isAnimationStudio
name
media (perPage: 80, sort: TRENDING_DESC, isMain: true) {
nodes {
...baseAnime
}
}
}
}


@@ -0,0 +1,16 @@
query GetViewer {
Viewer {
name
avatar {
large
medium
}
bannerImage
isBlocked
options {
displayAdultContent
airingNotifications
profileColor
}
}
}


@@ -0,0 +1,72 @@
package anilist
import (
"context"
"seanime/internal/util"
)
type (
Stats struct {
AnimeStats *AnimeStats `json:"animeStats"`
MangaStats *MangaStats `json:"mangaStats"`
}
AnimeStats struct {
Count int `json:"count"`
MinutesWatched int `json:"minutesWatched"`
EpisodesWatched int `json:"episodesWatched"`
MeanScore float64 `json:"meanScore"`
Genres []*UserGenreStats `json:"genres"`
Formats []*UserFormatStats `json:"formats"`
Statuses []*UserStatusStats `json:"statuses"`
Studios []*UserStudioStats `json:"studios"`
Scores []*UserScoreStats `json:"scores"`
StartYears []*UserStartYearStats `json:"startYears"`
ReleaseYears []*UserReleaseYearStats `json:"releaseYears"`
}
MangaStats struct {
Count int `json:"count"`
ChaptersRead int `json:"chaptersRead"`
MeanScore float64 `json:"meanScore"`
Genres []*UserGenreStats `json:"genres"`
Statuses []*UserStatusStats `json:"statuses"`
Scores []*UserScoreStats `json:"scores"`
StartYears []*UserStartYearStats `json:"startYears"`
ReleaseYears []*UserReleaseYearStats `json:"releaseYears"`
}
)
func GetStats(ctx context.Context, stats *ViewerStats) (ret *Stats, err error) {
defer util.HandlePanicInModuleWithError("api/anilist/GetStats", &err)
allStats := stats.GetViewer().GetStatistics()
ret = &Stats{
AnimeStats: &AnimeStats{
Count: allStats.GetAnime().GetCount(),
MinutesWatched: allStats.GetAnime().GetMinutesWatched(),
EpisodesWatched: allStats.GetAnime().GetEpisodesWatched(),
MeanScore: allStats.GetAnime().GetMeanScore(),
Genres: allStats.GetAnime().GetGenres(),
Formats: allStats.GetAnime().GetFormats(),
Statuses: allStats.GetAnime().GetStatuses(),
Studios: allStats.GetAnime().GetStudios(),
Scores: allStats.GetAnime().GetScores(),
StartYears: allStats.GetAnime().GetStartYears(),
ReleaseYears: allStats.GetAnime().GetReleaseYears(),
},
MangaStats: &MangaStats{
Count: allStats.GetManga().GetCount(),
ChaptersRead: allStats.GetManga().GetChaptersRead(),
MeanScore: allStats.GetManga().GetMeanScore(),
Genres: allStats.GetManga().GetGenres(),
Statuses: allStats.GetManga().GetStatuses(),
Scores: allStats.GetManga().GetScores(),
StartYears: allStats.GetManga().GetStartYears(),
ReleaseYears: allStats.GetManga().GetReleaseYears(),
},
}
return ret, nil
}


@@ -0,0 +1,60 @@
package anilist
import (
"time"
)
type GetSeasonKind int
const (
GetSeasonKindCurrent GetSeasonKind = iota
GetSeasonKindNext
GetSeasonKindPrevious
)
func GetSeasonInfo(now time.Time, kind GetSeasonKind) (MediaSeason, int) {
month, year := now.Month(), now.Year()
getSeasonIndex := func(m time.Month) int {
switch {
case m >= 3 && m <= 5: // spring: 3, 4, 5
return 1
case m >= 6 && m <= 8: // summer: 6, 7, 8
return 2
case m >= 9 && m <= 11: // fall: 9, 10, 11
return 3
default: // winter: 12, 1, 2
return 0
}
}
seasons := []MediaSeason{MediaSeasonWinter, MediaSeasonSpring, MediaSeasonSummer, MediaSeasonFall}
var index int
switch kind {
case GetSeasonKindCurrent:
index = getSeasonIndex(month)
case GetSeasonKindNext:
nextMonth := month + 3
nextYear := year
if nextMonth > 12 {
nextMonth -= 12
nextYear++
}
index = getSeasonIndex(nextMonth)
year = nextYear
case GetSeasonKindPrevious:
prevMonth := month - 3
prevYear := year
if prevMonth <= 0 {
prevMonth += 12
prevYear--
}
index = getSeasonIndex(prevMonth)
year = prevYear
}
return seasons[index], year
}


@@ -0,0 +1,34 @@
package anilist
import (
"testing"
"time"
"github.com/stretchr/testify/require"
)
func TestGetSeason(t *testing.T) {
tests := []struct {
now time.Time
kind GetSeasonKind
expectedSeason MediaSeason
expectedYear int
}{
{time.Date(2025, 1, 1, 0, 0, 0, 0, time.UTC), GetSeasonKindCurrent, MediaSeasonWinter, 2025},
{time.Date(2025, 4, 1, 0, 0, 0, 0, time.UTC), GetSeasonKindCurrent, MediaSeasonSpring, 2025},
{time.Date(2025, 7, 1, 0, 0, 0, 0, time.UTC), GetSeasonKindCurrent, MediaSeasonSummer, 2025},
{time.Date(2025, 10, 1, 0, 0, 0, 0, time.UTC), GetSeasonKindCurrent, MediaSeasonFall, 2025},
{time.Date(2025, 10, 1, 0, 0, 0, 0, time.UTC), GetSeasonKindNext, MediaSeasonWinter, 2026},
{time.Date(2025, 12, 31, 23, 59, 59, 999999999, time.UTC), GetSeasonKindCurrent, MediaSeasonWinter, 2025},
{time.Date(2025, 1, 1, 0, 0, 0, 0, time.UTC), GetSeasonKindNext, MediaSeasonSpring, 2025},
}
for _, tt := range tests {
t.Run(tt.now.Format(time.RFC3339), func(t *testing.T) {
t.Logf("%s", tt.now.Format(time.RFC3339))
season, year := GetSeasonInfo(tt.now, tt.kind)
require.Equal(t, tt.expectedSeason, season, "Expected season %v, got %v", tt.expectedSeason, season)
require.Equal(t, tt.expectedYear, year, "Expected year %d, got %d", tt.expectedYear, year)
})
}
}


@@ -0,0 +1,137 @@
package animap
import (
"errors"
"io"
"net/http"
"seanime/internal/constants"
"seanime/internal/hook"
"seanime/internal/util/result"
"strconv"
"github.com/goccy/go-json"
)
type (
Anime struct {
Title string `json:"title"`
Titles map[string]string `json:"titles,omitempty"`
StartDate string `json:"startDate,omitempty"` // YYYY-MM-DD
EndDate string `json:"endDate,omitempty"` // YYYY-MM-DD
Status string `json:"status"` // Finished, Airing, Upcoming, etc.
Type string `json:"type"` // TV, OVA, Movie, etc.
Episodes map[string]*Episode `json:"episodes,omitzero"` // Indexed by AniDB episode number, "1", "S1", etc.
Mappings *AnimeMapping `json:"mappings,omitzero"`
}
AnimeMapping struct {
AnidbID int `json:"anidb_id,omitempty"`
AnilistID int `json:"anilist_id,omitempty"`
KitsuID int `json:"kitsu_id,omitempty"`
TheTvdbID int `json:"thetvdb_id,omitempty"`
TheMovieDbID string `json:"themoviedb_id,omitempty"` // Can be int or string, forced to string
MalID int `json:"mal_id,omitempty"`
LivechartID int `json:"livechart_id,omitempty"`
AnimePlanetID string `json:"animeplanet_id,omitempty"` // Can be int or string, forced to string
AnisearchID int `json:"anisearch_id,omitempty"`
SimklID int `json:"simkl_id,omitempty"`
NotifyMoeID string `json:"notifymoe_id,omitempty"`
AnimecountdownID int `json:"animecountdown_id,omitempty"`
Type string `json:"type,omitempty"`
}
Episode struct {
AnidbEpisode string `json:"anidbEpisode"`
AnidbId int `json:"anidbEid"`
TvdbId int `json:"tvdbEid,omitempty"`
TvdbShowId int `json:"tvdbShowId,omitempty"`
AirDate string `json:"airDate,omitempty"` // YYYY-MM-DD
AnidbTitle string `json:"anidbTitle,omitempty"` // Title of the episode from AniDB
TvdbTitle string `json:"tvdbTitle,omitempty"` // Title of the episode from TVDB
Overview string `json:"overview,omitempty"`
Image string `json:"image,omitempty"`
Runtime int `json:"runtime,omitempty"` // minutes
Length string `json:"length,omitempty"` // Xm
SeasonNumber int `json:"seasonNumber,omitempty"`
SeasonName string `json:"seasonName,omitempty"`
Number int `json:"number"`
AbsoluteNumber int `json:"absoluteNumber,omitempty"`
}
)
//----------------------------------------------------------------------------------------------------------------------
type Cache struct {
*result.Cache[string, *Anime]
}
// FetchAnimapMedia fetches animap.Anime from the Animap API.
func FetchAnimapMedia(from string, id int) (*Anime, error) {
// Event
reqEvent := &AnimapMediaRequestedEvent{
From: from,
Id: id,
Media: &Anime{},
}
err := hook.GlobalHookManager.OnAnimapMediaRequested().Trigger(reqEvent)
if err != nil {
return nil, err
}
// If the hook prevented the default behavior, return the data
if reqEvent.DefaultPrevented {
return reqEvent.Media, nil
}
from = reqEvent.From
id = reqEvent.Id
apiUrl := constants.InternalMetadataURL + "/entry?" + from + "_id=" + strconv.Itoa(id)
request, err := http.NewRequest("GET", apiUrl, nil)
if err != nil {
return nil, err
}
request.Header.Set("X-Seanime-Version", "Seanime/"+constants.Version)
// Send an HTTP GET request
response, err := http.DefaultClient.Do(request)
if err != nil {
return nil, err
}
defer response.Body.Close()
if response.StatusCode != 200 {
return nil, errors.New("not found on Animap")
}
// Read the response body
responseBody, err := io.ReadAll(response.Body)
if err != nil {
return nil, err
}
// Unmarshal the JSON data into an Anime struct
var media Anime
if err := json.Unmarshal(responseBody, &media); err != nil {
return nil, err
}
// Event
event := &AnimapMediaEvent{
Media: &media,
}
err = hook.GlobalHookManager.OnAnimapMedia().Trigger(event)
if err != nil {
return nil, err
}
// If the hook prevented the default behavior, return the data
if event.DefaultPrevented {
return event.Media, nil
}
return event.Media, nil
}
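// exampleFetchAnimapEpisode is an illustrative sketch: it fetches Animap metadata for an AniList ID
// and looks up the first episode. The "anilist" source key and the ID are assumptions made for the
// example; episodes are indexed by AniDB episode number, as noted on the Episodes field.
func exampleFetchAnimapEpisode() (*Episode, error) {
    media, err := FetchAnimapMedia("anilist", 21)
    if err != nil {
        return nil, err
    }
    return media.Episodes["1"], nil
}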


@@ -0,0 +1,19 @@
package animap
import "seanime/internal/hook_resolver"
// AnimapMediaRequestedEvent is triggered when the Animap media is requested.
// Prevent default to skip the default behavior and return your own data.
type AnimapMediaRequestedEvent struct {
hook_resolver.Event
From string `json:"from"`
Id int `json:"id"`
// Empty data object, will be used if the hook prevents the default behavior
Media *Anime `json:"media"`
}
// AnimapMediaEvent is triggered after processing AnimapMedia.
type AnimapMediaEvent struct {
hook_resolver.Event
Media *Anime `json:"media"`
}


@@ -0,0 +1,156 @@
package anizip
import (
"errors"
"io"
"net/http"
"seanime/internal/hook"
"seanime/internal/util/result"
"strconv"
"github.com/goccy/go-json"
)
// AniZip is the API used for fetching anime metadata and mappings.
type (
Episode struct {
TvdbEid int `json:"tvdbEid,omitempty"`
AirDate string `json:"airdate,omitempty"`
SeasonNumber int `json:"seasonNumber,omitempty"`
EpisodeNumber int `json:"episodeNumber,omitempty"`
AbsoluteEpisodeNumber int `json:"absoluteEpisodeNumber,omitempty"`
Title map[string]string `json:"title,omitempty"`
Image string `json:"image,omitempty"`
Summary string `json:"summary,omitempty"`
Overview string `json:"overview,omitempty"`
Runtime int `json:"runtime,omitempty"`
Length int `json:"length,omitempty"`
Episode string `json:"episode,omitempty"`
AnidbEid int `json:"anidbEid,omitempty"`
Rating string `json:"rating,omitempty"`
}
Mappings struct {
AnimeplanetID string `json:"animeplanet_id,omitempty"`
KitsuID int `json:"kitsu_id,omitempty"`
MalID int `json:"mal_id,omitempty"`
Type string `json:"type,omitempty"`
AnilistID int `json:"anilist_id,omitempty"`
AnisearchID int `json:"anisearch_id,omitempty"`
AnidbID int `json:"anidb_id,omitempty"`
NotifymoeID string `json:"notifymoe_id,omitempty"`
LivechartID int `json:"livechart_id,omitempty"`
ThetvdbID int `json:"thetvdb_id,omitempty"`
ImdbID string `json:"imdb_id,omitempty"`
ThemoviedbID string `json:"themoviedb_id,omitempty"`
}
Media struct {
Titles map[string]string `json:"titles"`
Episodes map[string]Episode `json:"episodes"`
EpisodeCount int `json:"episodeCount"`
SpecialCount int `json:"specialCount"`
Mappings *Mappings `json:"mappings"`
}
)
//----------------------------------------------------------------------------------------------------------------------
type Cache struct {
*result.Cache[string, *Media]
}
func NewCache() *Cache {
return &Cache{result.NewCache[string, *Media]()}
}
func GetCacheKey(from string, id int) string {
return from + strconv.Itoa(id)
}
//----------------------------------------------------------------------------------------------------------------------
// FetchAniZipMedia fetches anizip.Media from the AniZip API.
func FetchAniZipMedia(from string, id int) (*Media, error) {
// Event
reqEvent := &AnizipMediaRequestedEvent{
From: from,
Id: id,
Media: &Media{},
}
err := hook.GlobalHookManager.OnAnizipMediaRequested().Trigger(reqEvent)
if err != nil {
return nil, err
}
// If the hook prevented the default behavior, return the data
if reqEvent.DefaultPrevented {
return reqEvent.Media, nil
}
from = reqEvent.From
id = reqEvent.Id
apiUrl := "https://api.ani.zip/v1/episodes?" + from + "_id=" + strconv.Itoa(id)
// Send an HTTP GET request
response, err := http.Get(apiUrl)
if err != nil {
return nil, err
}
defer response.Body.Close()
if response.StatusCode != 200 {
return nil, errors.New("not found on AniZip")
}
// Read the response body
responseBody, err := io.ReadAll(response.Body)
if err != nil {
return nil, err
}
// Unmarshal the JSON data into AniZipData
var media Media
if err := json.Unmarshal(responseBody, &media); err != nil {
return nil, err
}
// Event
event := &AnizipMediaEvent{
Media: &media,
}
err = hook.GlobalHookManager.OnAnizipMedia().Trigger(event)
if err != nil {
return nil, err
}
// If the hook prevented the default behavior, return the data
if event.DefaultPrevented {
return event.Media, nil
}
return event.Media, nil
}
// FetchAniZipMediaC is the same as FetchAniZipMedia but uses a cache.
// If the media is found in the cache, it will be returned.
// If the media is not found in the cache, it will be fetched and then added to the cache.
func FetchAniZipMediaC(from string, id int, cache *Cache) (*Media, error) {
cacheV, ok := cache.Get(GetCacheKey(from, id))
if ok {
return cacheV, nil
}
media, err := FetchAniZipMedia(from, id)
if err != nil {
return nil, err
}
cache.Set(GetCacheKey(from, id), media)
return media, nil
}
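// fetchAniZipMediaExample is an illustrative sketch of the cached fetch flow
// above, assuming AniList ID 1 (as used in the tests) as an example input.
// The second call with the same key is served from the cache.
func fetchAniZipMediaExample() (string, error) {
    cache := NewCache()
    media, err := FetchAniZipMediaC("anilist", 1, cache)
    if err != nil {
        return "", err
    }
    _, _ = FetchAniZipMediaC("anilist", 1, cache) // cache hit, no network request
    return media.GetTitle(), nil
}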

View File

@@ -0,0 +1,65 @@
package anizip
func (m *Media) GetTitle() string {
if m == nil {
return ""
}
if len(m.Titles["en"]) > 0 {
return m.Titles["en"]
}
return m.Titles["ro"]
}
func (m *Media) GetMappings() *Mappings {
if m == nil {
return &Mappings{}
}
return m.Mappings
}
func (m *Media) FindEpisode(ep string) (*Episode, bool) {
if m.Episodes == nil {
return nil, false
}
episode, found := m.Episodes[ep]
if !found {
return nil, false
}
return &episode, true
}
func (m *Media) GetMainEpisodeCount() int {
if m == nil {
return 0
}
return m.EpisodeCount
}
// GetOffset returns the offset of the first episode relative to the absolute episode number.
// e.g., if the first episode's absolute number is 13, then the offset is 12.
func (m *Media) GetOffset() int {
if m == nil {
return 0
}
firstEp, found := m.FindEpisode("1")
if !found {
return 0
}
if firstEp.AbsoluteEpisodeNumber == 0 {
return 0
}
return firstEp.AbsoluteEpisodeNumber - 1
}
func (e *Episode) GetTitle() string {
eng, ok := e.Title["en"]
if ok {
return eng
}
rom, ok := e.Title["x-jat"]
if ok {
return rom
}
return ""
}
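// absoluteEpisodeExample is a small sketch of the offset arithmetic described
// in GetOffset's comment above: for a media whose first episode has absolute
// number 13, GetOffset returns 12, so relative episode 5 maps to absolute episode 17.
func absoluteEpisodeExample(m *Media, relativeEpisode int) int {
    return relativeEpisode + m.GetOffset()
}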

View File

@@ -0,0 +1,37 @@
package anizip
import (
"github.com/stretchr/testify/assert"
"testing"
)
func TestFetchAniZipMedia(t *testing.T) {
tests := []struct {
name string
provider string
id int
expectedTitle string
}{
{
name: "Cowboy Bebop",
provider: "anilist",
id: 1,
expectedTitle: "Cowboy Bebop",
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
media, err := FetchAniZipMedia(test.provider, test.id)
if assert.NoError(t, err) {
if assert.NotNil(t, media) {
assert.Equal(t, test.expectedTitle, media.GetTitle())
}
}
})
}
}

View File

@@ -0,0 +1,19 @@
package anizip
import "seanime/internal/hook_resolver"
// AnizipMediaRequestedEvent is triggered when the AniZip media is requested.
// Prevent default to skip the default behavior and return your own data.
type AnizipMediaRequestedEvent struct {
hook_resolver.Event
From string `json:"from"`
Id int `json:"id"`
// Empty data object, will be used if the hook prevents the default behavior
Media *Media `json:"media"`
}
// AnizipMediaEvent is triggered after processing AnizipMedia.
type AnizipMediaEvent struct {
hook_resolver.Event
Media *Media `json:"media"`
}

View File

@@ -0,0 +1,185 @@
package filler
import (
"fmt"
"seanime/internal/util"
"strings"
"github.com/adrg/strutil/metrics"
"github.com/gocolly/colly"
"github.com/rs/zerolog"
)
type (
SearchOptions struct {
Titles []string
}
SearchResult struct {
Slug string
Title string
}
API interface {
Search(opts SearchOptions) (*SearchResult, error)
FindFillerData(slug string) (*Data, error)
}
Data struct {
FillerEpisodes []string `json:"fillerEpisodes"`
}
)
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
type (
AnimeFillerList struct {
baseUrl string
userAgent string
logger *zerolog.Logger
}
)
func NewAnimeFillerList(logger *zerolog.Logger) *AnimeFillerList {
return &AnimeFillerList{
baseUrl: "https://www.animefillerlist.com",
userAgent: util.GetRandomUserAgent(),
logger: logger,
}
}
func (af *AnimeFillerList) Search(opts SearchOptions) (result *SearchResult, err error) {
defer util.HandlePanicInModuleWithError("api/metadata/filler/Search", &err)
c := colly.NewCollector(
colly.UserAgent(af.userAgent),
)
ret := make([]*SearchResult, 0)
c.OnHTML("div.Group > ul > li > a", func(e *colly.HTMLElement) {
ret = append(ret, &SearchResult{
Slug: e.Attr("href"),
Title: e.Text,
})
})
err = c.Visit(fmt.Sprintf("%s/shows", af.baseUrl))
if err != nil {
return nil, err
}
if len(ret) == 0 {
return nil, fmt.Errorf("no results found")
}
lev := metrics.NewLevenshtein()
lev.CaseSensitive = false
compResults := make([]struct {
OriginalValue string
Value string
Distance int
}, 0)
for _, result := range ret {
firstTitle := result.Title
secondTitle := ""
// Check if a second title exists between parentheses
if strings.LastIndex(firstTitle, " (") != -1 && strings.LastIndex(firstTitle, ")") != -1 {
secondTitle = firstTitle[strings.LastIndex(firstTitle, " (")+2 : strings.LastIndex(firstTitle, ")")]
if !util.IsMostlyLatinString(secondTitle) {
secondTitle = ""
}
}
if secondTitle != "" {
firstTitle = firstTitle[:strings.LastIndex(firstTitle, " (")]
}
for _, mediaTitle := range opts.Titles {
compResults = append(compResults, struct {
OriginalValue string
Value string
Distance int
}{
OriginalValue: result.Title,
Value: firstTitle,
Distance: lev.Distance(mediaTitle, firstTitle),
})
if secondTitle != "" {
compResults = append(compResults, struct {
OriginalValue string
Value string
Distance int
}{
OriginalValue: result.Title,
Value: secondTitle,
Distance: lev.Distance(mediaTitle, secondTitle),
})
}
}
}
// Find the best match
bestResult := struct {
OriginalValue string
Value string
Distance int
}{}
for _, result := range compResults {
if bestResult.OriginalValue == "" || result.Distance <= bestResult.Distance {
if bestResult.OriginalValue != "" && result.Distance == bestResult.Distance && len(result.OriginalValue) > len(bestResult.OriginalValue) {
continue
}
bestResult = result
}
}
if bestResult.OriginalValue == "" {
return nil, fmt.Errorf("no results found")
}
if bestResult.Distance > 10 {
return nil, fmt.Errorf("no results found")
}
// Get the result
for _, r := range ret {
if r.Title == bestResult.OriginalValue {
return r, nil
}
}
return nil, fmt.Errorf("no results found")
}
func (af *AnimeFillerList) FindFillerData(slug string) (ret *Data, err error) {
defer util.HandlePanicInModuleWithError("api/metadata/filler/FindFillerData", &err)
c := colly.NewCollector(
colly.UserAgent(af.userAgent),
)
ret = &Data{
FillerEpisodes: make([]string, 0),
}
fillerEps := make([]string, 0)
c.OnHTML("tr.filler", func(e *colly.HTMLElement) {
fillerEps = append(fillerEps, e.ChildText("td.Number"))
})
err = c.Visit(fmt.Sprintf("%s%s", af.baseUrl, slug))
if err != nil {
return nil, err
}
ret.FillerEpisodes = fillerEps
return
}
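// fillerLookupExample is a minimal sketch chaining Search and FindFillerData.
// The title is an assumed example; Search resolves it to a show slug on
// animefillerlist.com and FindFillerData scrapes that show's filler episode numbers.
func fillerLookupExample() ([]string, error) {
    af := NewAnimeFillerList(util.NewLogger())
    res, err := af.Search(SearchOptions{Titles: []string{"Hunter x Hunter (2011)"}})
    if err != nil {
        return nil, err
    }
    data, err := af.FindFillerData(res.Slug)
    if err != nil {
        return nil, err
    }
    return data.FillerEpisodes, nil
}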

View File

@@ -0,0 +1,24 @@
package filler
import (
"seanime/internal/util"
"testing"
"github.com/davecgh/go-spew/spew"
)
func TestAnimeFillerList_Search(t *testing.T) {
af := NewAnimeFillerList(util.NewLogger())
opts := SearchOptions{
Titles: []string{"Hunter x Hunter (2011)"},
}
ret, err := af.Search(opts)
if err != nil {
t.Error(err)
}
spew.Dump(ret)
}

View File

@@ -0,0 +1,186 @@
package mal
import (
"fmt"
"net/url"
)
const (
BaseAnimeFields string = "id,title,main_picture,alternative_titles,start_date,end_date,start_season,nsfw,synopsis,num_episodes,mean,rank,popularity,media_type,status"
)
type (
BasicAnime struct {
ID int `json:"id"`
Title string `json:"title"`
MainPicture struct {
Medium string `json:"medium"`
Large string `json:"large"`
} `json:"main_picture"`
AlternativeTitles struct {
Synonyms []string `json:"synonyms"`
En string `json:"en"`
Ja string `json:"ja"`
} `json:"alternative_titles"`
StartDate string `json:"start_date"`
EndDate string `json:"end_date"`
StartSeason struct {
Year int `json:"year"`
Season string `json:"season"`
} `json:"start_season"`
Synopsis string `json:"synopsis"`
NSFW string `json:"nsfw"`
NumEpisodes int `json:"num_episodes"`
Mean float32 `json:"mean"`
Rank int `json:"rank"`
Popularity int `json:"popularity"`
MediaType MediaType `json:"media_type"`
Status MediaStatus `json:"status"`
}
AnimeListEntry struct {
Node struct {
ID int `json:"id"`
Title string `json:"title"`
MainPicture struct {
Medium string `json:"medium"`
Large string `json:"large"`
} `json:"main_picture"`
} `json:"node"`
ListStatus struct {
Status MediaListStatus `json:"status"`
IsRewatching bool `json:"is_rewatching"`
NumEpisodesWatched int `json:"num_episodes_watched"`
Score int `json:"score"`
UpdatedAt string `json:"updated_at"`
} `json:"list_status"`
}
)
func (w *Wrapper) GetAnimeDetails(mId int) (*BasicAnime, error) {
w.logger.Debug().Int("mId", mId).Msg("mal: Getting anime details")
reqUrl := fmt.Sprintf("%s/anime/%d?fields=%s", ApiBaseURL, mId, BaseAnimeFields)
if w.AccessToken == "" {
return nil, fmt.Errorf("access token is empty")
}
var anime BasicAnime
err := w.doQuery("GET", reqUrl, nil, "application/json", &anime)
if err != nil {
w.logger.Error().Err(err).Int("mId", mId).Msg("mal: Failed to get anime details")
return nil, err
}
w.logger.Info().Int("mId", mId).Msg("mal: Fetched anime details")
return &anime, nil
}
func (w *Wrapper) GetAnimeCollection() ([]*AnimeListEntry, error) {
w.logger.Debug().Msg("mal: Getting anime collection")
reqUrl := fmt.Sprintf("%s/users/@me/animelist?fields=list_status&limit=1000", ApiBaseURL)
type response struct {
Data []*AnimeListEntry `json:"data"`
}
var data response
err := w.doQuery("GET", reqUrl, nil, "application/json", &data)
if err != nil {
w.logger.Error().Err(err).Msg("mal: Failed to get anime collection")
return nil, err
}
w.logger.Info().Msg("mal: Fetched anime collection")
return data.Data, nil
}
type AnimeListProgressParams struct {
NumEpisodesWatched *int
}
func (w *Wrapper) UpdateAnimeProgress(opts *AnimeListProgressParams, mId int) error {
w.logger.Debug().Int("mId", mId).Msg("mal: Updating anime progress")
// Get anime details
anime, err := w.GetAnimeDetails(mId)
if err != nil {
return err
}
status := MediaListStatusWatching
if anime.Status == MediaStatusFinishedAiring && anime.NumEpisodes > 0 && anime.NumEpisodes <= *opts.NumEpisodesWatched {
status = MediaListStatusCompleted
}
if anime.NumEpisodes > 0 && *opts.NumEpisodesWatched > anime.NumEpisodes {
*opts.NumEpisodesWatched = anime.NumEpisodes
}
// Update MAL list entry
err = w.UpdateAnimeListStatus(&AnimeListStatusParams{
Status: &status,
NumEpisodesWatched: opts.NumEpisodesWatched,
}, mId)
if err == nil {
w.logger.Info().Int("mId", mId).Msg("mal: Updated anime progress")
}
return err
}
type AnimeListStatusParams struct {
Status *MediaListStatus
IsRewatching *bool
NumEpisodesWatched *int
Score *int
}
func (w *Wrapper) UpdateAnimeListStatus(opts *AnimeListStatusParams, mId int) error {
w.logger.Debug().Int("mId", mId).Msg("mal: Updating anime list status")
reqUrl := fmt.Sprintf("%s/anime/%d/my_list_status", ApiBaseURL, mId)
// Build URL
urlData := url.Values{}
if opts.Status != nil {
urlData.Set("status", string(*opts.Status))
}
if opts.IsRewatching != nil {
urlData.Set("is_rewatching", fmt.Sprintf("%t", *opts.IsRewatching))
}
if opts.NumEpisodesWatched != nil {
urlData.Set("num_watched_episodes", fmt.Sprintf("%d", *opts.NumEpisodesWatched))
}
if opts.Score != nil {
urlData.Set("score", fmt.Sprintf("%d", *opts.Score))
}
encodedData := urlData.Encode()
err := w.doMutation("PATCH", reqUrl, encodedData)
if err != nil {
w.logger.Error().Err(err).Int("mId", mId).Msg("mal: Failed to update anime list status")
return err
}
return nil
}
func (w *Wrapper) DeleteAnimeListItem(mId int) error {
w.logger.Debug().Int("mId", mId).Msg("mal: Deleting anime list item")
reqUrl := fmt.Sprintf("%s/anime/%d/my_list_status", ApiBaseURL, mId)
err := w.doMutation("DELETE", reqUrl, "")
if err != nil {
w.logger.Error().Err(err).Int("mId", mId).Msg("mal: Failed to delete anime list item")
return err
}
w.logger.Info().Int("mId", mId).Msg("mal: Deleted anime list item")
return nil
}
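// updateAnimeProgressExample sketches the typical progress-update call with
// the wrapper above. The media ID (51179, as in the tests) and episode count
// are assumed example values.
func updateAnimeProgressExample(w *Wrapper) error {
    watched := 2
    return w.UpdateAnimeProgress(&AnimeListProgressParams{
        NumEpisodesWatched: &watched,
    }, 51179)
}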

View File

@@ -0,0 +1,62 @@
package mal
import (
"github.com/davecgh/go-spew/spew"
"seanime/internal/test_utils"
"seanime/internal/util"
"testing"
)
func TestGetAnimeDetails(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.MyAnimeList())
malWrapper := NewWrapper(test_utils.ConfigData.Provider.MalJwt, util.NewLogger())
res, err := malWrapper.GetAnimeDetails(51179)
spew.Dump(res)
if err != nil {
t.Fatalf("error while fetching media, %v", err)
}
t.Log(res.Title)
}
func TestGetAnimeCollection(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.MyAnimeList())
malWrapper := NewWrapper(test_utils.ConfigData.Provider.MalJwt, util.NewLogger())
res, err := malWrapper.GetAnimeCollection()
if err != nil {
t.Fatalf("error while fetching anime collection, %v", err)
}
for _, entry := range res {
t.Log(entry.Node.Title)
if entry.Node.ID == 51179 {
spew.Dump(entry)
}
}
}
func TestUpdateAnimeListStatus(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.MyAnimeList(), test_utils.MyAnimeListMutation())
malWrapper := NewWrapper(test_utils.ConfigData.Provider.MalJwt, util.NewLogger())
mId := 51179
progress := 2
status := MediaListStatusWatching
err := malWrapper.UpdateAnimeListStatus(&AnimeListStatusParams{
Status: &status,
NumEpisodesWatched: &progress,
}, mId)
if err != nil {
t.Fatalf("error while fetching media, %v", err)
}
}

View File

@@ -0,0 +1,185 @@
package mal
import (
"fmt"
"net/url"
)
const (
BaseMangaFields string = "id,title,main_picture,alternative_titles,start_date,end_date,nsfw,synopsis,num_volumes,num_chapters,mean,rank,popularity,media_type,status"
)
type (
BasicManga struct {
ID int `json:"id"`
Title string `json:"title"`
MainPicture struct {
Medium string `json:"medium"`
Large string `json:"large"`
} `json:"main_picture"`
AlternativeTitles struct {
Synonyms []string `json:"synonyms"`
En string `json:"en"`
Ja string `json:"ja"`
} `json:"alternative_titles"`
StartDate string `json:"start_date"`
EndDate string `json:"end_date"`
Synopsis string `json:"synopsis"`
NSFW string `json:"nsfw"`
NumVolumes int `json:"num_volumes"`
NumChapters int `json:"num_chapters"`
Mean float32 `json:"mean"`
Rank int `json:"rank"`
Popularity int `json:"popularity"`
MediaType MediaType `json:"media_type"`
Status MediaStatus `json:"status"`
}
MangaListEntry struct {
Node struct {
ID int `json:"id"`
Title string `json:"title"`
MainPicture struct {
Medium string `json:"medium"`
Large string `json:"large"`
} `json:"main_picture"`
} `json:"node"`
ListStatus struct {
Status MediaListStatus `json:"status"`
IsRereading bool `json:"is_rereading"`
NumVolumesRead int `json:"num_volumes_read"`
NumChaptersRead int `json:"num_chapters_read"`
Score int `json:"score"`
UpdatedAt string `json:"updated_at"`
} `json:"list_status"`
}
)
func (w *Wrapper) GetMangaDetails(mId int) (*BasicManga, error) {
w.logger.Debug().Int("mId", mId).Msg("mal: Getting manga details")
reqUrl := fmt.Sprintf("%s/manga/%d?fields=%s", ApiBaseURL, mId, BaseMangaFields)
if w.AccessToken == "" {
return nil, fmt.Errorf("access token is empty")
}
var manga BasicManga
err := w.doQuery("GET", reqUrl, nil, "application/json", &manga)
if err != nil {
w.logger.Error().Err(err).Msg("mal: Failed to get manga details")
return nil, err
}
w.logger.Info().Int("mId", mId).Msg("mal: Fetched manga details")
return &manga, nil
}
func (w *Wrapper) GetMangaCollection() ([]*MangaListEntry, error) {
w.logger.Debug().Msg("mal: Getting manga collection")
reqUrl := fmt.Sprintf("%s/users/@me/mangalist?fields=list_status&limit=1000", ApiBaseURL)
type response struct {
Data []*MangaListEntry `json:"data"`
}
var data response
err := w.doQuery("GET", reqUrl, nil, "application/json", &data)
if err != nil {
w.logger.Error().Err(err).Msg("mal: Failed to get manga collection")
return nil, err
}
w.logger.Info().Msg("mal: Fetched manga collection")
return data.Data, nil
}
type MangaListProgressParams struct {
NumChaptersRead *int
}
func (w *Wrapper) UpdateMangaProgress(opts *MangaListProgressParams, mId int) error {
w.logger.Debug().Int("mId", mId).Msg("mal: Updating manga progress")
// Get manga details
manga, err := w.GetMangaDetails(mId)
if err != nil {
return err
}
status := MediaListStatusReading
if manga.Status == MediaStatusFinished && manga.NumChapters > 0 && manga.NumChapters <= *opts.NumChaptersRead {
status = MediaListStatusCompleted
}
if manga.NumChapters > 0 && *opts.NumChaptersRead > manga.NumChapters {
*opts.NumChaptersRead = manga.NumChapters
}
// Update MAL list entry
err = w.UpdateMangaListStatus(&MangaListStatusParams{
Status: &status,
NumChaptersRead: opts.NumChaptersRead,
}, mId)
if err == nil {
w.logger.Info().Int("mId", mId).Msg("mal: Updated manga progress")
}
return err
}
type MangaListStatusParams struct {
Status *MediaListStatus
IsRereading *bool
NumChaptersRead *int
Score *int
}
func (w *Wrapper) UpdateMangaListStatus(opts *MangaListStatusParams, mId int) error {
w.logger.Debug().Int("mId", mId).Msg("mal: Updating manga list status")
reqUrl := fmt.Sprintf("%s/manga/%d/my_list_status", ApiBaseURL, mId)
// Build URL
urlData := url.Values{}
if opts.Status != nil {
urlData.Set("status", string(*opts.Status))
}
if opts.IsRereading != nil {
urlData.Set("is_rereading", fmt.Sprintf("%t", *opts.IsRereading))
}
if opts.NumChaptersRead != nil {
urlData.Set("num_chapters_read", fmt.Sprintf("%d", *opts.NumChaptersRead))
}
if opts.Score != nil {
urlData.Set("score", fmt.Sprintf("%d", *opts.Score))
}
encodedData := urlData.Encode()
err := w.doMutation("PATCH", reqUrl, encodedData)
if err != nil {
w.logger.Error().Err(err).Msg("mal: Failed to update manga list status")
return err
}
return nil
}
func (w *Wrapper) DeleteMangaListItem(mId int) error {
w.logger.Debug().Int("mId", mId).Msg("mal: Deleting manga list item")
reqUrl := fmt.Sprintf("%s/manga/%d/my_list_status", ApiBaseURL, mId)
err := w.doMutation("DELETE", reqUrl, "")
if err != nil {
w.logger.Error().Err(err).Msg("mal: Failed to delete manga list item")
return err
}
w.logger.Info().Int("mId", mId).Msg("mal: Deleted manga list item")
return nil
}
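// mangaChapterProgressExample sketches reading the authenticated user's manga
// list with the wrapper above and collecting chapters read per media ID.
func mangaChapterProgressExample(w *Wrapper) (map[int]int, error) {
    entries, err := w.GetMangaCollection()
    if err != nil {
        return nil, err
    }
    progress := make(map[int]int, len(entries))
    for _, entry := range entries {
        progress[entry.Node.ID] = entry.ListStatus.NumChaptersRead
    }
    return progress, nil
}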

View File

@@ -0,0 +1,62 @@
package mal
import (
"github.com/davecgh/go-spew/spew"
"seanime/internal/test_utils"
"seanime/internal/util"
"testing"
)
func TestGetMangaDetails(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.MyAnimeList())
malWrapper := NewWrapper(test_utils.ConfigData.Provider.MalJwt, util.NewLogger())
res, err := malWrapper.GetMangaDetails(13)
spew.Dump(res)
if err != nil {
t.Fatalf("error while fetching media, %v", err)
}
t.Log(res.Title)
}
func TestGetMangaCollection(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.MyAnimeList())
malWrapper := NewWrapper(test_utils.ConfigData.Provider.MalJwt, util.NewLogger())
res, err := malWrapper.GetMangaCollection()
if err != nil {
t.Fatalf("error while fetching anime collection, %v", err)
}
for _, entry := range res {
t.Log(entry.Node.Title)
if entry.Node.ID == 13 {
spew.Dump(entry)
}
}
}
func TestUpdateMangaListStatus(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.MyAnimeList(), test_utils.MyAnimeListMutation())
malWrapper := NewWrapper(test_utils.ConfigData.Provider.MalJwt, util.NewLogger())
mId := 13
progress := 1000
status := MediaListStatusReading
err := malWrapper.UpdateMangaListStatus(&MangaListStatusParams{
Status: &status,
NumChaptersRead: &progress,
}, mId)
if err != nil {
t.Fatalf("error while fetching media, %v", err)
}
}

View File

@@ -0,0 +1,232 @@
package mal
import (
"errors"
"fmt"
"github.com/goccy/go-json"
"github.com/samber/lo"
"io"
"math"
"net/http"
"net/url"
"regexp"
"seanime/internal/util/comparison"
"seanime/internal/util/result"
"sort"
"strings"
)
type (
SearchResultPayload struct {
MediaType string `json:"media_type"`
StartYear int `json:"start_year"`
Aired string `json:"aired,omitempty"`
Score string `json:"score"`
Status string `json:"status"`
}
SearchResultAnime struct {
ID int `json:"id"`
Type string `json:"type"`
Name string `json:"name"`
URL string `json:"url"`
ImageURL string `json:"image_url"`
ThumbnailURL string `json:"thumbnail_url"`
Payload *SearchResultPayload `json:"payload"`
ESScore float64 `json:"es_score"`
}
SearchResult struct {
Categories []*struct {
Type string `json:"type"`
Items []*SearchResultAnime `json:"items"`
} `json:"categories"`
}
SearchCache struct {
*result.Cache[int, *SearchResultAnime]
}
)
//----------------------------------------------------------------------------------------------------------------------
// SearchWithMAL uses MAL's search API to find suggestions that match the title provided.
func SearchWithMAL(title string, slice int) ([]*SearchResultAnime, error) {
url := "https://myanimelist.net/search/prefix.json?type=anime&v=1&keyword=" + url.QueryEscape(title)
res, err := http.Get(url)
if err != nil {
return nil, err
}
defer res.Body.Close()
if res.StatusCode != http.StatusOK {
return nil, fmt.Errorf("request failed with status code: %d", res.StatusCode)
}
body, err := io.ReadAll(res.Body)
if err != nil {
return nil, err
}
var bodyMap SearchResult
err = json.Unmarshal(body, &bodyMap)
if err != nil {
return nil, fmt.Errorf("unmarshaling error: %v", err)
}
if bodyMap.Categories == nil {
return nil, fmt.Errorf("missing 'categories' in response")
}
items := make([]*SearchResultAnime, 0)
for _, cat := range bodyMap.Categories {
if cat.Type == "anime" {
items = append(items, cat.Items...)
}
}
if len(items) > slice {
return items[:slice], nil
}
return items, nil
}
// AdvancedSearchWithMAL is like SearchWithMAL, but it uses additional algorithms to find the best match.
func AdvancedSearchWithMAL(title string) (*SearchResultAnime, error) {
if len(title) == 0 {
return nil, fmt.Errorf("title is empty")
}
// trim the title
title = strings.ToLower(strings.TrimSpace(title))
// MAL typically doesn't use "cour"
re := regexp.MustCompile(`\bcour\b`)
title = re.ReplaceAllString(title, "part")
// fetch suggestions from MAL
suggestions, err := SearchWithMAL(title, 8)
if err != nil {
return nil, err
}
// sort the suggestions by score
sort.Slice(suggestions, func(i, j int) bool {
return suggestions[i].ESScore > suggestions[j].ESScore
})
// keep anime that have aired
suggestions = lo.Filter(suggestions, func(n *SearchResultAnime, index int) bool {
return n.ESScore >= 0.1 && n.Payload.Status != "Not yet aired"
})
// reduce score if anime is older than 2006
suggestions = lo.Map(suggestions, func(n *SearchResultAnime, index int) *SearchResultAnime {
if n.Payload.StartYear < 2006 {
n.ESScore -= 0.1
}
return n
})
tparts := strings.Fields(title)
tsub := tparts[0]
if len(tparts) > 1 {
tsub += " " + tparts[1]
}
tsub = strings.TrimSpace(tsub)
//
t1, foundT1 := lo.Find(suggestions, func(n *SearchResultAnime) bool {
nTitle := strings.ToLower(n.Name)
re := regexp.MustCompile(`\b(film|movie|season|part|(s\d{2}e?))\b`)
return strings.HasPrefix(nTitle, tsub) && n.Payload.MediaType == "TV" && !re.MatchString(nTitle)
})
// very generous
t2, foundT2 := lo.Find(suggestions, func(n *SearchResultAnime) bool {
nTitle := strings.ToLower(n.Name)
_tsub := tparts[0]
re := regexp.MustCompile(`\b(film|movie|season|part|(s\d{2}e?))\b`)
return strings.HasPrefix(nTitle, _tsub) && n.Payload.MediaType == "TV" && !re.MatchString(nTitle)
})
levResult, found := comparison.FindBestMatchWithLevenshtein(&title, lo.Map(suggestions, func(n *SearchResultAnime, index int) *string { return &n.Name }))
if !found {
return nil, errors.New("couldn't find a suggestion from levenshtein")
}
levSuggestion, found := lo.Find(suggestions, func(n *SearchResultAnime) bool {
return strings.ToLower(n.Name) == strings.ToLower(*levResult.Value)
})
if !found {
return nil, errors.New("couldn't locate lenshtein result")
}
if foundT1 {
d, found := comparison.FindBestMatchWithLevenshtein(&tsub, []*string{&title, new(string)})
if found && len(*d.Value) > 0 {
if d.Distance <= 1 {
return t1, nil
}
}
}
// Strong correlation using MAL
if suggestions[0].ESScore >= 4.5 {
return suggestions[0], nil
}
// Very Likely match using distance
if levResult.Distance <= 4 {
return levSuggestion, nil
}
if suggestions[0].ESScore < 5 {
// Likely match using [startsWith]
if foundT1 {
dev := math.Abs(t1.ESScore-suggestions[0].ESScore) < 2.0
if len(tsub) > 6 && dev {
return t1, nil
}
}
// Likely match using [startsWith]
if foundT2 {
dev := math.Abs(t2.ESScore-suggestions[0].ESScore) < 2.0
if len(tparts[0]) > 6 && dev {
return t2, nil
}
}
// Likely match using distance
if levSuggestion.ESScore >= 1 && !(suggestions[0].ESScore > 3) {
return suggestions[0], nil
}
// Less than likely match using MAL
return suggestions[0], nil
}
// Distance above threshold, fall back to the first MAL suggestion
if levResult.Distance >= 5 && suggestions[0].ESScore >= 1 {
return suggestions[0], nil
}
return nil, errors.New("no suitable match found")
}

View File

@@ -0,0 +1,34 @@
package mal
import (
"seanime/internal/test_utils"
"testing"
)
func TestSearchWithMAL(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.MyAnimeList())
res, err := SearchWithMAL("bungo stray dogs", 4)
if err != nil {
t.Fatalf("error while fetching media, %v", err)
}
for _, m := range res {
t.Log(m.Name)
}
}
func TestAdvancedSearchWithMal(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.MyAnimeList())
res, err := AdvancedSearchWithMAL("sousou no frieren")
if err != nil {
t.Fatal("expected result, got error: ", err)
}
t.Log(res.Name)
}

View File

@@ -0,0 +1,40 @@
package mal
import "time"
type (
RequestOptions struct {
AccessToken string
RefreshToken string
ExpiresAt time.Time
}
MediaType string
MediaStatus string
MediaListStatus string
)
const (
MediaTypeTV MediaType = "tv" // Anime
MediaTypeOVA MediaType = "ova" // Anime
MediaTypeMovie MediaType = "movie" // Anime
MediaTypeSpecial MediaType = "special" // Anime
MediaTypeONA MediaType = "ona" // Anime
MediaTypeMusic MediaType = "music"
MediaTypeManga MediaType = "manga" // Manga
MediaTypeNovel MediaType = "novel" // Manga
MediaTypeOneShot MediaType = "oneshot" // Manga
MediaStatusFinishedAiring MediaStatus = "finished_airing" // Anime
MediaStatusCurrentlyAiring MediaStatus = "currently_airing" // Anime
MediaStatusNotYetAired MediaStatus = "not_yet_aired" // Anime
MediaStatusFinished MediaStatus = "finished" // Manga
MediaStatusCurrentlyPublishing MediaStatus = "currently_publishing" // Manga
MediaStatusNotYetPublished MediaStatus = "not_yet_published" // Manga
MediaListStatusReading MediaListStatus = "reading" // Manga
MediaListStatusWatching MediaListStatus = "watching" // Anime
MediaListStatusCompleted MediaListStatus = "completed"
MediaListStatusOnHold MediaListStatus = "on_hold"
MediaListStatusDropped MediaListStatus = "dropped"
MediaListStatusPlanToWatch MediaListStatus = "plan_to_watch" // Anime
MediaListStatusPlanToRead MediaListStatus = "plan_to_read" // Manga
)

View File

@@ -0,0 +1,160 @@
package mal
import (
"fmt"
"github.com/goccy/go-json"
"github.com/rs/zerolog"
"io"
"net/http"
"net/url"
"seanime/internal/database/db"
"seanime/internal/database/models"
"strings"
"time"
)
const (
ApiBaseURL string = "https://api.myanimelist.net/v2"
)
type (
Wrapper struct {
AccessToken string
client *http.Client
logger *zerolog.Logger
}
)
func NewWrapper(accessToken string, logger *zerolog.Logger) *Wrapper {
return &Wrapper{
AccessToken: accessToken,
client: &http.Client{},
logger: logger,
}
}
func (w *Wrapper) doQuery(method, uri string, body io.Reader, contentType string, data interface{}) error {
req, err := http.NewRequest(method, uri, body)
if err != nil {
return err
}
req.Header.Add("Content-Type", contentType)
req.Header.Add("Authorization", "Bearer "+w.AccessToken)
// Make the HTTP request
resp, err := w.client.Do(req)
if err != nil {
return err
}
defer resp.Body.Close()
if !((resp.StatusCode >= 200) && (resp.StatusCode <= 299)) {
return fmt.Errorf("invalid response status %s", resp.Status)
}
if err := json.NewDecoder(resp.Body).Decode(data); err != nil {
return err
}
return nil
}
func (w *Wrapper) doMutation(method, uri, encodedParams string) error {
var reader io.Reader
if encodedParams != "" {
reader = strings.NewReader(encodedParams)
}
req, err := http.NewRequest(method, uri, reader)
if err != nil {
return err
}
req.Header.Add("Content-Type", "application/x-www-form-urlencoded")
req.Header.Add("Authorization", "Bearer "+w.AccessToken)
// Make the HTTP request
resp, err := w.client.Do(req)
if err != nil {
return err
}
defer resp.Body.Close()
if !((resp.StatusCode >= 200) && (resp.StatusCode <= 299)) {
return fmt.Errorf("invalid response status %s", resp.Status)
}
return nil
}
func VerifyMALAuth(malInfo *models.Mal, db *db.Database, logger *zerolog.Logger) (*models.Mal, error) {
// Token has not expired
if malInfo.TokenExpiresAt.After(time.Now()) {
logger.Debug().Msg("mal: Token is still valid")
return malInfo, nil
}
// Token is expired, refresh it
client := &http.Client{}
// Build URL
urlData := url.Values{}
urlData.Set("grant_type", "refresh_token")
urlData.Set("refresh_token", malInfo.RefreshToken)
encodedData := urlData.Encode()
req, err := http.NewRequest("POST", "https://myanimelist.net/v1/oauth2/token", strings.NewReader(encodedData))
if err != nil {
logger.Error().Err(err).Msg("mal: Failed to create request")
return malInfo, err
}
req.Header.Add("Content-Type", "application/x-www-form-urlencoded")
req.Header.Add("Authorization", "Basic "+malInfo.AccessToken)
// Response
res, err := client.Do(req)
if err != nil {
logger.Error().Err(err).Msg("mal: Failed to refresh token")
return malInfo, err
}
defer res.Body.Close()
type malAuthResponse struct {
AccessToken string `json:"access_token"`
RefreshToken string `json:"refresh_token"`
ExpiresIn int32 `json:"expires_in"`
TokenType string `json:"token_type"`
}
ret := malAuthResponse{}
if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
return malInfo, err
}
if ret.AccessToken == "" {
logger.Error().Msgf("mal: Failed to refresh token %s", res.Status)
return malInfo, fmt.Errorf("mal: Failed to refresh token %s", res.Status)
}
// Save
updatedMalInfo := models.Mal{
BaseModel: models.BaseModel{
ID: 1,
UpdatedAt: time.Now(),
},
Username: "",
AccessToken: ret.AccessToken,
RefreshToken: ret.RefreshToken,
TokenExpiresAt: time.Now().Add(time.Duration(ret.ExpiresIn) * time.Second),
}
_, err = db.UpsertMalInfo(&updatedMalInfo)
if err != nil {
logger.Error().Err(err).Msg("mal: Failed to save updated MAL info")
return malInfo, err
}
logger.Info().Msg("mal: Refreshed token")
return &updatedMalInfo, nil
}

View File

@@ -0,0 +1,65 @@
package mangaupdates
import (
"bytes"
"github.com/davecgh/go-spew/spew"
"github.com/goccy/go-json"
"github.com/stretchr/testify/require"
"net/http"
"strings"
"testing"
"time"
)
func TestApi(t *testing.T) {
tests := []struct {
title string
startDate string
}{
{
title: "Dandadan",
startDate: "2021-04-06",
},
}
type searchReleaseBody struct {
Search string `json:"search"`
StartDate string `json:"start_date,omitempty"`
}
var apiUrl = "https://api.mangaupdates.com/v1/releases/search"
for _, test := range tests {
t.Run(test.title, func(t *testing.T) {
client := http.Client{Timeout: 10 * time.Second}
body := searchReleaseBody{
Search: strings.ToLower(test.title),
StartDate: test.startDate,
}
bodyB, err := json.Marshal(body)
require.NoError(t, err)
req, err := http.NewRequest("POST", apiUrl, bytes.NewBuffer(bodyB))
require.NoError(t, err)
req.Header.Set("Content-Type", "application/json")
resp, err := client.Do(req)
require.NoError(t, err)
defer resp.Body.Close()
var result interface{}
err = json.NewDecoder(resp.Body).Decode(&result)
require.NoError(t, err)
spew.Dump(result)
})
}
}

View File

@@ -0,0 +1,144 @@
package metadata
import (
"regexp"
"seanime/internal/api/anilist"
"seanime/internal/hook"
"seanime/internal/util"
"seanime/internal/util/filecache"
"strconv"
"github.com/rs/zerolog"
"github.com/samber/mo"
)
type (
AnimeWrapperImpl struct {
metadata mo.Option[*AnimeMetadata]
baseAnime *anilist.BaseAnime
fileCacher *filecache.Cacher
logger *zerolog.Logger
}
)
func (aw *AnimeWrapperImpl) GetEpisodeMetadata(epNum int) (ret EpisodeMetadata) {
if aw == nil || aw.baseAnime == nil {
return
}
ret = EpisodeMetadata{
AnidbId: 0,
TvdbId: 0,
Title: "",
Image: "",
AirDate: "",
Length: 0,
Summary: "",
Overview: "",
EpisodeNumber: epNum,
Episode: strconv.Itoa(epNum),
SeasonNumber: 0,
AbsoluteEpisodeNumber: 0,
AnidbEid: 0,
}
defer util.HandlePanicInModuleThen("api/metadata/GetEpisodeMetadata", func() {})
reqEvent := &AnimeEpisodeMetadataRequestedEvent{}
reqEvent.MediaId = aw.baseAnime.GetID()
reqEvent.EpisodeNumber = epNum
reqEvent.EpisodeMetadata = &ret
_ = hook.GlobalHookManager.OnAnimeEpisodeMetadataRequested().Trigger(reqEvent)
epNum = reqEvent.EpisodeNumber
// Default prevented by hook, return the metadata
if reqEvent.DefaultPrevented {
if reqEvent.EpisodeMetadata == nil {
return ret
}
return *reqEvent.EpisodeMetadata
}
//
// Process
//
episode := mo.None[*EpisodeMetadata]()
if aw.metadata.IsAbsent() {
ret.Image = aw.baseAnime.GetBannerImageSafe()
} else {
episodeF, found := aw.metadata.MustGet().FindEpisode(strconv.Itoa(epNum))
if found {
episode = mo.Some(episodeF)
}
}
// If we don't have Animap metadata, just return the metadata containing the image
if episode.IsAbsent() {
return ret
}
ret = *episode.MustGet()
// If TVDB image is not set, use Animap image, if that is not set, use the AniList banner image
if ret.Image == "" {
// Set Animap image if TVDB image is not set
if episode.MustGet().Image != "" {
ret.Image = episode.MustGet().Image
} else {
// If Animap image is not set, use the base media image
ret.Image = aw.baseAnime.GetBannerImageSafe()
}
}
// Event
event := &AnimeEpisodeMetadataEvent{
EpisodeMetadata: &ret,
EpisodeNumber: epNum,
MediaId: aw.baseAnime.GetID(),
}
_ = hook.GlobalHookManager.OnAnimeEpisodeMetadata().Trigger(event)
if event.EpisodeMetadata == nil {
return ret
}
ret = *event.EpisodeMetadata
return ret
}
func ExtractEpisodeInteger(s string) (int, bool) {
pattern := "[0-9]+"
regex := regexp.MustCompile(pattern)
// Find the first match in the input string.
match := regex.FindString(s)
if match != "" {
// Convert the matched string to an integer.
num, err := strconv.Atoi(match)
if err != nil {
return 0, false
}
return num, true
}
return 0, false
}
func OffsetAnidbEpisode(s string, offset int) string {
pattern := "([0-9]+)"
regex := regexp.MustCompile(pattern)
// Replace each matched integer with its value incremented by the offset.
result := regex.ReplaceAllStringFunc(s, func(matched string) string {
num, err := strconv.Atoi(matched)
if err == nil {
num = num + offset
return strconv.Itoa(num)
} else {
return matched
}
})
return result
}

View File

@@ -0,0 +1,26 @@
package metadata
import (
"testing"
)
func TestOffsetEpisode(t *testing.T) {
cases := []struct {
input string
expected string
}{
{"S1", "S2"},
{"OP1", "OP2"},
{"1", "2"},
{"OP", "OP"},
}
for _, c := range cases {
actual := OffsetAnidbEpisode(c.input, 1)
if actual != c.expected {
t.Errorf("OffsetAnidbEpisode(%s, 1) == %s, expected %s", c.input, actual, c.expected)
}
}
}

View File

@@ -0,0 +1,47 @@
package metadata
import "seanime/internal/hook_resolver"
// AnimeMetadataRequestedEvent is triggered when anime metadata is requested and right before the metadata is processed.
// This event is followed by [AnimeMetadataEvent] which is triggered when the metadata is available.
// Prevent default to skip the default behavior and return the modified metadata.
// If the modified metadata is nil, an error will be returned.
type AnimeMetadataRequestedEvent struct {
hook_resolver.Event
MediaId int `json:"mediaId"`
// Empty metadata object, will be used if the hook prevents the default behavior
AnimeMetadata *AnimeMetadata `json:"animeMetadata"`
}
// AnimeMetadataEvent is triggered when anime metadata is available and is about to be returned.
// Anime metadata can be requested in many places, ranging from displaying the anime entry to starting a torrent stream.
// This event is triggered after [AnimeMetadataRequestedEvent].
// If the modified metadata is nil, an error will be returned.
type AnimeMetadataEvent struct {
hook_resolver.Event
MediaId int `json:"mediaId"`
AnimeMetadata *AnimeMetadata `json:"animeMetadata"`
}
// AnimeEpisodeMetadataRequestedEvent is triggered when anime episode metadata is requested.
// Prevent default to skip the default behavior and return the overridden metadata.
// This event is triggered before [AnimeEpisodeMetadataEvent].
// If the modified episode metadata is nil, an empty EpisodeMetadata object will be returned.
type AnimeEpisodeMetadataRequestedEvent struct {
hook_resolver.Event
// Empty metadata object, will be used if the hook prevents the default behavior
EpisodeMetadata *EpisodeMetadata `json:"animeEpisodeMetadata"`
EpisodeNumber int `json:"episodeNumber"`
MediaId int `json:"mediaId"`
}
// AnimeEpisodeMetadataEvent is triggered when anime episode metadata is available and is about to be returned.
// In the current implementation, episode metadata is requested for display purposes. It is used to get a more complete metadata object since the original AnimeMetadata object is not complete.
// This event is triggered after [AnimeEpisodeMetadataRequestedEvent].
// If the modified episode metadata is nil, an empty EpisodeMetadata object will be returned.
type AnimeEpisodeMetadataEvent struct {
hook_resolver.Event
EpisodeMetadata *EpisodeMetadata `json:"animeEpisodeMetadata"`
EpisodeNumber int `json:"episodeNumber"`
MediaId int `json:"mediaId"`
}

View File

@@ -0,0 +1,18 @@
package metadata
import (
"seanime/internal/util"
"seanime/internal/util/filecache"
"testing"
"github.com/stretchr/testify/require"
)
func GetMockProvider(t *testing.T) Provider {
filecacher, err := filecache.NewCacher(t.TempDir())
require.NoError(t, err)
return NewProvider(&NewProviderImplOptions{
Logger: util.NewLogger(),
FileCacher: filecacher,
})
}

View File

@@ -0,0 +1,212 @@
package metadata
import (
"errors"
"fmt"
"seanime/internal/api/anilist"
"seanime/internal/api/animap"
"seanime/internal/hook"
"seanime/internal/util/filecache"
"seanime/internal/util/result"
"strings"
"time"
"github.com/rs/zerolog"
"github.com/samber/mo"
"golang.org/x/sync/singleflight"
)
type (
ProviderImpl struct {
logger *zerolog.Logger
fileCacher *filecache.Cacher
animeMetadataCache *result.BoundedCache[string, *AnimeMetadata]
singleflight *singleflight.Group
}
NewProviderImplOptions struct {
Logger *zerolog.Logger
FileCacher *filecache.Cacher
}
)
func GetAnimeMetadataCacheKey(platform Platform, mId int) string {
return fmt.Sprintf("%s$%d", platform, mId)
}
// NewProvider creates a new metadata provider.
func NewProvider(options *NewProviderImplOptions) Provider {
return &ProviderImpl{
logger: options.Logger,
fileCacher: options.FileCacher,
animeMetadataCache: result.NewBoundedCache[string, *AnimeMetadata](100),
singleflight: &singleflight.Group{},
}
}
// GetCache returns the anime metadata cache.
func (p *ProviderImpl) GetCache() *result.BoundedCache[string, *AnimeMetadata] {
return p.animeMetadataCache
}
// GetAnimeMetadata fetches anime metadata from api.ani.zip.
func (p *ProviderImpl) GetAnimeMetadata(platform Platform, mId int) (ret *AnimeMetadata, err error) {
cacheKey := GetAnimeMetadataCacheKey(platform, mId)
if cached, ok := p.animeMetadataCache.Get(cacheKey); ok {
return cached, nil
}
res, err, _ := p.singleflight.Do(cacheKey, func() (interface{}, error) {
return p.fetchAnimeMetadata(platform, mId)
})
if err != nil {
return nil, err
}
return res.(*AnimeMetadata), nil
}
func (p *ProviderImpl) fetchAnimeMetadata(platform Platform, mId int) (*AnimeMetadata, error) {
ret := &AnimeMetadata{
Titles: make(map[string]string),
Episodes: make(map[string]*EpisodeMetadata),
EpisodeCount: 0,
SpecialCount: 0,
Mappings: &AnimeMappings{},
}
// Invoke AnimeMetadataRequested hook
reqEvent := &AnimeMetadataRequestedEvent{
MediaId: mId,
AnimeMetadata: ret,
}
err := hook.GlobalHookManager.OnAnimeMetadataRequested().Trigger(reqEvent)
if err != nil {
return nil, err
}
mId = reqEvent.MediaId
// Default prevented by hook, return the metadata
if reqEvent.DefaultPrevented {
// Override the metadata
ret = reqEvent.AnimeMetadata
// Trigger the event
event := &AnimeMetadataEvent{
MediaId: mId,
AnimeMetadata: ret,
}
err = hook.GlobalHookManager.OnAnimeMetadata().Trigger(event)
if err != nil {
return nil, err
}
ret = event.AnimeMetadata
mId = event.MediaId
if ret == nil {
return nil, errors.New("no metadata was returned")
}
p.animeMetadataCache.SetT(GetAnimeMetadataCacheKey(platform, mId), ret, 1*time.Hour)
return ret, nil
}
m, err := animap.FetchAnimapMedia(string(platform), mId)
if err != nil || m == nil {
//return p.AnizipFallback(platform, mId)
return nil, err
}
ret.Titles = m.Titles
ret.EpisodeCount = 0
ret.SpecialCount = 0
ret.Mappings.AnimeplanetId = m.Mappings.AnimePlanetID
ret.Mappings.KitsuId = m.Mappings.KitsuID
ret.Mappings.MalId = m.Mappings.MalID
ret.Mappings.Type = m.Mappings.Type
ret.Mappings.AnilistId = m.Mappings.AnilistID
ret.Mappings.AnisearchId = m.Mappings.AnisearchID
ret.Mappings.AnidbId = m.Mappings.AnidbID
ret.Mappings.NotifymoeId = m.Mappings.NotifyMoeID
ret.Mappings.LivechartId = m.Mappings.LivechartID
ret.Mappings.ThetvdbId = m.Mappings.TheTvdbID
ret.Mappings.ImdbId = ""
ret.Mappings.ThemoviedbId = m.Mappings.TheMovieDbID
for key, ep := range m.Episodes {
firstChar := key[0]
if firstChar == 'S' {
ret.SpecialCount++
} else {
if firstChar >= '0' && firstChar <= '9' {
ret.EpisodeCount++
}
}
em := &EpisodeMetadata{
AnidbId: ep.AnidbId,
TvdbId: ep.TvdbId,
Title: ep.AnidbTitle,
Image: ep.Image,
AirDate: ep.AirDate,
Length: ep.Runtime,
Summary: strings.ReplaceAll(ep.Overview, "`", "'"),
Overview: strings.ReplaceAll(ep.Overview, "`", "'"),
EpisodeNumber: ep.Number,
Episode: key,
SeasonNumber: ep.SeasonNumber,
AbsoluteEpisodeNumber: ep.AbsoluteNumber,
AnidbEid: ep.AnidbId,
HasImage: ep.Image != "",
}
if em.Length == 0 && ep.Runtime > 0 {
em.Length = ep.Runtime
}
if em.Summary == "" && ep.Overview != "" {
em.Summary = ep.Overview
}
if em.Overview == "" && ep.Overview != "" {
em.Overview = ep.Overview
}
if ep.TvdbTitle != "" && ep.AnidbTitle == "Episode "+ep.AnidbEpisode {
em.Title = ep.TvdbTitle
}
ret.Episodes[key] = em
}
// Event
event := &AnimeMetadataEvent{
MediaId: mId,
AnimeMetadata: ret,
}
err = hook.GlobalHookManager.OnAnimeMetadata().Trigger(event)
if err != nil {
return nil, err
}
ret = event.AnimeMetadata
mId = event.MediaId
p.animeMetadataCache.SetT(GetAnimeMetadataCacheKey(platform, mId), ret, 1*time.Hour)
return ret, nil
}
// GetAnimeMetadataWrapper creates a new anime wrapper.
//
// Example:
//
// metadataProvider.GetAnimeMetadataWrapper(media, metadata)
// metadataProvider.GetAnimeMetadataWrapper(media, nil)
func (p *ProviderImpl) GetAnimeMetadataWrapper(media *anilist.BaseAnime, metadata *AnimeMetadata) AnimeMetadataWrapper {
aw := &AnimeWrapperImpl{
metadata: mo.None[*AnimeMetadata](),
baseAnime: media,
fileCacher: p.fileCacher,
logger: p.logger,
}
if metadata != nil {
aw.metadata = mo.Some(metadata)
}
return aw
}
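// episodeMetadataExample sketches the intended flow through the provider:
// fetch the media-level metadata, wrap it together with the AniList entry,
// then resolve a single episode. The episode number 1 is an assumed example.
func episodeMetadataExample(p Provider, media *anilist.BaseAnime) (EpisodeMetadata, error) {
    animeMetadata, err := p.GetAnimeMetadata(AnilistPlatform, media.GetID())
    if err != nil {
        return EpisodeMetadata{}, err
    }
    wrapper := p.GetAnimeMetadataWrapper(media, animeMetadata)
    return wrapper.GetEpisodeMetadata(1), nil
}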

View File

@@ -0,0 +1,165 @@
package metadata
import (
"seanime/internal/api/anilist"
"seanime/internal/util/result"
"strings"
"time"
)
const (
AnilistPlatform Platform = "anilist"
MalPlatform Platform = "mal"
)
type (
Platform string
Provider interface {
// GetAnimeMetadata fetches anime metadata for the given platform from a source.
// In this case, the source is api.ani.zip.
GetAnimeMetadata(platform Platform, mId int) (*AnimeMetadata, error)
GetCache() *result.BoundedCache[string, *AnimeMetadata]
// GetAnimeMetadataWrapper creates a wrapper for anime metadata.
GetAnimeMetadataWrapper(anime *anilist.BaseAnime, metadata *AnimeMetadata) AnimeMetadataWrapper
}
// AnimeMetadataWrapper is a container for anime metadata.
// This wrapper is used to get a more complete metadata object by getting data from multiple sources in the Provider.
// The user can request metadata to be fetched from TVDB as well, which will be stored in the cache.
AnimeMetadataWrapper interface {
// GetEpisodeMetadata combines metadata from multiple sources to create a single EpisodeMetadata object.
GetEpisodeMetadata(episodeNumber int) EpisodeMetadata
}
)
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
type (
AnimeMetadata struct {
Titles map[string]string `json:"titles"`
Episodes map[string]*EpisodeMetadata `json:"episodes"`
EpisodeCount int `json:"episodeCount"`
SpecialCount int `json:"specialCount"`
Mappings *AnimeMappings `json:"mappings"`
currentEpisodeCount int `json:"-"`
}
AnimeMappings struct {
AnimeplanetId string `json:"animeplanetId"`
KitsuId int `json:"kitsuId"`
MalId int `json:"malId"`
Type string `json:"type"`
AnilistId int `json:"anilistId"`
AnisearchId int `json:"anisearchId"`
AnidbId int `json:"anidbId"`
NotifymoeId string `json:"notifymoeId"`
LivechartId int `json:"livechartId"`
ThetvdbId int `json:"thetvdbId"`
ImdbId string `json:"imdbId"`
ThemoviedbId string `json:"themoviedbId"`
}
EpisodeMetadata struct {
AnidbId int `json:"anidbId"`
TvdbId int `json:"tvdbId"`
Title string `json:"title"`
Image string `json:"image"`
AirDate string `json:"airDate"`
Length int `json:"length"`
Summary string `json:"summary"`
Overview string `json:"overview"`
EpisodeNumber int `json:"episodeNumber"`
Episode string `json:"episode"`
SeasonNumber int `json:"seasonNumber"`
AbsoluteEpisodeNumber int `json:"absoluteEpisodeNumber"`
AnidbEid int `json:"anidbEid"`
HasImage bool `json:"hasImage"` // Indicates if the episode has a real image
}
)
func (m *AnimeMetadata) GetTitle() string {
if m == nil {
return ""
}
if len(m.Titles["en"]) > 0 {
return m.Titles["en"]
}
return m.Titles["ro"]
}
func (m *AnimeMetadata) GetMappings() *AnimeMappings {
if m == nil {
return &AnimeMappings{}
}
return m.Mappings
}
func (m *AnimeMetadata) FindEpisode(ep string) (*EpisodeMetadata, bool) {
if m.Episodes == nil {
return nil, false
}
episode, found := m.Episodes[ep]
if !found {
return nil, false
}
return episode, true
}
func (m *AnimeMetadata) GetMainEpisodeCount() int {
if m == nil {
return 0
}
return m.EpisodeCount
}
func (m *AnimeMetadata) GetCurrentEpisodeCount() int {
if m == nil {
return 0
}
if m.currentEpisodeCount > 0 {
return m.currentEpisodeCount
}
count := 0
for _, ep := range m.Episodes {
firstChar := ep.Episode[0]
if firstChar >= '0' && firstChar <= '9' {
// Check if aired
if ep.AirDate != "" {
date, err := time.Parse("2006-01-02", ep.AirDate)
if err == nil {
if date.Before(time.Now()) || date.Equal(time.Now()) {
count++
}
}
}
}
}
m.currentEpisodeCount = count
return count
}
// GetOffset returns the offset of the first episode relative to the absolute episode number.
// e.g., if the first episode's absolute number is 13, then the offset is 12.
func (m *AnimeMetadata) GetOffset() int {
if m == nil {
return 0
}
firstEp, found := m.FindEpisode("1")
if !found {
return 0
}
if firstEp.AbsoluteEpisodeNumber == 0 {
return 0
}
return firstEp.AbsoluteEpisodeNumber - 1
}
func (e *EpisodeMetadata) GetTitle() string {
if e == nil {
return ""
}
return strings.ReplaceAll(e.Title, "`", "'")
}

View File

@@ -0,0 +1,19 @@
package constants
import (
"seanime/internal/util"
"time"
)
const (
Version = "2.9.10"
VersionName = "Natsu"
GcTime = time.Minute * 30
ConfigFileName = "config.toml"
MalClientId = "51cb4294feb400f3ddc66a30f9b9a00f"
DiscordApplicationId = "1224777421941899285"
)
var DefaultExtensionMarketplaceURL = util.Decode("aHR0cHM6Ly9yYXcuZ2l0aHVidXNlcmNvbnRlbnQuY29tLzVyYWhpbS9zZWFuaW1lLWV4dGVuc2lvbnMvcmVmcy9oZWFkcy9tYWluL21hcmtldHBsYWNlLmpzb24=")
var AnnouncementURL = util.Decode("aHR0cHM6Ly9yYXcuZ2l0aHVidXNlcmNvbnRlbnQuY29tLzVyYWhpbS9oaWJpa2UvcmVmcy9oZWFkcy9tYWluL3B1YmxpYy9hbm5vdW5jZW1lbnRzLmpzb24=")
var InternalMetadataURL = util.Decode("aHR0cHM6Ly9hbmltZS5jbGFwLmluZw==")

View File

@@ -0,0 +1,418 @@
package continuity
import (
"fmt"
"seanime/internal/database/db_bridge"
"seanime/internal/hook"
"seanime/internal/library/anime"
"seanime/internal/util"
"seanime/internal/util/filecache"
"strconv"
"strings"
"time"
)
const (
MaxWatchHistoryItems = 100
IgnoreRatioThreshold = 0.9
WatchHistoryBucketName = "watch_history"
)
type (
// WatchHistory is a map of WatchHistoryItem.
// The key is the WatchHistoryItem.MediaId.
WatchHistory map[int]*WatchHistoryItem
// WatchHistoryItem are stored in the file cache.
// The history is used to resume playback from the last known position.
// Item.MediaId and Item.EpisodeNumber are used to identify the media and episode.
// Only one Item per MediaId should exist in the history.
WatchHistoryItem struct {
Kind Kind `json:"kind"`
// Used for MediastreamKind and ExternalPlayerKind.
Filepath string `json:"filepath"`
MediaId int `json:"mediaId"`
EpisodeNumber int `json:"episodeNumber"`
// The current playback time in seconds.
// Used to determine when to remove the item from the history.
CurrentTime float64 `json:"currentTime"`
// The duration of the media in seconds.
Duration float64 `json:"duration"`
// Timestamp of when the item was added to the history.
TimeAdded time.Time `json:"timeAdded"`
// TimeAdded is used in conjunction with TimeUpdated
// Timestamp of when the item was last updated.
// Used to determine when to remove the item from the history (First in, first out).
TimeUpdated time.Time `json:"timeUpdated"`
}
WatchHistoryItemResponse struct {
Item *WatchHistoryItem `json:"item"`
Found bool `json:"found"`
}
UpdateWatchHistoryItemOptions struct {
CurrentTime float64 `json:"currentTime"`
Duration float64 `json:"duration"`
MediaId int `json:"mediaId"`
EpisodeNumber int `json:"episodeNumber"`
Filepath string `json:"filepath,omitempty"`
Kind Kind `json:"kind"`
}
)
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
func (m *Manager) GetWatchHistory() WatchHistory {
defer util.HandlePanicInModuleThen("continuity/GetWatchHistory", func() {})
m.mu.RLock()
defer m.mu.RUnlock()
items, err := filecache.GetAll[*WatchHistoryItem](m.fileCacher, *m.watchHistoryFileCacheBucket)
if err != nil {
m.logger.Error().Err(err).Msg("continuity: Failed to get watch history")
return nil
}
ret := make(WatchHistory)
for _, item := range items {
ret[item.MediaId] = item
}
return ret
}
func (m *Manager) GetWatchHistoryItem(mediaId int) *WatchHistoryItemResponse {
defer util.HandlePanicInModuleThen("continuity/GetWatchHistoryItem", func() {})
m.mu.RLock()
defer m.mu.RUnlock()
i, found := m.getWatchHistory(mediaId)
return &WatchHistoryItemResponse{
Item: i,
Found: found,
}
}
// UpdateWatchHistoryItem updates the WatchHistoryItem in the file cache.
func (m *Manager) UpdateWatchHistoryItem(opts *UpdateWatchHistoryItemOptions) (err error) {
defer util.HandlePanicInModuleWithError("continuity/UpdateWatchHistoryItem", &err)
m.mu.Lock()
defer m.mu.Unlock()
added := false
// Get the current history
i, found := m.getWatchHistory(opts.MediaId)
if !found {
added = true
i = &WatchHistoryItem{
Kind: opts.Kind,
Filepath: opts.Filepath,
MediaId: opts.MediaId,
EpisodeNumber: opts.EpisodeNumber,
CurrentTime: opts.CurrentTime,
Duration: opts.Duration,
TimeAdded: time.Now(),
TimeUpdated: time.Now(),
}
} else {
i.Kind = opts.Kind
i.EpisodeNumber = opts.EpisodeNumber
i.CurrentTime = opts.CurrentTime
i.Duration = opts.Duration
i.TimeUpdated = time.Now()
}
// Save the item
err = m.fileCacher.Set(*m.watchHistoryFileCacheBucket, strconv.Itoa(opts.MediaId), i)
if err != nil {
return fmt.Errorf("continuity: Failed to save watch history item: %w", err)
}
_ = hook.GlobalHookManager.OnWatchHistoryItemUpdated().Trigger(&WatchHistoryItemUpdatedEvent{
WatchHistoryItem: i,
})
// If the item was added, check if we need to remove the oldest item
if added {
_ = m.trimWatchHistoryItems()
}
return nil
}
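// updateWatchHistoryExample sketches how playback code might report progress.
// The media ID, episode and times are assumed example values; Kind uses the
// ExternalPlayerKind constant referenced elsewhere in this package.
func updateWatchHistoryExample(m *Manager) error {
    return m.UpdateWatchHistoryItem(&UpdateWatchHistoryItemOptions{
        Kind:          ExternalPlayerKind,
        MediaId:       21,
        EpisodeNumber: 1,
        CurrentTime:   653, // seconds
        Duration:      1440,
    })
}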
func (m *Manager) DeleteWatchHistoryItem(mediaId int) (err error) {
defer util.HandlePanicInModuleWithError("continuity/DeleteWatchHistoryItem", &err)
m.mu.Lock()
defer m.mu.Unlock()
err = m.fileCacher.Delete(*m.watchHistoryFileCacheBucket, strconv.Itoa(mediaId))
if err != nil {
return fmt.Errorf("continuity: Failed to delete watch history item: %w", err)
}
return nil
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// GetExternalPlayerEpisodeWatchHistoryItem is called before launching the external player to get the last known position.
// Unlike GetWatchHistoryItem, this checks if the episode numbers match.
func (m *Manager) GetExternalPlayerEpisodeWatchHistoryItem(path string, isStream bool, episode, mediaId int) (ret *WatchHistoryItemResponse) {
defer util.HandlePanicInModuleThen("continuity/GetExternalPlayerEpisodeWatchHistoryItem", func() {})
m.mu.RLock()
defer m.mu.RUnlock()
if !m.settings.WatchContinuityEnabled {
return &WatchHistoryItemResponse{
Item: nil,
Found: false,
}
}
ret = &WatchHistoryItemResponse{
Item: nil,
Found: false,
}
m.logger.Debug().
Str("path", path).
Bool("isStream", isStream).
Int("episode", episode).
Int("mediaId", mediaId).
Msg("continuity: Retrieving watch history item")
// Normalize path
path = util.NormalizePath(path)
if isStream {
event := &WatchHistoryStreamEpisodeItemRequestedEvent{
WatchHistoryItem: &WatchHistoryItem{},
}
hook.GlobalHookManager.OnWatchHistoryStreamEpisodeItemRequested().Trigger(event)
if event.DefaultPrevented {
return &WatchHistoryItemResponse{
Item: event.WatchHistoryItem,
Found: event.WatchHistoryItem != nil,
}
}
if episode == 0 || mediaId == 0 {
m.logger.Debug().
Int("episode", episode).
Int("mediaId", mediaId).
Msg("continuity: No episode or media provided")
return
}
i, found := m.getWatchHistory(mediaId)
if !found || i.EpisodeNumber != episode {
m.logger.Trace().
Interface("item", i).
Msg("continuity: No watch history item found or episode number does not match")
return
}
m.logger.Debug().
Interface("item", i).
Msg("continuity: Watch history item found")
return &WatchHistoryItemResponse{
Item: i,
Found: found,
}
} else {
// Find the local file from the path
lfs, _, err := db_bridge.GetLocalFiles(m.db)
if err != nil {
return ret
}
event := &WatchHistoryLocalFileEpisodeItemRequestedEvent{
Path: path,
LocalFiles: lfs,
WatchHistoryItem: &WatchHistoryItem{},
}
hook.GlobalHookManager.OnWatchHistoryLocalFileEpisodeItemRequested().Trigger(event)
if event.DefaultPrevented {
return &WatchHistoryItemResponse{
Item: event.WatchHistoryItem,
Found: event.WatchHistoryItem != nil,
}
}
var lf *anime.LocalFile
// Find the local file from the path
for _, l := range lfs {
if l.GetNormalizedPath() == path {
lf = l
m.logger.Trace().Msg("continuity: Local file found from path")
break
}
}
// If the local file is not found, the path might be a filename (in the case of VLC)
if lf == nil {
for _, l := range lfs {
if strings.ToLower(l.Name) == path {
lf = l
m.logger.Trace().Msg("continuity: Local file found from filename")
break
}
}
}
if lf == nil || lf.MediaId == 0 || !lf.IsMain() {
m.logger.Trace().Msg("continuity: Local file not found or not main")
return
}
i, found := m.getWatchHistory(lf.MediaId)
if !found || i.EpisodeNumber != lf.GetEpisodeNumber() {
m.logger.Trace().
Interface("item", i).
Msg("continuity: No watch history item found or episode number does not match")
return
}
m.logger.Debug().
Interface("item", i).
Msg("continuity: Watch history item found")
return &WatchHistoryItemResponse{
Item: i,
Found: found,
}
}
}
func (m *Manager) UpdateExternalPlayerEpisodeWatchHistoryItem(currentTime, duration float64) {
defer util.HandlePanicInModuleThen("continuity/UpdateExternalPlayerEpisodeWatchHistoryItem", func() {})
m.mu.Lock()
defer m.mu.Unlock()
if !m.settings.WatchContinuityEnabled {
return
}
if m.externalPlayerEpisodeDetails.IsAbsent() {
return
}
added := false
opts, ok := m.externalPlayerEpisodeDetails.Get()
if !ok {
return
}
// Get the current history
i, found := m.getWatchHistory(opts.MediaId)
if !found {
added = true
i = &WatchHistoryItem{
Kind: ExternalPlayerKind,
Filepath: opts.Filepath,
MediaId: opts.MediaId,
EpisodeNumber: opts.EpisodeNumber,
CurrentTime: currentTime,
Duration: duration,
TimeAdded: time.Now(),
TimeUpdated: time.Now(),
}
} else {
i.Kind = ExternalPlayerKind
i.EpisodeNumber = opts.EpisodeNumber
i.CurrentTime = currentTime
i.Duration = duration
i.TimeUpdated = time.Now()
}
// Save the item
_ = m.fileCacher.Set(*m.watchHistoryFileCacheBucket, strconv.Itoa(opts.MediaId), i)
// If the item was added, check if we need to remove the oldest item
if added {
_ = m.trimWatchHistoryItems()
}
return
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
func (m *Manager) getWatchHistory(mediaId int) (ret *WatchHistoryItem, exists bool) {
defer util.HandlePanicInModuleThen("continuity/getWatchHistory", func() {
ret = nil
exists = false
})
reqEvent := &WatchHistoryItemRequestedEvent{
MediaId: mediaId,
WatchHistoryItem: ret,
}
hook.GlobalHookManager.OnWatchHistoryItemRequested().Trigger(reqEvent)
ret = reqEvent.WatchHistoryItem
if reqEvent.DefaultPrevented {
return reqEvent.WatchHistoryItem, reqEvent.WatchHistoryItem != nil
}
exists, _ = m.fileCacher.Get(*m.watchHistoryFileCacheBucket, strconv.Itoa(mediaId), &ret)
if exists && ret != nil && ret.Duration > 0 {
// If the item's completion ratio is equal to or above IgnoreRatioThreshold, don't return anything
ratio := ret.CurrentTime / ret.Duration
if ratio >= IgnoreRatioThreshold {
// Delete the item
go func() {
defer util.HandlePanicInModuleThen("continuity/getWatchHistory", func() {})
_ = m.fileCacher.Delete(*m.watchHistoryFileCacheBucket, strconv.Itoa(mediaId))
}()
return nil, false
}
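// Items watched for less than 5% of their duration are treated as not started and ignored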
if ratio < 0.05 {
return nil, false
}
}
return
}
// trimWatchHistoryItems removes the oldest WatchHistoryItem from the file cache when the maximum number of items is exceeded.
func (m *Manager) trimWatchHistoryItems() error {
defer util.HandlePanicInModuleThen("continuity/TrimWatchHistoryItems", func() {})
// Get all the items
items, err := filecache.GetAll[*WatchHistoryItem](m.fileCacher, *m.watchHistoryFileCacheBucket)
if err != nil {
return fmt.Errorf("continuity: Failed to get watch history items: %w", err)
}
// If there are too many items, remove the oldest one
if len(items) > MaxWatchHistoryItems {
var oldestKey string
for key := range items {
if oldestKey == "" || items[key].TimeUpdated.Before(items[oldestKey].TimeUpdated) {
oldestKey = key
}
}
err = m.fileCacher.Delete(*m.watchHistoryFileCacheBucket, oldestKey)
if err != nil {
return fmt.Errorf("continuity: Failed to remove oldest watch history item: %w", err)
}
}
return nil
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

View File

@@ -0,0 +1,79 @@
package continuity
import (
"github.com/stretchr/testify/require"
"path/filepath"
"seanime/internal/database/db"
"seanime/internal/test_utils"
"seanime/internal/util"
"seanime/internal/util/filecache"
"testing"
)
func TestHistoryItems(t *testing.T) {
test_utils.SetTwoLevelDeep()
test_utils.InitTestProvider(t)
logger := util.NewLogger()
tempDir := t.TempDir()
t.Log(tempDir)
database, err := db.NewDatabase(test_utils.ConfigData.Path.DataDir, test_utils.ConfigData.Database.Name, logger)
require.NoError(t, err)
cacher, err := filecache.NewCacher(filepath.Join(tempDir, "cache"))
require.NoError(t, err)
manager := NewManager(&NewManagerOptions{
FileCacher: cacher,
Logger: logger,
Database: database,
})
require.NotNil(t, manager)
var mediaIds = make([]int, MaxWatchHistoryItems+1)
for i := 0; i < MaxWatchHistoryItems+1; i++ {
mediaIds[i] = i + 1
}
// Add items to the history
for _, mediaId := range mediaIds {
err = manager.UpdateWatchHistoryItem(&UpdateWatchHistoryItemOptions{
MediaId: mediaId,
EpisodeNumber: 1,
CurrentTime: 10,
Duration: 100,
})
require.NoError(t, err)
}
// Check if the oldest item was removed
items, err := filecache.GetAll[WatchHistoryItem](cacher, *manager.watchHistoryFileCacheBucket)
require.NoError(t, err)
require.Len(t, items, MaxWatchHistoryItems)
// Update an item
err = manager.UpdateWatchHistoryItem(&UpdateWatchHistoryItemOptions{
MediaId: mediaIds[0], // 1
EpisodeNumber: 2,
CurrentTime: 30,
Duration: 100,
})
require.NoError(t, err)
// Check if the item was updated
items, err = filecache.GetAll[WatchHistoryItem](cacher, *manager.watchHistoryFileCacheBucket)
require.NoError(t, err)
require.Len(t, items, MaxWatchHistoryItems)
item, found := items["1"]
require.True(t, found)
require.Equal(t, 2, item.EpisodeNumber)
require.Equal(t, 30., item.CurrentTime)
require.Equal(t, 100., item.Duration)
}

View File

@@ -0,0 +1,38 @@
package continuity
import (
"seanime/internal/hook_resolver"
"seanime/internal/library/anime"
)
// WatchHistoryItemRequestedEvent is triggered when a watch history item is requested.
// Prevent default to skip reading the watch history item from the file cache; in that case, the event should carry a valid WatchHistoryItem, or nil to indicate that no item was found.
type WatchHistoryItemRequestedEvent struct {
hook_resolver.Event
MediaId int `json:"mediaId"`
// Empty WatchHistoryItem object, will be used if the hook prevents the default behavior
WatchHistoryItem *WatchHistoryItem `json:"watchHistoryItem"`
}
// WatchHistoryItemUpdatedEvent is triggered when a watch history item is updated.
type WatchHistoryItemUpdatedEvent struct {
hook_resolver.Event
WatchHistoryItem *WatchHistoryItem `json:"watchHistoryItem"`
}
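// WatchHistoryLocalFileEpisodeItemRequestedEvent is triggered before the watch history item for a locally played file is retrieved.
// Prevent default to skip the local file lookup; in that case, set WatchHistoryItem to a valid object, or to nil if not found.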
type WatchHistoryLocalFileEpisodeItemRequestedEvent struct {
hook_resolver.Event
Path string
// All scanned local files
LocalFiles []*anime.LocalFile
// Empty WatchHistoryItem object, will be used if the hook prevents the default behavior
WatchHistoryItem *WatchHistoryItem `json:"watchHistoryItem"`
}
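// WatchHistoryStreamEpisodeItemRequestedEvent is triggered before the watch history item for a streamed episode is retrieved.
// Prevent default to skip the lookup; in that case, set WatchHistoryItem to a valid object, or to nil if not found.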
type WatchHistoryStreamEpisodeItemRequestedEvent struct {
hook_resolver.Event
Episode int
MediaId int
// Empty WatchHistoryItem object, will be used if the hook prevents the default behavior
WatchHistoryItem *WatchHistoryItem `json:"watchHistoryItem"`
}

View File

@@ -0,0 +1,106 @@
package continuity
import (
"github.com/rs/zerolog"
"github.com/samber/mo"
"seanime/internal/database/db"
"seanime/internal/util/filecache"
"sync"
"time"
)
const (
OnlinestreamKind Kind = "onlinestream"
MediastreamKind Kind = "mediastream"
ExternalPlayerKind Kind = "external_player"
)
type (
// Manager is used to manage the user's viewing history across different media types.
Manager struct {
fileCacher *filecache.Cacher
db *db.Database
watchHistoryFileCacheBucket *filecache.Bucket
externalPlayerEpisodeDetails mo.Option[*ExternalPlayerEpisodeDetails]
logger *zerolog.Logger
settings *Settings
mu sync.RWMutex
}
// ExternalPlayerEpisodeDetails is used to store the episode details when using an external player.
// Since the media player module only cares about the filepath, the PlaybackManager will store the episode number and media id here when playback starts.
ExternalPlayerEpisodeDetails struct {
EpisodeNumber int `json:"episodeNumber"`
MediaId int `json:"mediaId"`
Filepath string `json:"filepath"`
}
Settings struct {
WatchContinuityEnabled bool
}
Kind string
)
type (
NewManagerOptions struct {
FileCacher *filecache.Cacher
Logger *zerolog.Logger
Database *db.Database
}
)
// NewManager creates a new Manager; it should only be called once.
func NewManager(opts *NewManagerOptions) *Manager {
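// The bucket is given an effectively unlimited TTL; old items are trimmed manually (see trimWatchHistoryItems)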
watchHistoryFileCacheBucket := filecache.NewBucket(WatchHistoryBucketName, time.Hour*24*99999)
ret := &Manager{
fileCacher: opts.FileCacher,
logger: opts.Logger,
db: opts.Database,
watchHistoryFileCacheBucket: &watchHistoryFileCacheBucket,
settings: &Settings{
WatchContinuityEnabled: false,
},
externalPlayerEpisodeDetails: mo.None[*ExternalPlayerEpisodeDetails](),
}
ret.logger.Info().Msg("continuity: Initialized manager")
return ret
}
// SetSettings should be called after initializing the Manager.
func (m *Manager) SetSettings(settings *Settings) {
if m == nil || settings == nil {
return
}
m.mu.Lock()
defer m.mu.Unlock()
m.settings = settings
}
// GetSettings returns the current settings.
func (m *Manager) GetSettings() *Settings {
if m == nil {
return nil
}
m.mu.RLock()
defer m.mu.RUnlock()
return m.settings
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
func (m *Manager) SetExternalPlayerEpisodeDetails(details *ExternalPlayerEpisodeDetails) {
if m == nil || details == nil {
return
}
m.mu.Lock()
defer m.mu.Unlock()
m.externalPlayerEpisodeDetails = mo.Some(details)
}

View File

@@ -0,0 +1,24 @@
package continuity
import (
"github.com/stretchr/testify/require"
"path/filepath"
"seanime/internal/database/db"
"seanime/internal/util"
"seanime/internal/util/filecache"
"testing"
)
func GetMockManager(t *testing.T, db *db.Database) *Manager {
logger := util.NewLogger()
cacher, err := filecache.NewCacher(filepath.Join(t.TempDir(), "cache"))
require.NoError(t, err)
manager := NewManager(&NewManagerOptions{
FileCacher: cacher,
Logger: logger,
Database: db,
})
return manager
}

View File

@@ -0,0 +1,109 @@
package core
import (
"context"
"seanime/internal/api/anilist"
"seanime/internal/events"
"seanime/internal/platforms/platform"
"seanime/internal/user"
)
// GetUser returns the currently logged-in user or a simulated one.
func (a *App) GetUser() *user.User {
if a.user == nil {
return user.NewSimulatedUser()
}
return a.user
}
func (a *App) GetUserAnilistToken() string {
if a.user == nil || a.user.Token == user.SimulatedUserToken {
return ""
}
return a.user.Token
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// UpdatePlatform changes the current platform to the provided one.
func (a *App) UpdatePlatform(platform platform.Platform) {
a.AnilistPlatform = platform
}
// UpdateAnilistClientToken will update the Anilist Client Wrapper token.
// This function should be called when a user logs in.
func (a *App) UpdateAnilistClientToken(token string) {
a.AnilistClient = anilist.NewAnilistClient(token)
a.AnilistPlatform.SetAnilistClient(a.AnilistClient) // Update Anilist Client Wrapper in Platform
}
// GetAnimeCollection returns the user's Anilist collection if it is in the cache; otherwise it queries Anilist for the user's collection.
// When bypassCache is true, it always queries Anilist for the user's collection.
func (a *App) GetAnimeCollection(bypassCache bool) (*anilist.AnimeCollection, error) {
return a.AnilistPlatform.GetAnimeCollection(context.Background(), bypassCache)
}
// GetRawAnimeCollection is the same as GetAnimeCollection but returns the raw collection that includes custom lists
func (a *App) GetRawAnimeCollection(bypassCache bool) (*anilist.AnimeCollection, error) {
return a.AnilistPlatform.GetRawAnimeCollection(context.Background(), bypassCache)
}
// RefreshAnimeCollection queries Anilist for the user's collection
func (a *App) RefreshAnimeCollection() (*anilist.AnimeCollection, error) {
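// Notify registered listeners asynchronously while the collection is being refreshed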
go func() {
a.OnRefreshAnilistCollectionFuncs.Range(func(key string, f func()) bool {
go f()
return true
})
}()
ret, err := a.AnilistPlatform.RefreshAnimeCollection(context.Background())
if err != nil {
return nil, err
}
// Save the collection to PlaybackManager
a.PlaybackManager.SetAnimeCollection(ret)
// Save the collection to AutoDownloader
a.AutoDownloader.SetAnimeCollection(ret)
// Save the collection to LocalManager
a.LocalManager.SetAnimeCollection(ret)
// Save the collection to DirectStreamManager
a.DirectStreamManager.SetAnimeCollection(ret)
a.WSEventManager.SendEvent(events.RefreshedAnilistAnimeCollection, nil)
return ret, nil
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// GetMangaCollection is the same as GetAnimeCollection but for manga
func (a *App) GetMangaCollection(bypassCache bool) (*anilist.MangaCollection, error) {
return a.AnilistPlatform.GetMangaCollection(context.Background(), bypassCache)
}
// GetRawMangaCollection does not exclude custom lists
func (a *App) GetRawMangaCollection(bypassCache bool) (*anilist.MangaCollection, error) {
return a.AnilistPlatform.GetRawMangaCollection(context.Background(), bypassCache)
}
// RefreshMangaCollection queries Anilist for the user's manga collection
func (a *App) RefreshMangaCollection() (*anilist.MangaCollection, error) {
mc, err := a.AnilistPlatform.RefreshMangaCollection(context.Background())
if err != nil {
return nil, err
}
a.LocalManager.SetMangaCollection(mc)
a.WSEventManager.SendEvent(events.RefreshedAnilistMangaCollection, nil)
return mc, nil
}

View File

@@ -0,0 +1,440 @@
package core
import (
"os"
"runtime"
"seanime/internal/api/anilist"
"seanime/internal/api/metadata"
"seanime/internal/constants"
"seanime/internal/continuity"
"seanime/internal/database/db"
"seanime/internal/database/models"
debrid_client "seanime/internal/debrid/client"
"seanime/internal/directstream"
discordrpc_presence "seanime/internal/discordrpc/presence"
"seanime/internal/doh"
"seanime/internal/events"
"seanime/internal/extension_playground"
"seanime/internal/extension_repo"
"seanime/internal/hook"
"seanime/internal/library/autodownloader"
"seanime/internal/library/autoscanner"
"seanime/internal/library/fillermanager"
"seanime/internal/library/playbackmanager"
"seanime/internal/library/scanner"
"seanime/internal/local"
"seanime/internal/manga"
"seanime/internal/mediaplayers/iina"
"seanime/internal/mediaplayers/mediaplayer"
"seanime/internal/mediaplayers/mpchc"
"seanime/internal/mediaplayers/mpv"
"seanime/internal/mediaplayers/vlc"
"seanime/internal/mediastream"
"seanime/internal/nakama"
"seanime/internal/nativeplayer"
"seanime/internal/onlinestream"
"seanime/internal/platforms/anilist_platform"
"seanime/internal/platforms/offline_platform"
"seanime/internal/platforms/platform"
"seanime/internal/platforms/simulated_platform"
"seanime/internal/plugin"
"seanime/internal/report"
"seanime/internal/torrent_clients/torrent_client"
"seanime/internal/torrents/torrent"
"seanime/internal/torrentstream"
"seanime/internal/updater"
"seanime/internal/user"
"seanime/internal/util"
"seanime/internal/util/filecache"
"seanime/internal/util/result"
"sync"
"github.com/rs/zerolog"
)
type (
App struct {
Config *Config
Database *db.Database
Logger *zerolog.Logger
TorrentClientRepository *torrent_client.Repository
TorrentRepository *torrent.Repository
DebridClientRepository *debrid_client.Repository
Watcher *scanner.Watcher
AnilistClient anilist.AnilistClient
AnilistPlatform platform.Platform
OfflinePlatform platform.Platform
LocalManager local.Manager
FillerManager *fillermanager.FillerManager
WSEventManager *events.WSEventManager
AutoDownloader *autodownloader.AutoDownloader
ExtensionRepository *extension_repo.Repository
ExtensionPlaygroundRepository *extension_playground.PlaygroundRepository
DirectStreamManager *directstream.Manager
NativePlayer *nativeplayer.NativePlayer
MediaPlayer struct {
VLC *vlc.VLC
MpcHc *mpchc.MpcHc
Mpv *mpv.Mpv
Iina *iina.Iina
}
MediaPlayerRepository *mediaplayer.Repository
Version string
Updater *updater.Updater
AutoScanner *autoscanner.AutoScanner
PlaybackManager *playbackmanager.PlaybackManager
FileCacher *filecache.Cacher
OnlinestreamRepository *onlinestream.Repository
MangaRepository *manga.Repository
MetadataProvider metadata.Provider
DiscordPresence *discordrpc_presence.Presence
MangaDownloader *manga.Downloader
ContinuityManager *continuity.Manager
Cleanups []func()
OnRefreshAnilistCollectionFuncs *result.Map[string, func()]
OnFlushLogs func()
MediastreamRepository *mediastream.Repository
TorrentstreamRepository *torrentstream.Repository
FeatureFlags FeatureFlags
Settings *models.Settings
SecondarySettings struct {
Mediastream *models.MediastreamSettings
Torrentstream *models.TorrentstreamSettings
Debrid *models.DebridSettings
} // Struct for other settings sent to the client
SelfUpdater *updater.SelfUpdater
ReportRepository *report.Repository
TotalLibrarySize uint64 // Initialized in modules.go
LibraryDir string
IsDesktopSidecar bool
animeCollection *anilist.AnimeCollection
rawAnimeCollection *anilist.AnimeCollection // (retains custom lists)
mangaCollection *anilist.MangaCollection
rawMangaCollection *anilist.MangaCollection // (retains custom lists)
user *user.User
previousVersion string
moduleMu sync.Mutex
HookManager hook.Manager
ServerReady bool // Whether the Anilist data from the first request has been fetched
isOffline *bool
NakamaManager *nakama.Manager
ServerPasswordHash string // SHA-256 hash of the server password
}
)
// NewApp creates a new server instance
func NewApp(configOpts *ConfigOptions, selfupdater *updater.SelfUpdater) *App {
// Initialize logger with predefined format
logger := util.NewLogger()
// Log application version, OS, architecture and system info
logger.Info().Msgf("app: Seanime %s-%s", constants.Version, constants.VersionName)
logger.Info().Msgf("app: OS: %s", runtime.GOOS)
logger.Info().Msgf("app: Arch: %s", runtime.GOARCH)
logger.Info().Msgf("app: Processor count: %d", runtime.NumCPU())
// Initialize hook manager for plugin event system
hookManager := hook.NewHookManager(hook.NewHookManagerOptions{Logger: logger})
hook.SetGlobalHookManager(hookManager)
plugin.GlobalAppContext.SetLogger(logger)
// Store current version to detect version changes
previousVersion := constants.Version
// Add callback to track version changes
configOpts.OnVersionChange = append(configOpts.OnVersionChange, func(oldVersion string, newVersion string) {
logger.Info().Str("prev", oldVersion).Str("current", newVersion).Msg("app: Version change detected")
previousVersion = oldVersion
})
// Initialize configuration with provided options
// Creates config directory if it doesn't exist
cfg, err := NewConfig(configOpts, logger)
if err != nil {
logger.Fatal().Err(err).Msgf("app: Failed to initialize config")
}
// Compute SHA-256 hash of the server password
serverPasswordHash := ""
if cfg.Server.Password != "" {
serverPasswordHash = util.HashSHA256Hex(cfg.Server.Password)
}
// Create logs directory if it doesn't exist
_ = os.MkdirAll(cfg.Logs.Dir, 0755)
// Start background process to trim log files
go TrimLogEntries(cfg.Logs.Dir, logger)
logger.Info().Msgf("app: Data directory: %s", cfg.Data.AppDataDir)
logger.Info().Msgf("app: Working directory: %s", cfg.Data.WorkingDir)
// Log if running in desktop sidecar mode
if configOpts.IsDesktopSidecar {
logger.Info().Msg("app: Desktop sidecar mode enabled")
}
// Initialize database connection
database, err := db.NewDatabase(cfg.Data.AppDataDir, cfg.Database.Name, logger)
if err != nil {
logger.Fatal().Err(err).Msgf("app: Failed to initialize database")
}
HandleNewDatabaseEntries(database, logger)
// Clean up old database entries in background goroutines
database.TrimLocalFileEntries() // Remove old local file entries
database.TrimScanSummaryEntries() // Remove old scan summaries
database.TrimTorrentstreamHistory() // Remove old torrent stream history
// Get anime library paths for plugin context
animeLibraryPaths, _ := database.GetAllLibraryPathsFromSettings()
plugin.GlobalAppContext.SetModulesPartial(plugin.AppContextModules{
Database: database,
AnimeLibraryPaths: &animeLibraryPaths,
})
// Get Anilist token from database if available
anilistToken := database.GetAnilistToken()
// Initialize Anilist API client with the token
// If the token is empty, the client will not be authenticated
anilistCW := anilist.NewAnilistClient(anilistToken)
// Initialize WebSocket event manager for real-time communication
wsEventManager := events.NewWSEventManager(logger)
// Exit if no WebSocket connections in desktop sidecar mode
if configOpts.IsDesktopSidecar {
wsEventManager.ExitIfNoConnsAsDesktopSidecar()
}
// Initialize DNS-over-HTTPS service in background
go doh.HandleDoH(cfg.Server.DoHUrl, logger)
// Initialize file cache system for media and metadata
fileCacher, err := filecache.NewCacher(cfg.Cache.Dir)
if err != nil {
logger.Fatal().Err(err).Msgf("app: Failed to initialize file cacher")
}
// Initialize extension repository
extensionRepository := extension_repo.NewRepository(&extension_repo.NewRepositoryOptions{
Logger: logger,
ExtensionDir: cfg.Extensions.Dir,
WSEventManager: wsEventManager,
FileCacher: fileCacher,
HookManager: hookManager,
})
// Load extensions in background
go LoadExtensions(extensionRepository, logger, cfg)
// Initialize metadata provider for media information
metadataProvider := metadata.NewProvider(&metadata.NewProviderImplOptions{
Logger: logger,
FileCacher: fileCacher,
})
// Set initial metadata provider (will change if offline mode is enabled)
activeMetadataProvider := metadataProvider
// Initialize manga repository
mangaRepository := manga.NewRepository(&manga.NewRepositoryOptions{
Logger: logger,
FileCacher: fileCacher,
CacheDir: cfg.Cache.Dir,
ServerURI: cfg.GetServerURI(),
WsEventManager: wsEventManager,
DownloadDir: cfg.Manga.DownloadDir,
Database: database,
})
// Initialize Anilist platform
anilistPlatform := anilist_platform.NewAnilistPlatform(anilistCW, logger)
// Update plugin context with new modules
plugin.GlobalAppContext.SetModulesPartial(plugin.AppContextModules{
AnilistPlatform: anilistPlatform,
WSEventManager: wsEventManager,
MetadataProvider: metadataProvider,
})
// Initialize sync manager for offline/online synchronization
localManager, err := local.NewManager(&local.NewManagerOptions{
LocalDir: cfg.Offline.Dir,
AssetDir: cfg.Offline.AssetDir,
Logger: logger,
MetadataProvider: metadataProvider,
MangaRepository: mangaRepository,
Database: database,
WSEventManager: wsEventManager,
IsOffline: cfg.Server.Offline,
AnilistPlatform: anilistPlatform,
})
if err != nil {
logger.Fatal().Err(err).Msgf("app: Failed to initialize sync manager")
}
// Use local metadata provider if in offline mode
if cfg.Server.Offline {
activeMetadataProvider = localManager.GetOfflineMetadataProvider()
}
// Initialize local platform for offline operations
offlinePlatform, err := offline_platform.NewOfflinePlatform(localManager, anilistCW, logger)
if err != nil {
logger.Fatal().Err(err).Msgf("app: Failed to initialize local platform")
}
// Initialize simulated platform for unauthenticated operations
simulatedPlatform, err := simulated_platform.NewSimulatedPlatform(localManager, anilistCW, logger)
if err != nil {
logger.Fatal().Err(err).Msgf("app: Failed to initialize simulated platform")
}
// Change active platform if offline mode is enabled
activePlatform := anilistPlatform
if cfg.Server.Offline {
activePlatform = offlinePlatform
} else if !anilistCW.IsAuthenticated() {
logger.Warn().Msg("app: Anilist client is not authenticated, using simulated platform")
activePlatform = simulatedPlatform
}
// Initialize online streaming repository
onlinestreamRepository := onlinestream.NewRepository(&onlinestream.NewRepositoryOptions{
Logger: logger,
FileCacher: fileCacher,
MetadataProvider: activeMetadataProvider,
Platform: activePlatform,
Database: database,
})
// Initialize extension playground for testing extensions
extensionPlaygroundRepository := extension_playground.NewPlaygroundRepository(logger, activePlatform, activeMetadataProvider)
isOffline := cfg.Server.Offline
// Create the main app instance with initialized components
app := &App{
Config: cfg,
Database: database,
AnilistClient: anilistCW,
AnilistPlatform: activePlatform,
OfflinePlatform: offlinePlatform,
LocalManager: localManager,
WSEventManager: wsEventManager,
Logger: logger,
Version: constants.Version,
Updater: updater.New(constants.Version, logger, wsEventManager),
FileCacher: fileCacher,
OnlinestreamRepository: onlinestreamRepository,
MetadataProvider: activeMetadataProvider,
MangaRepository: mangaRepository,
ExtensionRepository: extensionRepository,
ExtensionPlaygroundRepository: extensionPlaygroundRepository,
ReportRepository: report.NewRepository(logger),
TorrentRepository: nil, // Initialized in App.initModulesOnce
FillerManager: nil, // Initialized in App.initModulesOnce
MangaDownloader: nil, // Initialized in App.initModulesOnce
PlaybackManager: nil, // Initialized in App.initModulesOnce
AutoDownloader: nil, // Initialized in App.initModulesOnce
AutoScanner: nil, // Initialized in App.initModulesOnce
MediastreamRepository: nil, // Initialized in App.initModulesOnce
TorrentstreamRepository: nil, // Initialized in App.initModulesOnce
ContinuityManager: nil, // Initialized in App.initModulesOnce
DebridClientRepository: nil, // Initialized in App.initModulesOnce
DirectStreamManager: nil, // Initialized in App.initModulesOnce
NativePlayer: nil, // Initialized in App.initModulesOnce
NakamaManager: nil, // Initialized in App.initModulesOnce
TorrentClientRepository: nil, // Initialized in App.InitOrRefreshModules
MediaPlayerRepository: nil, // Initialized in App.InitOrRefreshModules
DiscordPresence: nil, // Initialized in App.InitOrRefreshModules
previousVersion: previousVersion,
FeatureFlags: NewFeatureFlags(cfg, logger),
IsDesktopSidecar: configOpts.IsDesktopSidecar,
SecondarySettings: struct {
Mediastream *models.MediastreamSettings
Torrentstream *models.TorrentstreamSettings
Debrid *models.DebridSettings
}{Mediastream: nil, Torrentstream: nil},
SelfUpdater: selfupdater,
moduleMu: sync.Mutex{},
OnRefreshAnilistCollectionFuncs: result.NewResultMap[string, func()](),
HookManager: hookManager,
isOffline: &isOffline,
ServerPasswordHash: serverPasswordHash,
}
// Run database migrations if version has changed
app.runMigrations()
// Initialize modules that only need to be initialized once
app.initModulesOnce()
plugin.GlobalAppContext.SetModulesPartial(plugin.AppContextModules{
IsOffline: app.IsOffline(),
ContinuityManager: app.ContinuityManager,
AutoScanner: app.AutoScanner,
AutoDownloader: app.AutoDownloader,
FileCacher: app.FileCacher,
OnlinestreamRepository: app.OnlinestreamRepository,
MediastreamRepository: app.MediastreamRepository,
TorrentstreamRepository: app.TorrentstreamRepository,
})
if !*app.IsOffline() {
go app.Updater.FetchAnnouncements()
}
// Initialize all modules that depend on settings
app.InitOrRefreshModules()
// Load built-in extensions into extension consumers
app.AddExtensionBankToConsumers()
// Initialize Anilist data if not in offline mode
if !*app.IsOffline() {
app.InitOrRefreshAnilistData()
} else {
app.ServerReady = true
}
// Initialize mediastream settings (for streaming media)
app.InitOrRefreshMediastreamSettings()
// Initialize torrentstream settings (for torrent streaming)
app.InitOrRefreshTorrentstreamSettings()
// Initialize debrid settings (for debrid services)
app.InitOrRefreshDebridSettings()
// Register Nakama manager cleanup
app.AddCleanupFunction(app.NakamaManager.Cleanup)
// Run one-time initialization actions
app.performActionsOnce()
return app
}
func (a *App) IsOffline() *bool {
return a.isOffline
}
func (a *App) AddCleanupFunction(f func()) {
a.Cleanups = append(a.Cleanups, f)
}
func (a *App) AddOnRefreshAnilistCollectionFunc(key string, f func()) {
if key == "" {
return
}
a.OnRefreshAnilistCollectionFuncs.Set(key, f)
}
func (a *App) Cleanup() {
for _, f := range a.Cleanups {
f()
}
}

View File

@@ -0,0 +1,439 @@
package core
import (
"errors"
"fmt"
"os"
"path/filepath"
"seanime/internal/constants"
"seanime/internal/util"
"strconv"
"github.com/rs/zerolog"
"github.com/spf13/viper"
)
type Config struct {
Version string
Server struct {
Host string
Port int
Offline bool
UseBinaryPath bool // Makes $SEANIME_WORKING_DIR point to the binary's directory
Systray bool
DoHUrl string
Password string
}
Database struct {
Name string
}
Web struct {
AssetDir string
}
Logs struct {
Dir string
}
Cache struct {
Dir string
TranscodeDir string
}
Offline struct {
Dir string
AssetDir string
}
Manga struct {
DownloadDir string
LocalDir string
}
Data struct { // Hydrated after config is loaded
AppDataDir string
WorkingDir string
}
Extensions struct {
Dir string
}
Anilist struct {
ClientID string
}
Experimental struct {
MainServerTorrentStreaming bool
}
}
type ConfigOptions struct {
DataDir string // The path to the Seanime data directory, if any
OnVersionChange []func(oldVersion string, newVersion string)
EmbeddedLogo []byte // The embedded logo
IsDesktopSidecar bool // Run as the desktop sidecar
}
// NewConfig initializes the config
func NewConfig(options *ConfigOptions, logger *zerolog.Logger) (*Config, error) {
logger.Debug().Msg("app: Initializing config")
// Set Seanime's environment variables
if os.Getenv("SEANIME_DATA_DIR") != "" {
options.DataDir = os.Getenv("SEANIME_DATA_DIR")
}
defaultHost := "127.0.0.1"
defaultPort := 43211
if os.Getenv("SEANIME_SERVER_HOST") != "" {
defaultHost = os.Getenv("SEANIME_SERVER_HOST")
}
if os.Getenv("SEANIME_SERVER_PORT") != "" {
var err error
defaultPort, err = strconv.Atoi(os.Getenv("SEANIME_SERVER_PORT"))
if err != nil {
return nil, fmt.Errorf("invalid SEANIME_SERVER_PORT environment variable: %s", os.Getenv("SEANIME_SERVER_PORT"))
}
}
// Initialize the app data directory
dataDir, configPath, err := initAppDataDir(options.DataDir, logger)
if err != nil {
return nil, err
}
// Set Seanime's default custom environment variables
if err = setDataDirEnv(dataDir); err != nil {
return nil, err
}
// Configure viper
viper.SetConfigName(constants.ConfigFileName)
viper.SetConfigType("toml")
viper.SetConfigFile(configPath)
// Set default values
viper.SetDefault("version", constants.Version)
viper.SetDefault("server.host", defaultHost)
viper.SetDefault("server.port", defaultPort)
viper.SetDefault("server.offline", false)
// Use the binary's directory as the working directory environment variable on macOS
viper.SetDefault("server.useBinaryPath", true)
//viper.SetDefault("server.systray", true)
viper.SetDefault("database.name", "seanime")
viper.SetDefault("web.assetDir", "$SEANIME_DATA_DIR/assets")
viper.SetDefault("cache.dir", "$SEANIME_DATA_DIR/cache")
viper.SetDefault("cache.transcodeDir", "$SEANIME_DATA_DIR/cache/transcode")
viper.SetDefault("manga.downloadDir", "$SEANIME_DATA_DIR/manga")
viper.SetDefault("manga.localDir", "$SEANIME_DATA_DIR/manga-local")
viper.SetDefault("logs.dir", "$SEANIME_DATA_DIR/logs")
viper.SetDefault("offline.dir", "$SEANIME_DATA_DIR/offline")
viper.SetDefault("offline.assetDir", "$SEANIME_DATA_DIR/offline/assets")
viper.SetDefault("extensions.dir", "$SEANIME_DATA_DIR/extensions")
// Create and populate the config file if it doesn't exist
if err = createConfigFile(configPath); err != nil {
return nil, err
}
// Read the config file
if err := viper.ReadInConfig(); err != nil {
return nil, err
}
// Unmarshal the config values
cfg := &Config{}
if err := viper.Unmarshal(cfg); err != nil {
return nil, err
}
// Update the config if the version has changed
if err := updateVersion(cfg, options); err != nil {
return nil, err
}
// Before expanding the values, check if we need to override the working directory
if err = setWorkingDirEnv(cfg.Server.UseBinaryPath); err != nil {
return nil, err
}
// Expand the values, replacing environment variables
expandEnvironmentValues(cfg)
cfg.Data.AppDataDir = dataDir
cfg.Data.WorkingDir = os.Getenv("SEANIME_WORKING_DIR")
// Check validity of the config
if err := validateConfig(cfg, logger); err != nil {
return nil, err
}
go loadLogo(options.EmbeddedLogo, dataDir)
return cfg, nil
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
func (cfg *Config) GetServerAddr(df ...string) string {
return fmt.Sprintf("%s:%d", cfg.Server.Host, cfg.Server.Port)
}
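// GetServerURI returns the server's HTTP URI.
// When the host is unspecified (empty or 0.0.0.0), it falls back to a port-only address unless a display host is passed through df.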
func (cfg *Config) GetServerURI(df ...string) string {
pAddr := fmt.Sprintf("http://%s", cfg.GetServerAddr(df...))
if cfg.Server.Host == "" || cfg.Server.Host == "0.0.0.0" {
pAddr = fmt.Sprintf(":%d", cfg.Server.Port)
if len(df) > 0 {
pAddr = fmt.Sprintf("http://%s:%d", df[0], cfg.Server.Port)
}
}
return pAddr
}
func getWorkingDir(useBinaryPath bool) (string, error) {
// Get the working directory
wd, err := os.Getwd()
if err != nil {
return "", err
}
binaryDir := ""
if exe, err := os.Executable(); err == nil {
if p, err := filepath.EvalSymlinks(exe); err == nil {
binaryDir = filepath.Dir(p)
binaryDir = filepath.FromSlash(binaryDir)
}
}
if useBinaryPath && binaryDir != "" {
return binaryDir, nil
}
//// Use the binary's directory as the working directory if needed
//if useBinaryPath {
// exe, err := os.Executable()
// if err != nil {
// return wd, nil // Fallback to working dir
// }
// p, err := filepath.EvalSymlinks(exe)
// if err != nil {
// return wd, nil // Fallback to working dir
// }
// wd = filepath.Dir(p) // Set the binary's directory as the working directory
// return wd, nil
//}
return wd, nil
}
func setDataDirEnv(dataDir string) error {
// Set the data directory environment variable
if os.Getenv("SEANIME_DATA_DIR") == "" {
if err := os.Setenv("SEANIME_DATA_DIR", dataDir); err != nil {
return err
}
}
return nil
}
func setWorkingDirEnv(useBinaryPath bool) error {
// Set the working directory environment variable
wd, err := getWorkingDir(useBinaryPath)
if err != nil {
return err
}
if err = os.Setenv("SEANIME_WORKING_DIR", filepath.FromSlash(wd)); err != nil {
return err
}
return nil
}
// validateConfig checks if the config values are valid
func validateConfig(cfg *Config, logger *zerolog.Logger) error {
if cfg.Server.Host == "" {
return errInvalidConfigValue("server.host", "cannot be empty")
}
if cfg.Server.Port == 0 {
return errInvalidConfigValue("server.port", "cannot be 0")
}
if cfg.Database.Name == "" {
return errInvalidConfigValue("database.name", "cannot be empty")
}
if cfg.Web.AssetDir == "" {
return errInvalidConfigValue("web.assetDir", "cannot be empty")
}
if err := checkIsValidPath(cfg.Web.AssetDir); err != nil {
return wrapInvalidConfigValue("web.assetDir", err)
}
if cfg.Cache.Dir == "" {
return errInvalidConfigValue("cache.dir", "cannot be empty")
}
if err := checkIsValidPath(cfg.Cache.Dir); err != nil {
return wrapInvalidConfigValue("cache.dir", err)
}
if cfg.Cache.TranscodeDir == "" {
return errInvalidConfigValue("cache.transcodeDir", "cannot be empty")
}
if err := checkIsValidPath(cfg.Cache.TranscodeDir); err != nil {
return wrapInvalidConfigValue("cache.transcodeDir", err)
}
if cfg.Logs.Dir == "" {
return errInvalidConfigValue("logs.dir", "cannot be empty")
}
if err := checkIsValidPath(cfg.Logs.Dir); err != nil {
return wrapInvalidConfigValue("logs.dir", err)
}
if cfg.Manga.DownloadDir == "" {
return errInvalidConfigValue("manga.downloadDir", "cannot be empty")
}
if err := checkIsValidPath(cfg.Manga.DownloadDir); err != nil {
return wrapInvalidConfigValue("manga.downloadDir", err)
}
if cfg.Manga.LocalDir == "" {
return errInvalidConfigValue("manga.localDir", "cannot be empty")
}
if err := checkIsValidPath(cfg.Manga.LocalDir); err != nil {
return wrapInvalidConfigValue("manga.localDir", err)
}
if cfg.Extensions.Dir == "" {
return errInvalidConfigValue("extensions.dir", "cannot be empty")
}
if err := checkIsValidPath(cfg.Extensions.Dir); err != nil {
return wrapInvalidConfigValue("extensions.dir", err)
}
// "MainServerTorrentStreaming" is no longer experimental; warn users who still have the flag set in their config file
if cfg.Experimental.MainServerTorrentStreaming {
logger.Warn().Msgf("app: 'Main Server Torrent Streaming' feature is no longer experimental, remove the flag from your config file")
}
return nil
}
func checkIsValidPath(path string) error {
ok := filepath.IsAbs(path)
if !ok {
return errors.New("path is not an absolute path")
}
return nil
}
// errInvalidConfigValue returns an error for an invalid config value
func errInvalidConfigValue(s string, s2 string) error {
return fmt.Errorf("invalid config value: \"%s\" %s", s, s2)
}
func wrapInvalidConfigValue(s string, err error) error {
return fmt.Errorf("invalid config value: \"%s\" %w", s, err)
}
func updateVersion(cfg *Config, opts *ConfigOptions) error {
defer func() {
if r := recover(); r != nil {
// Do nothing
}
}()
if cfg.Version != constants.Version {
for _, f := range opts.OnVersionChange {
f(cfg.Version, constants.Version)
}
cfg.Version = constants.Version
}
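// Persist the current version to the config file so the version-change callbacks do not fire again on the next start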
viper.Set("version", constants.Version)
return viper.WriteConfig()
}
func expandEnvironmentValues(cfg *Config) {
defer func() {
if r := recover(); r != nil {
// Do nothing
}
}()
cfg.Web.AssetDir = filepath.FromSlash(os.ExpandEnv(cfg.Web.AssetDir))
cfg.Cache.Dir = filepath.FromSlash(os.ExpandEnv(cfg.Cache.Dir))
cfg.Cache.TranscodeDir = filepath.FromSlash(os.ExpandEnv(cfg.Cache.TranscodeDir))
cfg.Logs.Dir = filepath.FromSlash(os.ExpandEnv(cfg.Logs.Dir))
cfg.Manga.DownloadDir = filepath.FromSlash(os.ExpandEnv(cfg.Manga.DownloadDir))
cfg.Manga.LocalDir = filepath.FromSlash(os.ExpandEnv(cfg.Manga.LocalDir))
cfg.Offline.Dir = filepath.FromSlash(os.ExpandEnv(cfg.Offline.Dir))
cfg.Offline.AssetDir = filepath.FromSlash(os.ExpandEnv(cfg.Offline.AssetDir))
cfg.Extensions.Dir = filepath.FromSlash(os.ExpandEnv(cfg.Extensions.Dir))
}
// createConfigFile creates a default config file if it doesn't exist
func createConfigFile(configPath string) error {
_, err := os.Stat(configPath)
if os.IsNotExist(err) {
if err := os.MkdirAll(filepath.Dir(configPath), 0700); err != nil {
return err
}
if err := viper.WriteConfig(); err != nil {
return err
}
}
return nil
}
func initAppDataDir(definedDataDir string, logger *zerolog.Logger) (dataDir string, configPath string, err error) {
// User defined data directory
if definedDataDir != "" {
// Expand environment variables
definedDataDir = filepath.FromSlash(os.ExpandEnv(definedDataDir))
if !filepath.IsAbs(definedDataDir) {
return "", "", errors.New("app: Data directory path must be absolute")
}
// Replace the default data directory
dataDir = definedDataDir
logger.Trace().Str("dataDir", dataDir).Msg("app: Overriding default data directory")
} else {
// Default OS data directory
// windows: %APPDATA%
// unix: $XDG_CONFIG_HOME or $HOME
// darwin: $HOME/Library/Application Support
dataDir, err = os.UserConfigDir()
if err != nil {
return "", "", err
}
// Get the app directory
dataDir = filepath.Join(dataDir, "Seanime")
}
// Create data dir if it doesn't exist
if err := os.MkdirAll(dataDir, 0700); err != nil {
return "", "", err
}
// Get the config file path
// Normalize the config file path
configPath = filepath.FromSlash(filepath.Join(dataDir, constants.ConfigFileName))
// Normalize the data directory path
dataDir = filepath.FromSlash(dataDir)
return
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
func loadLogo(embeddedLogo []byte, dataDir string) (err error) {
defer util.HandlePanicInModuleWithError("core/loadLogo", &err)
if len(embeddedLogo) == 0 {
return nil
}
logoPath := filepath.Join(dataDir, "logo.png")
if _, err = os.Stat(logoPath); os.IsNotExist(err) {
if err = os.WriteFile(logoPath, embeddedLogo, 0644); err != nil {
return err
}
}
return nil
}

View File

@@ -0,0 +1,93 @@
package core
import (
"embed"
"io/fs"
"log"
"net/http"
"path/filepath"
"strings"
"time"
"github.com/goccy/go-json"
"github.com/labstack/echo/v4"
"github.com/labstack/echo/v4/middleware"
)
func NewEchoApp(app *App, webFS *embed.FS) *echo.Echo {
e := echo.New()
e.HideBanner = true
e.HidePort = true
e.Debug = false
e.JSONSerializer = &CustomJSONSerializer{}
distFS, err := fs.Sub(webFS, "web")
if err != nil {
log.Fatal(err)
}
e.Use(middleware.StaticWithConfig(middleware.StaticConfig{
Filesystem: http.FS(distFS),
Browse: true,
HTML5: true,
Skipper: func(c echo.Context) bool {
cUrl := c.Request().URL
if strings.HasPrefix(cUrl.RequestURI(), "/api") ||
strings.HasPrefix(cUrl.RequestURI(), "/events") ||
strings.HasPrefix(cUrl.RequestURI(), "/assets") ||
strings.HasPrefix(cUrl.RequestURI(), "/manga-downloads") ||
strings.HasPrefix(cUrl.RequestURI(), "/offline-assets") {
return true // Continue to the next handler
}
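// Map extensionless paths to their exported .html files so client-side routes resolve to static pages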
if !strings.HasSuffix(cUrl.Path, ".html") && filepath.Ext(cUrl.Path) == "" {
cUrl.Path = cUrl.Path + ".html"
}
if cUrl.Path == "/.html" {
cUrl.Path = "/index.html"
}
return false // Continue to the filesystem handler
},
}))
app.Logger.Info().Msgf("app: Serving embedded web interface")
// Serve web assets
app.Logger.Info().Msgf("app: Web assets path: %s", app.Config.Web.AssetDir)
e.Static("/assets", app.Config.Web.AssetDir)
// Serve manga downloads
if app.Config.Manga.DownloadDir != "" {
app.Logger.Info().Msgf("app: Manga downloads path: %s", app.Config.Manga.DownloadDir)
e.Static("/manga-downloads", app.Config.Manga.DownloadDir)
}
// Serve offline assets
app.Logger.Info().Msgf("app: Offline assets path: %s", app.Config.Offline.AssetDir)
e.Static("/offline-assets", app.Config.Offline.AssetDir)
return e
}
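// CustomJSONSerializer implements echo's JSONSerializer interface using goccy/go-json instead of the standard library encoder.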
type CustomJSONSerializer struct{}
func (j *CustomJSONSerializer) Serialize(c echo.Context, i interface{}, indent string) error {
enc := json.NewEncoder(c.Response())
return enc.Encode(i)
}
func (j *CustomJSONSerializer) Deserialize(c echo.Context, i interface{}) error {
dec := json.NewDecoder(c.Request().Body)
return dec.Decode(i)
}
func RunEchoServer(app *App, e *echo.Echo) {
app.Logger.Info().Msgf("app: Server Address: %s", app.Config.GetServerAddr())
// Start the server
go func() {
log.Fatal(e.Start(app.Config.GetServerAddr()))
}()
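// Brief pause to let the listener start before logging the address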
time.Sleep(100 * time.Millisecond)
app.Logger.Info().Msg("app: Seanime started at " + app.Config.GetServerURI())
}

View File

@@ -0,0 +1,263 @@
package core
import (
"seanime/internal/extension"
"seanime/internal/extension_repo"
manga_providers "seanime/internal/manga/providers"
onlinestream_providers "seanime/internal/onlinestream/providers"
"seanime/internal/torrents/animetosho"
"seanime/internal/torrents/nyaa"
"seanime/internal/torrents/seadex"
"seanime/internal/util"
"github.com/rs/zerolog"
)
func LoadExtensions(extensionRepository *extension_repo.Repository, logger *zerolog.Logger, config *Config) {
//
// Built-in manga providers
//
extensionRepository.ReloadBuiltInExtension(extension.Extension{
ID: "comick",
Name: "ComicK",
Version: "",
ManifestURI: "builtin",
Language: extension.LanguageGo,
Type: extension.TypeMangaProvider,
Author: "Seanime",
Description: "",
Lang: "en",
Icon: "https://raw.githubusercontent.com/5rahim/hibike/main/icons/comick.webp",
}, manga_providers.NewComicK(logger))
extensionRepository.ReloadBuiltInExtension(extension.Extension{
ID: "comick-multi",
Name: "ComicK (Multi)",
Version: "",
ManifestURI: "builtin",
Language: extension.LanguageGo,
Type: extension.TypeMangaProvider,
Author: "Seanime",
Description: "",
Lang: "multi",
Icon: "https://raw.githubusercontent.com/5rahim/hibike/main/icons/comick.webp",
}, manga_providers.NewComicKMulti(logger))
extensionRepository.ReloadBuiltInExtension(extension.Extension{
ID: "mangapill",
Name: "Mangapill",
Version: "",
ManifestURI: "builtin",
Language: extension.LanguageGo,
Type: extension.TypeMangaProvider,
Author: "Seanime",
Lang: "en",
Icon: "https://raw.githubusercontent.com/5rahim/hibike/main/icons/mangapill.png",
}, manga_providers.NewMangapill(logger))
extensionRepository.ReloadBuiltInExtension(extension.Extension{
ID: "weebcentral",
Name: "WeebCentral",
Version: "",
ManifestURI: "builtin",
Language: extension.LanguageGo,
Type: extension.TypeMangaProvider,
Author: "Seanime",
Lang: "en",
Icon: "https://raw.githubusercontent.com/5rahim/hibike/main/icons/weebcentral.png",
}, manga_providers.NewWeebCentral(logger))
extensionRepository.ReloadBuiltInExtension(extension.Extension{
ID: "mangadex",
Name: "Mangadex",
Version: "",
ManifestURI: "builtin",
Language: extension.LanguageGo,
Type: extension.TypeMangaProvider,
Author: "Seanime",
Lang: "en",
Icon: "https://raw.githubusercontent.com/5rahim/hibike/main/icons/mangadex.png",
}, manga_providers.NewMangadex(logger))
//extensionRepository.ReloadBuiltInExtension(extension.Extension{
// ID: "manganato",
// Name: "Manganato",
// Version: "",
// ManifestURI: "builtin",
// Language: extension.LanguageGo,
// Type: extension.TypeMangaProvider,
// Author: "Seanime",
// Lang: "en",
// Icon: "https://raw.githubusercontent.com/5rahim/hibike/main/icons/manganato.png",
//}, manga_providers.NewManganato(logger))
extensionRepository.ReloadBuiltInExtension(extension.Extension{
ID: manga_providers.LocalProvider,
Name: "Local",
Version: "",
ManifestURI: "builtin",
Language: extension.LanguageGo,
Type: extension.TypeMangaProvider,
Author: "Seanime",
Lang: "multi",
Icon: "https://raw.githubusercontent.com/5rahim/hibike/main/icons/local-manga.png",
}, manga_providers.NewLocal(config.Manga.LocalDir, logger))
//
// Built-in online stream providers
//
//extensionRepository.LoadBuiltInOnlinestreamProviderExtension(extension.Extension{
// ID: "gogoanime",
// Name: "Gogoanime",
// Version: "",
// ManifestURI: "builtin",
// Language: extension.LanguageGo,
// Type: extension.TypeOnlinestreamProvider,
// Author: "Seanime",
// Lang: "en",
// Icon: "https://raw.githubusercontent.com/5rahim/hibike/main/icons/gogoanime.png",
//}, onlinestream_providers.NewGogoanime(logger))
//extensionRepository.LoadBuiltInOnlinestreamProviderExtension(extension.Extension{
// ID: "zoro",
// Name: "Hianime",
// Version: "",
// ManifestURI: "builtin",
// Language: extension.LanguageGo,
// Type: extension.TypeOnlinestreamProvider,
// Author: "Seanime",
// Lang: "en",
// Icon: "https://raw.githubusercontent.com/5rahim/hibike/main/icons/hianime.png",
//}, onlinestream_providers.NewZoro(logger))
extensionRepository.ReloadBuiltInExtension(extension.Extension{
ID: "animepahe",
Name: "Animepahe",
Version: "",
ManifestURI: "builtin",
Language: extension.LanguageTypescript,
Type: extension.TypeOnlinestreamProvider,
Author: "Seanime",
Lang: "en",
Icon: "https://raw.githubusercontent.com/5rahim/hibike/main/icons/animepahe.png",
Payload: onlinestream_providers.AnimepahePayload,
}, nil)
//
// Built-in torrent providers
//
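// Shared user config for the Nyaa providers; the default API URL is stored in encoded form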
nyaaUserConfig := extension.UserConfig{
Version: 1,
Fields: []extension.ConfigField{
{
Name: "apiUrl",
Label: "API URL",
Type: extension.ConfigFieldTypeText,
Default: util.Decode("aHR0cHM6Ly9ueWFhLnNpLz9wYWdlPXJzcyZxPSs="),
},
},
}
extensionRepository.ReloadBuiltInExtension(extension.Extension{
ID: "nyaa",
Name: "Nyaa",
Version: "",
ManifestURI: "builtin",
Language: extension.LanguageGo,
Type: extension.TypeAnimeTorrentProvider,
Author: "Seanime",
Lang: "en",
Icon: "https://raw.githubusercontent.com/5rahim/hibike/main/icons/nyaa.png",
UserConfig: &nyaaUserConfig,
}, nyaa.NewProvider(logger, "anime-eng"))
extensionRepository.ReloadBuiltInExtension(extension.Extension{
ID: "nyaa-non-eng",
Name: "Nyaa (Non-English)",
Version: "",
ManifestURI: "builtin",
Language: extension.LanguageGo,
Type: extension.TypeAnimeTorrentProvider,
Author: "Seanime",
Lang: "multi",
Icon: "https://raw.githubusercontent.com/5rahim/hibike/main/icons/nyaa.png",
UserConfig: &nyaaUserConfig,
}, nyaa.NewProvider(logger, "anime-non-eng"))
extensionRepository.ReloadBuiltInExtension(extension.Extension{
ID: "nyaa-sukebei",
Name: "Nyaa Sukebei",
Version: "",
ManifestURI: "builtin",
Language: extension.LanguageGo,
Type: extension.TypeAnimeTorrentProvider,
Author: "Seanime",
Lang: "en",
Icon: "https://raw.githubusercontent.com/5rahim/hibike/main/icons/nyaa.png",
UserConfig: &extension.UserConfig{
Version: 1,
Fields: []extension.ConfigField{
{
Name: "apiUrl",
Label: "API URL",
Type: extension.ConfigFieldTypeText,
Default: util.Decode("aHR0cHM6Ly9zdWtlYmVpLm55YWEuc2kvP3BhZ2U9cnNzJnE9Kw=="),
},
},
},
}, nyaa.NewSukebeiProvider(logger))
extensionRepository.ReloadBuiltInExtension(extension.Extension{
ID: "animetosho",
Name: "AnimeTosho",
Version: "",
ManifestURI: "builtin",
Language: extension.LanguageGo,
Type: extension.TypeAnimeTorrentProvider,
Author: "Seanime",
Lang: "en",
Icon: "https://raw.githubusercontent.com/5rahim/hibike/main/icons/animetosho.png",
}, animetosho.NewProvider(logger))
extensionRepository.ReloadBuiltInExtension(extension.Extension{
ID: "seadex",
Name: "SeaDex",
Version: "",
ManifestURI: "builtin",
Language: extension.LanguageGo,
Type: extension.TypeAnimeTorrentProvider,
Author: "Seanime",
Lang: "en",
Icon: "https://raw.githubusercontent.com/5rahim/hibike/main/icons/seadex.png",
UserConfig: &extension.UserConfig{
Version: 1,
Fields: []extension.ConfigField{
{
Name: "apiUrl",
Label: "API URL",
Type: extension.ConfigFieldTypeText,
Default: util.Decode("aHR0cHM6Ly9yZWxlYXNlcy5tb2UvYXBpL2NvbGxlY3Rpb25zL2VudHJpZXMvcmVjb3Jkcw=="),
},
},
},
}, seadex.NewProvider(logger))
extensionRepository.ReloadExternalExtensions()
}
func (a *App) AddExtensionBankToConsumers() {
var consumers = []extension.Consumer{
a.MangaRepository,
a.OnlinestreamRepository,
a.TorrentRepository,
}
for _, consumer := range consumers {
consumer.InitExtensionBank(a.ExtensionRepository.GetExtensionBank())
}
}

View File

@@ -0,0 +1,36 @@
package core
import (
"github.com/rs/zerolog"
"github.com/spf13/viper"
)
type (
FeatureFlags struct {
MainServerTorrentStreaming bool
}
ExperimentalFeatureFlags struct {
}
)
// NewFeatureFlags initializes the feature flags
func NewFeatureFlags(cfg *Config, logger *zerolog.Logger) FeatureFlags {
ff := FeatureFlags{
MainServerTorrentStreaming: viper.GetBool("experimental.mainServerTorrentStreaming"),
}
checkExperimentalFeatureFlags(&ff, cfg, logger)
return ff
}
func checkExperimentalFeatureFlags(ff *FeatureFlags, cfg *Config, logger *zerolog.Logger) {
if ff.MainServerTorrentStreaming {
logger.Warn().Msg("app: [Feature flag] 'Main Server Torrent Streaming' experimental feature is enabled")
}
}
func (ff *FeatureFlags) IsMainServerTorrentStreamingEnabled() bool {
return ff.MainServerTorrentStreaming
}

View File

@@ -0,0 +1,43 @@
package core
import (
"flag"
"fmt"
"strings"
)
type (
SeanimeFlags struct {
DataDir string
Update bool
IsDesktopSidecar bool
}
)
func GetSeanimeFlags() SeanimeFlags {
// Help flag
flag.Usage = func() {
fmt.Printf("Self-hosted, user-friendly, media server for anime and manga enthusiasts.\n\n")
fmt.Printf("Usage:\n seanime [flags]\n\n")
fmt.Printf("Flags:\n")
fmt.Printf(" -datadir, --datadir string")
fmt.Printf(" directory that contains all Seanime data\n")
fmt.Printf(" -update")
fmt.Printf(" update the application\n")
fmt.Printf(" -h show this help message\n")
}
// Parse flags
var dataDir string
flag.StringVar(&dataDir, "datadir", "", "Directory that contains all Seanime data")
var update bool
flag.BoolVar(&update, "update", false, "Update the application")
var isDesktopSidecar bool
flag.BoolVar(&isDesktopSidecar, "desktop-sidecar", false, "Run as the desktop sidecar")
flag.Parse()
return SeanimeFlags{
DataDir: strings.TrimSpace(dataDir),
Update: update,
IsDesktopSidecar: isDesktopSidecar,
}
}

View File

@@ -0,0 +1,19 @@
package core
import (
"seanime/internal/util"
"time"
)
// GetServerPasswordHMACAuth returns an HMAC authenticator using the hashed server password as the base secret
// This is used for server endpoints that don't use Nakama
func (a *App) GetServerPasswordHMACAuth() *util.HMACAuth {
var secret string
if a.Config != nil && a.Config.Server.Password != "" {
secret = a.ServerPasswordHash
} else {
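// No server password configured: fall back to a fixed default secret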
secret = "seanime-default-secret"
}
return util.NewHMACAuth(secret, 24*time.Hour)
}

View File

@@ -0,0 +1,83 @@
package core
import (
"github.com/rs/zerolog"
"os"
"path/filepath"
"sort"
"strings"
"time"
)
func TrimLogEntries(dir string, logger *zerolog.Logger) {
// Get all log files in the directory
entries, err := os.ReadDir(dir)
if err != nil {
logger.Error().Err(err).Msg("core: Failed to read log directory")
return
}
// Get the total size of all log entries
var totalSize int64
for _, file := range entries {
if file.IsDir() {
continue
}
info, err := file.Info()
if err != nil {
continue
}
totalSize += info.Size()
}
var files []os.FileInfo
for _, entry := range entries {
if entry.IsDir() {
continue
}
info, err := entry.Info()
if err != nil {
continue
}
files = append(files, info)
}
var serverLogFiles []os.FileInfo
var scanLogFiles []os.FileInfo
for _, file := range files {
if strings.HasPrefix(file.Name(), "seanime-") {
serverLogFiles = append(serverLogFiles, file)
} else if strings.Contains(file.Name(), "-scan") {
scanLogFiles = append(scanLogFiles, file)
}
}
for _, _files := range [][]os.FileInfo{serverLogFiles, scanLogFiles} {
files := _files
if len(files) <= 1 {
continue
}
// Sort from newest to oldest
sort.Slice(files, func(i, j int) bool {
return files[i].ModTime().After(files[j].ModTime())
})
// Delete all log files older than 14 days
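// The newest file in each group is always kept; only older files past the retention window are removed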
deleted := 0
for i := 1; i < len(files); i++ {
if time.Since(files[i].ModTime()) > 14*24*time.Hour {
err := os.Remove(filepath.Join(dir, files[i].Name()))
if err != nil {
continue
}
deleted++
}
}
if deleted > 0 {
logger.Info().Msgf("app: Deleted %d log files older than 14 days", deleted)
}
}
}

View File

@@ -0,0 +1,117 @@
package core
import (
"seanime/internal/constants"
"seanime/internal/util"
"strings"
"github.com/Masterminds/semver/v3"
)
func (a *App) runMigrations() {
go func() {
done := false
defer func() {
if done {
a.Logger.Info().Msg("app: Version migration complete")
}
}()
defer util.HandlePanicThen(func() {
a.Logger.Error().Msg("app: runMigrations failed")
})
previousVersion, err := semver.NewVersion(a.previousVersion)
if err != nil {
a.Logger.Error().Err(err).Msg("app: Failed to parse previous version")
return
}
if a.previousVersion != constants.Version {
hasUpdated := util.VersionIsOlderThan(a.previousVersion, constants.Version)
//-----------------------------------------------------------------------------------------
// DEVNOTE: 1.2.0 uses an incorrect manga cache format for MangaSee pages
// This migration will remove all manga cache files that start with "manga_"
if a.previousVersion == "1.2.0" && hasUpdated {
a.Logger.Debug().Msg("app: Executing version migration task")
err := a.FileCacher.RemoveAllBy(func(filename string) bool {
return strings.HasPrefix(filename, "manga_")
})
if err != nil {
a.Logger.Error().Err(err).Msg("app: MIGRATION FAILED; READ THIS")
a.Logger.Error().Msg("app: Failed to remove 'manga' cache files, please clear them manually by going to the settings. Ignore this message if you have no manga cache files.")
}
done = true
}
//-----------------------------------------------------------------------------------------
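// Also clear the manga cache when updating from any version between 1.2.0 and 1.3.0 (inclusive)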
c1, _ := semver.NewConstraint("<= 1.3.0, >= 1.2.0")
if c1.Check(previousVersion) {
a.Logger.Debug().Msg("app: Executing version migration task")
err := a.FileCacher.RemoveAllBy(func(filename string) bool {
return strings.HasPrefix(filename, "manga_")
})
if err != nil {
a.Logger.Error().Err(err).Msg("app: MIGRATION FAILED; READ THIS")
a.Logger.Error().Msg("app: Failed to remove 'manga' cache files, please clear them manually by going to the settings. Ignore this message if you have no manga cache files.")
}
done = true
}
//-----------------------------------------------------------------------------------------
// DEVNOTE: 1.5.6 uses a different cache format for media streaming info
// -> Delete the cache files when updated from any version between 1.5.0 and 1.5.5
c2, _ := semver.NewConstraint("<= 1.5.5, >= 1.5.0")
if c2.Check(previousVersion) {
a.Logger.Debug().Msg("app: Executing version migration task")
err := a.FileCacher.RemoveAllBy(func(filename string) bool {
return strings.HasPrefix(filename, "mediastream_mediainfo_")
})
if err != nil {
a.Logger.Error().Err(err).Msg("app: MIGRATION FAILED; READ THIS")
a.Logger.Error().Msg("app: Failed to remove transcoding cache files, please clear them manually by going to the settings. Ignore this message if you have no transcoding cache files.")
}
done = true
}
//-----------------------------------------------------------------------------------------
// DEVNOTE: 2.0.0 uses a different cache format for online streaming
// -> Delete the cache files when updated from a version older than 2.0.0 and at least 1.5.0
c3, _ := semver.NewConstraint("< 2.0.0, >= 1.5.0")
if c3.Check(previousVersion) {
a.Logger.Debug().Msg("app: Executing version migration task")
err := a.FileCacher.RemoveAllBy(func(filename string) bool {
return strings.HasPrefix(filename, "onlinestream_")
})
if err != nil {
a.Logger.Error().Err(err).Msg("app: MIGRATION FAILED; READ THIS")
a.Logger.Error().Msg("app: Failed to remove online streaming cache files, please clear them manually by going to the settings. Ignore this message if you have no online streaming cache files.")
}
done = true
}
//-----------------------------------------------------------------------------------------
// DEVNOTE: 2.1.0 refactored the manga cache format
// -> Delete the cache files when updated from a version older than 2.1.0
c4, _ := semver.NewConstraint("< 2.1.0")
if c4.Check(previousVersion) {
a.Logger.Debug().Msg("app: Executing version migration task")
err := a.FileCacher.RemoveAllBy(func(filename string) bool {
return strings.HasPrefix(filename, "manga_")
})
if err != nil {
a.Logger.Error().Err(err).Msg("app: MIGRATION FAILED; READ THIS")
a.Logger.Error().Msg("app: Failed to remove 'manga' cache files, please clear them manually by going to the settings. Ignore this message if you have no manga cache files.")
}
done = true
}
}
}()
}

View File

@@ -0,0 +1,727 @@
package core
import (
"runtime"
"seanime/internal/api/anilist"
"seanime/internal/continuity"
"seanime/internal/database/db"
"seanime/internal/database/db_bridge"
"seanime/internal/database/models"
debrid_client "seanime/internal/debrid/client"
"seanime/internal/directstream"
discordrpc_presence "seanime/internal/discordrpc/presence"
"seanime/internal/events"
"seanime/internal/library/anime"
"seanime/internal/library/autodownloader"
"seanime/internal/library/autoscanner"
"seanime/internal/library/fillermanager"
"seanime/internal/library/playbackmanager"
"seanime/internal/manga"
"seanime/internal/mediaplayers/iina"
"seanime/internal/mediaplayers/mediaplayer"
"seanime/internal/mediaplayers/mpchc"
"seanime/internal/mediaplayers/mpv"
"seanime/internal/mediaplayers/vlc"
"seanime/internal/mediastream"
"seanime/internal/nakama"
"seanime/internal/nativeplayer"
"seanime/internal/notifier"
"seanime/internal/plugin"
"seanime/internal/torrent_clients/qbittorrent"
"seanime/internal/torrent_clients/torrent_client"
"seanime/internal/torrent_clients/transmission"
"seanime/internal/torrents/torrent"
"seanime/internal/torrentstream"
"seanime/internal/user"
"github.com/cli/browser"
"github.com/rs/zerolog"
)
// initModulesOnce will initialize modules that need to persist.
// This function is called once after the App instance is created.
// The settings of these modules will be set/refreshed in InitOrRefreshModules.
func (a *App) initModulesOnce() {
a.LocalManager.SetRefreshAnilistCollectionsFunc(func() {
_, _ = a.RefreshAnimeCollection()
_, _ = a.RefreshMangaCollection()
})
plugin.GlobalAppContext.SetModulesPartial(plugin.AppContextModules{
OnRefreshAnilistAnimeCollection: func() {
_, _ = a.RefreshAnimeCollection()
},
OnRefreshAnilistMangaCollection: func() {
_, _ = a.RefreshMangaCollection()
},
})
// +---------------------+
// | Discord RPC |
// +---------------------+
a.DiscordPresence = discordrpc_presence.New(nil, a.Logger)
a.AddCleanupFunction(func() {
a.DiscordPresence.Close()
})
plugin.GlobalAppContext.SetModulesPartial(plugin.AppContextModules{
DiscordPresence: a.DiscordPresence,
})
// +---------------------+
// | Filler |
// +---------------------+
a.FillerManager = fillermanager.New(&fillermanager.NewFillerManagerOptions{
DB: a.Database,
Logger: a.Logger,
})
plugin.GlobalAppContext.SetModulesPartial(plugin.AppContextModules{
FillerManager: a.FillerManager,
})
// +---------------------+
// | Continuity |
// +---------------------+
a.ContinuityManager = continuity.NewManager(&continuity.NewManagerOptions{
FileCacher: a.FileCacher,
Logger: a.Logger,
Database: a.Database,
})
// +---------------------+
// | Playback Manager |
// +---------------------+
// Playback Manager
a.PlaybackManager = playbackmanager.New(&playbackmanager.NewPlaybackManagerOptions{
Logger: a.Logger,
WSEventManager: a.WSEventManager,
Platform: a.AnilistPlatform,
MetadataProvider: a.MetadataProvider,
Database: a.Database,
DiscordPresence: a.DiscordPresence,
IsOffline: a.IsOffline(),
ContinuityManager: a.ContinuityManager,
RefreshAnimeCollectionFunc: func() {
_, _ = a.RefreshAnimeCollection()
},
})
// +---------------------+
// | Torrent Repository |
// +---------------------+
a.TorrentRepository = torrent.NewRepository(&torrent.NewRepositoryOptions{
Logger: a.Logger,
MetadataProvider: a.MetadataProvider,
})
// +---------------------+
// | Manga Downloader |
// +---------------------+
a.MangaDownloader = manga.NewDownloader(&manga.NewDownloaderOptions{
Database: a.Database,
Logger: a.Logger,
WSEventManager: a.WSEventManager,
DownloadDir: a.Config.Manga.DownloadDir,
Repository: a.MangaRepository,
IsOffline: a.IsOffline(),
})
a.MangaDownloader.Start()
// +---------------------+
// | Media Stream |
// +---------------------+
a.MediastreamRepository = mediastream.NewRepository(&mediastream.NewRepositoryOptions{
Logger: a.Logger,
WSEventManager: a.WSEventManager,
FileCacher: a.FileCacher,
})
a.AddCleanupFunction(func() {
a.MediastreamRepository.OnCleanup()
})
// +---------------------+
// | Native Player |
// +---------------------+
a.NativePlayer = nativeplayer.New(nativeplayer.NewNativePlayerOptions{
WsEventManager: a.WSEventManager,
Logger: a.Logger,
})
// +---------------------+
// | Direct Stream |
// +---------------------+
a.DirectStreamManager = directstream.NewManager(directstream.NewManagerOptions{
Logger: a.Logger,
WSEventManager: a.WSEventManager,
ContinuityManager: a.ContinuityManager,
MetadataProvider: a.MetadataProvider,
DiscordPresence: a.DiscordPresence,
Platform: a.AnilistPlatform,
RefreshAnimeCollectionFunc: func() {
_, _ = a.RefreshAnimeCollection()
},
IsOffline: a.IsOffline(),
NativePlayer: a.NativePlayer,
})
// +---------------------+
// | Torrent Stream |
// +---------------------+
a.TorrentstreamRepository = torrentstream.NewRepository(&torrentstream.NewRepositoryOptions{
Logger: a.Logger,
BaseAnimeCache: anilist.NewBaseAnimeCache(),
CompleteAnimeCache: anilist.NewCompleteAnimeCache(),
MetadataProvider: a.MetadataProvider,
TorrentRepository: a.TorrentRepository,
Platform: a.AnilistPlatform,
PlaybackManager: a.PlaybackManager,
WSEventManager: a.WSEventManager,
Database: a.Database,
DirectStreamManager: a.DirectStreamManager,
NativePlayer: a.NativePlayer,
})
// +---------------------+
// | Debrid Client Repo |
// +---------------------+
a.DebridClientRepository = debrid_client.NewRepository(&debrid_client.NewRepositoryOptions{
Logger: a.Logger,
WSEventManager: a.WSEventManager,
Database: a.Database,
MetadataProvider: a.MetadataProvider,
Platform: a.AnilistPlatform,
PlaybackManager: a.PlaybackManager,
TorrentRepository: a.TorrentRepository,
DirectStreamManager: a.DirectStreamManager,
})
plugin.GlobalAppContext.SetModulesPartial(plugin.AppContextModules{
PlaybackManager: a.PlaybackManager,
MangaRepository: a.MangaRepository,
})
// +---------------------+
// | Auto Downloader |
// +---------------------+
a.AutoDownloader = autodownloader.New(&autodownloader.NewAutoDownloaderOptions{
Logger: a.Logger,
TorrentClientRepository: a.TorrentClientRepository,
TorrentRepository: a.TorrentRepository,
Database: a.Database,
WSEventManager: a.WSEventManager,
MetadataProvider: a.MetadataProvider,
DebridClientRepository: a.DebridClientRepository,
IsOffline: a.IsOffline(),
})
// This is run in a goroutine
a.AutoDownloader.Start()
// +---------------------+
// | Auto Scanner |
// +---------------------+
a.AutoScanner = autoscanner.New(&autoscanner.NewAutoScannerOptions{
Database: a.Database,
Platform: a.AnilistPlatform,
Logger: a.Logger,
WSEventManager: a.WSEventManager,
Enabled: false, // Will be set in InitOrRefreshModules
AutoDownloader: a.AutoDownloader,
MetadataProvider: a.MetadataProvider,
LogsDir: a.Config.Logs.Dir,
})
// This is run in a goroutine
a.AutoScanner.Start()
// +---------------------+
// | Nakama |
// +---------------------+
a.NakamaManager = nakama.NewManager(&nakama.NewManagerOptions{
Logger: a.Logger,
WSEventManager: a.WSEventManager,
PlaybackManager: a.PlaybackManager,
TorrentstreamRepository: a.TorrentstreamRepository,
DebridClientRepository: a.DebridClientRepository,
Platform: a.AnilistPlatform,
ServerHost: a.Config.Server.Host,
ServerPort: a.Config.Server.Port,
})
}
// HandleNewDatabaseEntries initializes essential database collections.
// It creates an empty local files collection if one does not already exist.
func HandleNewDatabaseEntries(database *db.Database, logger *zerolog.Logger) {
// Create initial empty local files collection if none exists
if _, _, err := db_bridge.GetLocalFiles(database); err != nil {
_, err := db_bridge.InsertLocalFiles(database, make([]*anime.LocalFile, 0))
if err != nil {
logger.Fatal().Err(err).Msgf("app: Failed to initialize local files in the database")
}
}
}
// InitOrRefreshModules will initialize or refresh modules that depend on settings.
// This function is called:
// - After the App instance is created
// - After settings are updated.
func (a *App) InitOrRefreshModules() {
a.moduleMu.Lock()
defer a.moduleMu.Unlock()
a.Logger.Debug().Msgf("app: Refreshing modules")
// Stop watching if already watching
if a.Watcher != nil {
a.Watcher.StopWatching()
}
// If Discord presence is already initialized, close it
if a.DiscordPresence != nil {
a.DiscordPresence.Close()
}
// Get settings from database
settings, err := a.Database.GetSettings()
if err != nil || settings == nil {
a.Logger.Warn().Msg("app: Did not initialize modules, no settings found")
return
}
a.Settings = settings // Store settings instance in app
if settings.Library != nil {
a.LibraryDir = settings.GetLibrary().LibraryPath
}
// +---------------------+
// | Module settings |
// +---------------------+
// Refresh settings of modules that were initialized in initModulesOnce
notifier.GlobalNotifier.SetSettings(a.Config.Data.AppDataDir, a.Settings.GetNotifications(), a.Logger)
// Refresh updater settings
if settings.Library != nil {
plugin.GlobalAppContext.SetModulesPartial(plugin.AppContextModules{
AnimeLibraryPaths: a.Database.AllLibraryPathsFromSettings(settings),
})
if a.Updater != nil {
a.Updater.SetEnabled(!settings.Library.DisableUpdateCheck)
}
// Refresh auto scanner settings
if a.AutoScanner != nil {
a.AutoScanner.SetSettings(*settings.Library)
}
// Torrent Repository
a.TorrentRepository.SetSettings(&torrent.RepositorySettings{
DefaultAnimeProvider: settings.Library.TorrentProvider,
})
}
if settings.MediaPlayer != nil {
a.MediaPlayer.VLC = &vlc.VLC{
Host: settings.MediaPlayer.Host,
Port: settings.MediaPlayer.VlcPort,
Password: settings.MediaPlayer.VlcPassword,
Path: settings.MediaPlayer.VlcPath,
Logger: a.Logger,
}
a.MediaPlayer.MpcHc = &mpchc.MpcHc{
Host: settings.MediaPlayer.Host,
Port: settings.MediaPlayer.MpcPort,
Path: settings.MediaPlayer.MpcPath,
Logger: a.Logger,
}
a.MediaPlayer.Mpv = mpv.New(a.Logger, settings.MediaPlayer.MpvSocket, settings.MediaPlayer.MpvPath, settings.MediaPlayer.MpvArgs)
a.MediaPlayer.Iina = iina.New(a.Logger, settings.MediaPlayer.IinaSocket, settings.MediaPlayer.IinaPath, settings.MediaPlayer.IinaArgs)
// Set media player repository
a.MediaPlayerRepository = mediaplayer.NewRepository(&mediaplayer.NewRepositoryOptions{
Logger: a.Logger,
Default: settings.MediaPlayer.Default,
VLC: a.MediaPlayer.VLC,
MpcHc: a.MediaPlayer.MpcHc,
Mpv: a.MediaPlayer.Mpv, // Socket
Iina: a.MediaPlayer.Iina,
WSEventManager: a.WSEventManager,
ContinuityManager: a.ContinuityManager,
})
a.PlaybackManager.SetMediaPlayerRepository(a.MediaPlayerRepository)
a.PlaybackManager.SetSettings(&playbackmanager.Settings{
AutoPlayNextEpisode: a.Settings.GetLibrary().AutoPlayNextEpisode,
})
a.DirectStreamManager.SetSettings(&directstream.Settings{
AutoPlayNextEpisode: a.Settings.GetLibrary().AutoPlayNextEpisode,
AutoUpdateProgress: a.Settings.GetLibrary().AutoUpdateProgress,
})
a.TorrentstreamRepository.SetMediaPlayerRepository(a.MediaPlayerRepository)
plugin.GlobalAppContext.SetModulesPartial(plugin.AppContextModules{
MediaPlayerRepository: a.MediaPlayerRepository,
})
} else {
a.Logger.Warn().Msg("app: Did not initialize media player module, no settings found")
}
// +---------------------+
// | Torrents |
// +---------------------+
if settings.Torrent != nil {
// Init qBittorrent
qbit := qbittorrent.NewClient(&qbittorrent.NewClientOptions{
Logger: a.Logger,
Username: settings.Torrent.QBittorrentUsername,
Password: settings.Torrent.QBittorrentPassword,
Port: settings.Torrent.QBittorrentPort,
Host: settings.Torrent.QBittorrentHost,
Path: settings.Torrent.QBittorrentPath,
Tags: settings.Torrent.QBittorrentTags,
})
// Login to qBittorrent
go func() {
if settings.Torrent.Default == "qbittorrent" {
err = qbit.Login()
if err != nil {
a.Logger.Error().Err(err).Msg("app: Failed to login to qBittorrent")
} else {
a.Logger.Info().Msg("app: Logged in to qBittorrent")
}
}
}()
// Init Transmission
trans, err := transmission.New(&transmission.NewTransmissionOptions{
Logger: a.Logger,
Username: settings.Torrent.TransmissionUsername,
Password: settings.Torrent.TransmissionPassword,
Port: settings.Torrent.TransmissionPort,
Host: settings.Torrent.TransmissionHost,
Path: settings.Torrent.TransmissionPath,
})
if err != nil && settings.Torrent.TransmissionUsername != "" && settings.Torrent.TransmissionPassword != "" { // Only log error if username and password are set
a.Logger.Error().Err(err).Msg("app: Failed to initialize transmission client")
}
// Shutdown torrent client first
if a.TorrentClientRepository != nil {
a.TorrentClientRepository.Shutdown()
}
// Torrent Client Repository
a.TorrentClientRepository = torrent_client.NewRepository(&torrent_client.NewRepositoryOptions{
Logger: a.Logger,
QbittorrentClient: qbit,
Transmission: trans,
TorrentRepository: a.TorrentRepository,
Provider: settings.Torrent.Default,
MetadataProvider: a.MetadataProvider,
})
a.TorrentClientRepository.InitActiveTorrentCount(settings.Torrent.ShowActiveTorrentCount, a.WSEventManager)
// Set AutoDownloader qBittorrent client
a.AutoDownloader.SetTorrentClientRepository(a.TorrentClientRepository)
plugin.GlobalAppContext.SetModulesPartial(plugin.AppContextModules{
TorrentClientRepository: a.TorrentClientRepository,
AutoDownloader: a.AutoDownloader,
})
} else {
a.Logger.Warn().Msg("app: Did not initialize torrent client module, no settings found")
}
// +---------------------+
// | AutoDownloader |
// +---------------------+
// Update Auto Downloader - This runs in a goroutine
if settings.AutoDownloader != nil {
a.AutoDownloader.SetSettings(settings.AutoDownloader, settings.Library.TorrentProvider)
}
// +---------------------+
// | Library Watcher |
// +---------------------+
// Initialize library watcher
if settings.Library != nil && len(settings.Library.LibraryPath) > 0 {
go func() {
a.initLibraryWatcher(settings.Library.GetLibraryPaths())
}()
}
// +---------------------+
// | Discord |
// +---------------------+
if settings.Discord != nil && a.DiscordPresence != nil {
a.DiscordPresence.SetSettings(settings.Discord)
}
// +---------------------+
// | Continuity |
// +---------------------+
if settings.Library != nil {
a.ContinuityManager.SetSettings(&continuity.Settings{
WatchContinuityEnabled: settings.Library.EnableWatchContinuity,
})
}
if settings.Manga != nil {
a.MangaRepository.SetSettings(settings)
}
// +---------------------+
// | Nakama |
// +---------------------+
if settings.Nakama != nil {
a.NakamaManager.SetSettings(settings.Nakama)
}
runtime.GC()
a.Logger.Info().Msg("app: Refreshed modules")
}
// InitOrRefreshMediastreamSettings will initialize or refresh the mediastream settings.
// It is called after the App instance is created and after settings are updated.
func (a *App) InitOrRefreshMediastreamSettings() {
var settings *models.MediastreamSettings
var found bool
settings, found = a.Database.GetMediastreamSettings()
if !found {
var err error
settings, err = a.Database.UpsertMediastreamSettings(&models.MediastreamSettings{
BaseModel: models.BaseModel{
ID: 1,
},
TranscodeEnabled: false,
TranscodeHwAccel: "cpu",
TranscodePreset: "fast",
PreTranscodeEnabled: false,
})
if err != nil {
a.Logger.Error().Err(err).Msg("app: Failed to initialize mediastream module")
return
}
}
a.MediastreamRepository.InitializeModules(settings, a.Config.Cache.Dir, a.Config.Cache.TranscodeDir)
// Cleanup cache
go func() {
if settings.TranscodeEnabled {
// If transcoding is enabled, trim files
_ = a.FileCacher.TrimMediastreamVideoFiles()
} else {
// If transcoding is disabled, clear all files
_ = a.FileCacher.ClearMediastreamVideoFiles()
}
}()
a.SecondarySettings.Mediastream = settings
}
// InitOrRefreshTorrentstreamSettings will initialize or refresh the torrent streaming settings.
// It is called after the App instance is created and after settings are updated.
func (a *App) InitOrRefreshTorrentstreamSettings() {
var settings *models.TorrentstreamSettings
var found bool
settings, found = a.Database.GetTorrentstreamSettings()
if !found {
var err error
settings, err = a.Database.UpsertTorrentstreamSettings(&models.TorrentstreamSettings{
BaseModel: models.BaseModel{
ID: 1,
},
Enabled: false,
AutoSelect: true,
PreferredResolution: "",
DisableIPV6: false,
DownloadDir: "",
AddToLibrary: false,
TorrentClientHost: "",
TorrentClientPort: 43213,
StreamingServerHost: "0.0.0.0",
StreamingServerPort: 43214,
IncludeInLibrary: false,
StreamUrlAddress: "",
SlowSeeding: false,
})
if err != nil {
a.Logger.Error().Err(err).Msg("app: Failed to initialize mediastream module")
return
}
}
err := a.TorrentstreamRepository.InitModules(settings, a.Config.Server.Host, a.Config.Server.Port)
if err != nil && settings.Enabled {
a.Logger.Error().Err(err).Msg("app: Failed to initialize Torrent streaming module")
//_, _ = a.Database.UpsertTorrentstreamSettings(&models.TorrentstreamSettings{
// BaseModel: models.BaseModel{
// ID: 1,
// },
// Enabled: false,
//})
}
a.Cleanups = append(a.Cleanups, func() {
a.TorrentstreamRepository.Shutdown()
})
// Set torrent streaming settings in secondary settings
// so the client can use them
a.SecondarySettings.Torrentstream = settings
}
func (a *App) InitOrRefreshDebridSettings() {
settings, found := a.Database.GetDebridSettings()
if !found {
var err error
settings, err = a.Database.UpsertDebridSettings(&models.DebridSettings{
BaseModel: models.BaseModel{
ID: 1,
},
Enabled: false,
Provider: "",
ApiKey: "",
IncludeDebridStreamInLibrary: false,
StreamAutoSelect: false,
StreamPreferredResolution: "",
})
if err != nil {
a.Logger.Error().Err(err).Msg("app: Failed to initialize debrid module")
return
}
}
a.SecondarySettings.Debrid = settings
err := a.DebridClientRepository.InitializeProvider(settings)
if err != nil {
a.Logger.Error().Err(err).Msg("app: Failed to initialize debrid provider")
return
}
}
// InitOrRefreshAnilistData will initialize the Anilist anime collection and the account.
// This function should be called after App.Database is initialized and after settings are updated.
func (a *App) InitOrRefreshAnilistData() {
a.Logger.Debug().Msg("app: Fetching Anilist data")
var currUser *user.User
acc, err := a.Database.GetAccount()
if err != nil || acc.Username == "" {
a.ServerReady = true
currUser = user.NewSimulatedUser() // Create a simulated user if no account is found
} else {
currUser, err = user.NewUser(acc)
if err != nil {
a.Logger.Error().Err(err).Msg("app: Failed to create user from account")
return
}
}
a.user = currUser
// Set username to Anilist platform
a.AnilistPlatform.SetUsername(currUser.Viewer.Name)
a.Logger.Info().Msg("app: Authenticated to AniList")
go func() {
_, err = a.RefreshAnimeCollection()
if err != nil {
a.Logger.Error().Err(err).Msg("app: Failed to fetch Anilist anime collection")
}
a.ServerReady = true
a.WSEventManager.SendEvent(events.ServerReady, nil)
_, err = a.RefreshMangaCollection()
if err != nil {
a.Logger.Error().Err(err).Msg("app: Failed to fetch Anilist manga collection")
}
}()
go func(username string) {
a.DiscordPresence.SetUsername(username)
}(currUser.Viewer.Name)
a.Logger.Info().Msg("app: Fetched Anilist data")
}
func (a *App) performActionsOnce() {
go func() {
if a.Settings == nil || a.Settings.Library == nil {
return
}
if a.Settings.GetLibrary().OpenWebURLOnStart {
// Open the web URL
err := browser.OpenURL(a.Config.GetServerURI("127.0.0.1"))
if err != nil {
a.Logger.Warn().Err(err).Msg("app: Failed to open web URL, please open it manually in your browser")
} else {
a.Logger.Info().Msg("app: Opened web URL")
}
}
if a.Settings.GetLibrary().RefreshLibraryOnStart {
go func() {
a.Logger.Debug().Msg("app: Refreshing library")
a.AutoScanner.RunNow()
a.Logger.Info().Msg("app: Refreshed library")
}()
}
if a.Settings.GetLibrary().OpenTorrentClientOnStart && a.TorrentClientRepository != nil {
// Start the torrent client
ok := a.TorrentClientRepository.Start()
if !ok {
a.Logger.Warn().Msg("app: Failed to open torrent client")
} else {
a.Logger.Info().Msg("app: Started torrent client")
}
}
}()
}

View File

@@ -0,0 +1,39 @@
package core
import (
"seanime/internal/api/metadata"
"seanime/internal/platforms/anilist_platform"
"seanime/internal/platforms/offline_platform"
"github.com/spf13/viper"
)
// SetOfflineMode changes the offline mode.
// It updates the config and active AniList platform.
func (a *App) SetOfflineMode(enabled bool) {
// Update the config
a.Config.Server.Offline = enabled
viper.Set("server.offline", enabled)
err := viper.WriteConfig()
if err != nil {
a.Logger.Err(err).Msg("app: Failed to write config after setting offline mode")
}
a.Logger.Info().Bool("enabled", enabled).Msg("app: Offline mode set")
a.isOffline = &enabled
// Update the platform and metadata provider
if enabled {
a.AnilistPlatform, _ = offline_platform.NewOfflinePlatform(a.LocalManager, a.AnilistClient, a.Logger)
a.MetadataProvider = a.LocalManager.GetOfflineMetadataProvider()
} else {
// DEVNOTE: We don't handle local platform since the feature doesn't allow offline mode
a.AnilistPlatform = anilist_platform.NewAnilistPlatform(a.AnilistClient, a.Logger)
a.MetadataProvider = metadata.NewProvider(&metadata.NewProviderImplOptions{
Logger: a.Logger,
FileCacher: a.FileCacher,
})
a.InitOrRefreshAnilistData()
}
a.InitOrRefreshModules()
}

View File

@@ -0,0 +1,122 @@
package core
import (
"fmt"
"os"
"seanime/internal/constants"
"strings"
"github.com/charmbracelet/lipgloss"
"golang.org/x/term"
)
func PrintHeader() {
// Get terminal width
physicalWidth, _, _ := term.GetSize(int(os.Stdout.Fd()))
// Color scheme
// primary := lipgloss.Color("#7B61FF")
// secondary := lipgloss.Color("#5243CB")
// highlight := lipgloss.Color("#14F9D5")
// versionBgColor := lipgloss.Color("#8A2BE2")
subtle := lipgloss.AdaptiveColor{Light: "#D9DCCF", Dark: "#383838"}
// Base styles
docStyle := lipgloss.NewStyle().Padding(1, 2)
if physicalWidth > 0 {
docStyle = docStyle.MaxWidth(physicalWidth)
}
// Build the header
doc := strings.Builder{}
// Logo with gradient effect
logoStyle := lipgloss.NewStyle().Bold(true)
logoLines := strings.Split(asciiLogo(), "\n")
// Create a gradient effect for the logo
gradientColors := []string{"#9370DB", "#8A2BE2", "#7B68EE", "#6A5ACD", "#5243CB"}
for i, line := range logoLines {
colorIdx := i % len(gradientColors)
coloredLine := logoStyle.Foreground(lipgloss.Color(gradientColors[colorIdx])).Render(line)
doc.WriteString(coloredLine + "\n")
}
// App name and version with box
titleBox := lipgloss.NewStyle().
Border(lipgloss.NormalBorder()).
BorderForeground(subtle).
Foreground(lipgloss.Color("#FFF7DB")).
// Background(secondary).
Padding(0, 1).
Bold(true).
Render("Seanime")
versionBox := lipgloss.NewStyle().
Border(lipgloss.NormalBorder()).
BorderForeground(subtle).
Foreground(lipgloss.Color("#ed4760")).
// Background(versionBgColor).
Padding(0, 1).
Bold(true).
Render(constants.Version)
// Version name with different style
versionName := lipgloss.NewStyle().
Italic(true).
Border(lipgloss.NormalBorder()).
BorderForeground(subtle).
Foreground(lipgloss.Color("#FFF7DB")).
// Background(versionBgColor).
Padding(0, 1).
Render(constants.VersionName)
// Combine title elements
titleRow := lipgloss.JoinHorizontal(lipgloss.Center, titleBox, versionBox, versionName)
// Add a decorative line
// lineWidth := min(80, physicalWidth-4)
// line := lipgloss.NewStyle().
// Foreground(subtle).
// Render(strings.Repeat("─", lineWidth))
// Put it all together
doc.WriteString("\n" +
lipgloss.NewStyle().Align(lipgloss.Center).Render(titleRow))
// Print the result
fmt.Println(docStyle.Render(doc.String()))
}
// func asciiLogo() string {
// return `⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⢀⡀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
// ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⣴⣿⣿⠀⠀⠀⢠⣾⣧⣤⡖⠀⠀⠀⠀⠀⠀⠀
// ⠀⠀⠀⠀⠀⠀⠀⠀⢀⣼⠋⠀⠉⠀⢄⣸⣿⣿⣿⣿⣿⣥⡤⢶⣿⣦⣀⡀
// ⠀⠀⠀⠀⠀⠀⠀⠀⣿⣿⡆⠀⠀⠀⣙⣛⣿⣿⣿⣿⡏⠀⠀⣀⣿⣿⣿⡟
// ⠀⠀⠀⠀⠀⠀⠀⠀⠙⠻⠷⣦⣤⣤⣬⣽⣿⣿⣿⣿⣿⣿⣿⣟⠛⠿⠋⠀
// ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⢀⣴⠋⣿⣿⣿⣿⣿⣿⣿⣿⢿⣿⣿⡆⠀⠀
// ⠀⠀⠀⠀⣠⣶⣶⣶⣿⣦⡀⠘⣿⣿⣿⣿⣿⣿⣿⣿⠿⠋⠈⢹⡏⠁⠀⠀
// ⠀⠀⠀⢀⣿⡏⠉⠿⢿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣷⡆⠀⢀⣿⡇⠀⠀⠀
// ⠀⠀⠀⢸⣿⠀⠀⠀⠀⠀⠙⢿⣿⣿⣿⣿⣿⣿⣿⣿⣟⡘⣿⣿⣃⠀⠀⠀
// ⣴⣷⣀⣸⣿⠀⠀⠀⠀⠀⠀⠘⣿⣿⣿⣿⠹⣿⣯⣤⣾⠏⠉⠉⠉⠙⠢⠀
// ⠈⠙⢿⣿⡟⠀⠀⠀⠀⠀⠀⠀⢸⣿⣿⣿⣄⠛⠉⢩⣷⣴⡆⠀⠀⠀⠀⠀
// ⠀⠀⠀⠋⠀⠀⠀⠀⠀⠀⠀⠀⠈⣿⣿⣿⣿⣀⡠⠋⠈⢿⣇⠀⠀⠀⠀⠀
// ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠙⠿⠿⠛⠁⠀⠀⠀⠀⠀⠀⠀⠀⠀
// `
// }
func asciiLogo() string {
return `
⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⣀⣠⣴⡇⠀⠀⠀
⠀⢸⣿⣿⣶⣦⣤⣀⡀⠀⠀⠀⠀⠀⠀⠀⠀⢀⣠⣴⣶⣿⣿⣿⣿⣿⡇⠀⠀⠀
⠀⠘⣿⣿⣿⣿⣿⣿⣿⣷⣦⣄⠀⠀⠀⣠⣾⣿⣿⣿⣿⣿⣿⣿⣿⣿⠇⠀⠀⠀
⠀⠀⠹⣿⣿⣿⣿⣿⣿⣿⣿⣿⣷⣄⣾⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⡟⠀⠀⠀⠀
⠀⠀⠀⠘⠿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⡿⠏⠀⠀⠀⠀⠀
⠀⠀⠀⠀⠀⠀⠉⠛⠿⣿⣿⣿⣿⣿⣿⣿⣿⡻⣿⣿⣿⠟⠋⠀⠀⠀⠀⠀⠀⠀
⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠈⠙⠻⣿⣿⣿⣿⣿⡌⠉⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠘⣿⣿⣿⣿⣿⡀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⢀⣿⣿⣿⣿⣿⡇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
⠀⠀⠀⠀⠀⢀⣠⣤⣴⣶⣶⣶⣦⣤⣤⣄⣉⡉⠛⠷⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
⠀⠀⢀⣴⣾⣿⣿⣿⣿⡿⠿⠿⠿⣿⣿⣿⣿⣿⣿⣶⣦⣤⣀⡀⠀⠀⠀⠀⠀⠀
⠉⠉⠀⠀⠉⠉⠀⠀ ⠉ ⠉⠉⠉⠉⠉⠉⠉⠛⠛⠛⠲⠦⠄`
}

View File

@@ -0,0 +1,59 @@
package core
import (
"seanime/internal/library/scanner"
"seanime/internal/util"
"sync"
)
// initLibraryWatcher will initialize the library watcher.
// - Used by AutoScanner
func (a *App) initLibraryWatcher(paths []string) {
// Create a new watcher
watcher, err := scanner.NewWatcher(&scanner.NewWatcherOptions{
Logger: a.Logger,
WSEventManager: a.WSEventManager,
})
if err != nil {
a.Logger.Error().Err(err).Msg("app: Failed to initialize watcher")
return
}
// Initialize library file watcher
err = watcher.InitLibraryFileWatcher(&scanner.WatchLibraryFilesOptions{
LibraryPaths: paths,
})
if err != nil {
a.Logger.Error().Err(err).Msg("app: Failed to watch library files")
return
}
var dirSize uint64 = 0
mu := sync.Mutex{}
wg := sync.WaitGroup{}
for _, path := range paths {
wg.Add(1)
go func(path string) {
defer wg.Done()
ds, _ := util.DirSize(path)
mu.Lock()
dirSize += ds
mu.Unlock()
}(path)
}
wg.Wait()
a.TotalLibrarySize = dirSize
a.Logger.Info().Msgf("watcher: Library size: %s", util.Bytes(dirSize))
// Set the watcher
a.Watcher = watcher
// Start watching
a.Watcher.StartWatching(
func() {
// Notify the auto scanner when a file action occurs
a.AutoScanner.Notify()
})
}

View File

@@ -0,0 +1,79 @@
package cron
import (
"seanime/internal/core"
"time"
)
type JobCtx struct {
App *core.App
}
func RunJobs(app *core.App) {
// The jobs are scheduled unconditionally; each tick is skipped while the server is offline
ctx := &JobCtx{
App: app,
}
refreshAnilistTicker := time.NewTicker(10 * time.Minute)
refreshLocalDataTicker := time.NewTicker(30 * time.Minute)
refetchReleaseTicker := time.NewTicker(1 * time.Hour)
refetchAnnouncementsTicker := time.NewTicker(10 * time.Minute)
go func() {
for {
select {
case <-refreshAnilistTicker.C:
if *app.IsOffline() {
continue
}
RefreshAnilistDataJob(ctx)
if app.LocalManager != nil &&
!app.GetUser().IsSimulated &&
app.Settings != nil &&
app.Settings.Library != nil &&
app.Settings.Library.AutoSyncToLocalAccount {
_ = app.LocalManager.SynchronizeAnilistToSimulatedCollection()
}
}
}
}()
go func() {
for {
select {
case <-refreshLocalDataTicker.C:
if *app.IsOffline() {
continue
}
SyncLocalDataJob(ctx)
}
}
}()
go func() {
for {
select {
case <-refetchReleaseTicker.C:
if *app.IsOffline() {
continue
}
app.Updater.ShouldRefetchReleases()
}
}
}()
go func() {
for {
select {
case <-refetchAnnouncementsTicker.C:
if *app.IsOffline() {
continue
}
app.Updater.FetchAnnouncements()
}
}
}()
}
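// Hedged usage sketch (assumed caller, not part of the original file): the
// background jobs are started once after the core.App instance is ready.
func exampleStartJobs(app *core.App) {
    RunJobs(app) // spawns the ticker loops in their own goroutines and returns immediately
}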

View File

@@ -0,0 +1,50 @@
package cron
import (
"seanime/internal/events"
)
func RefreshAnilistDataJob(c *JobCtx) {
defer func() {
if r := recover(); r != nil {
}
}()
if c.App.Settings == nil || c.App.Settings.Library == nil {
return
}
// Refresh the Anilist Collection
animeCollection, _ := c.App.RefreshAnimeCollection()
if c.App.Settings.GetLibrary().EnableManga {
mangaCollection, _ := c.App.RefreshMangaCollection()
c.App.WSEventManager.SendEvent(events.RefreshedAnilistMangaCollection, mangaCollection)
}
c.App.WSEventManager.SendEvent(events.RefreshedAnilistAnimeCollection, animeCollection)
}
func SyncLocalDataJob(c *JobCtx) {
defer func() {
if r := recover(); r != nil {
}
}()
if c.App.Settings == nil || c.App.Settings.Library == nil {
return
}
// Only synchronize local data if the user is not simulated
if c.App.Settings.Library.AutoSyncOfflineLocalData && !c.App.GetUser().IsSimulated {
c.App.LocalManager.SynchronizeLocal()
}
// Only auto-track the current media if the user is not simulated
if c.App.Settings.Library.AutoSaveCurrentMediaOffline && !c.App.GetUser().IsSimulated {
added, _ := c.App.LocalManager.AutoTrackCurrentMedia()
if added && c.App.Settings.Library.AutoSyncOfflineLocalData {
go c.App.LocalManager.SynchronizeLocal()
}
}
}

View File

@@ -0,0 +1,7 @@
# db
Should only import the `models` internal package.
### 🚫 Do not
- Do not define **models** here.
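For illustration, a minimal accessor following this convention could look like the sketch below — it reads a model defined in `models` without declaring any model of its own (the method name is hypothetical):

```go
package db

import "seanime/internal/database/models"

// getThemeExample is a hypothetical sketch: the accessor only imports the
// models package; the Theme model itself lives in internal/database/models.
func (db *Database) getThemeExample() (*models.Theme, error) {
	var theme models.Theme
	if err := db.gormdb.First(&theme, 1).Error; err != nil {
		return nil, err
	}
	return &theme, nil
}
```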

View File

@@ -0,0 +1,59 @@
package db
import (
"errors"
"seanime/internal/database/models"
"gorm.io/gorm/clause"
)
var accountCache *models.Account
func (db *Database) UpsertAccount(acc *models.Account) (*models.Account, error) {
err := db.gormdb.Clauses(clause.OnConflict{
Columns: []clause.Column{{Name: "id"}},
UpdateAll: true,
}).Create(acc).Error
if err != nil {
db.Logger.Error().Err(err).Msg("Failed to save account in the database")
return nil, err
}
if acc.Username != "" {
accountCache = acc
} else {
accountCache = nil
}
return acc, nil
}
func (db *Database) GetAccount() (*models.Account, error) {
if accountCache != nil {
return accountCache, nil
}
var acc models.Account
err := db.gormdb.Last(&acc).Error
if err != nil {
return nil, err
}
if acc.Username == "" || acc.Token == "" || acc.Viewer == nil {
return nil, errors.New("account not found")
}
accountCache = &acc
return &acc, err
}
// GetAnilistToken retrieves the AniList token from the account or returns an empty string
func (db *Database) GetAnilistToken() string {
acc, err := db.GetAccount()
if err != nil {
return ""
}
return acc.Token
}

View File

@@ -0,0 +1,57 @@
package db
import (
"seanime/internal/database/models"
)
func (db *Database) GetAutoDownloaderItems() ([]*models.AutoDownloaderItem, error) {
var res []*models.AutoDownloaderItem
err := db.gormdb.Find(&res).Error
if err != nil {
return nil, err
}
return res, nil
}
func (db *Database) GetAutoDownloaderItem(id uint) (*models.AutoDownloaderItem, error) {
var res models.AutoDownloaderItem
err := db.gormdb.First(&res, id).Error
if err != nil {
return nil, err
}
return &res, nil
}
func (db *Database) GetAutoDownloaderItemByMediaId(mId int) ([]*models.AutoDownloaderItem, error) {
var res []*models.AutoDownloaderItem
err := db.gormdb.Where("media_id = ?", mId).Find(&res).Error
if err != nil {
return nil, err
}
return res, nil
}
func (db *Database) InsertAutoDownloaderItem(item *models.AutoDownloaderItem) error {
err := db.gormdb.Create(item).Error
if err != nil {
return err
}
return nil
}
func (db *Database) DeleteAutoDownloaderItem(id uint) error {
return db.gormdb.Delete(&models.AutoDownloaderItem{}, id).Error
}
// DeleteDownloadedAutoDownloaderItems will delete all the downloaded queued items from the database.
func (db *Database) DeleteDownloadedAutoDownloaderItems() error {
return db.gormdb.Where("downloaded = ?", true).Delete(&models.AutoDownloaderItem{}).Error
}
func (db *Database) UpdateAutoDownloaderItem(id uint, item *models.AutoDownloaderItem) error {
// Save the data
return db.gormdb.Model(&models.AutoDownloaderItem{}).Where("id = ?", id).Updates(item).Error
}

View File

@@ -0,0 +1,135 @@
package db
import (
"errors"
"gorm.io/gorm"
"seanime/internal/database/models"
)
func (db *Database) GetChapterDownloadQueue() ([]*models.ChapterDownloadQueueItem, error) {
var res []*models.ChapterDownloadQueueItem
err := db.gormdb.Find(&res).Error
if err != nil {
db.Logger.Error().Err(err).Msg("db: Failed to get chapter download queue")
return nil, err
}
return res, nil
}
func (db *Database) GetNextChapterDownloadQueueItem() (*models.ChapterDownloadQueueItem, error) {
var res models.ChapterDownloadQueueItem
err := db.gormdb.Where("status = ?", "not_started").First(&res).Error
if err != nil {
if !errors.Is(err, gorm.ErrRecordNotFound) {
db.Logger.Error().Err(err).Msg("db: Failed to get next chapter download queue item")
}
return nil, nil
}
return &res, nil
}
func (db *Database) DequeueChapterDownloadQueueItem() (*models.ChapterDownloadQueueItem, error) {
// Pop the first item from the queue
var res models.ChapterDownloadQueueItem
err := db.gormdb.Where("status = ?", "downloading").First(&res).Error
if err != nil {
return nil, err
}
err = db.gormdb.Delete(&res).Error
if err != nil {
db.Logger.Error().Err(err).Msg("db: Failed to delete chapter download queue item")
return nil, err
}
return &res, nil
}
func (db *Database) InsertChapterDownloadQueueItem(item *models.ChapterDownloadQueueItem) error {
// Check if the item already exists
var existingItem models.ChapterDownloadQueueItem
err := db.gormdb.Where("provider = ? AND media_id = ? AND chapter_id = ?", item.Provider, item.MediaID, item.ChapterID).First(&existingItem).Error
if err == nil {
db.Logger.Debug().Msg("db: Chapter download queue item already exists")
return errors.New("chapter is already in the download queue")
}
if item.ChapterID == "" {
return errors.New("chapter ID is empty")
}
if item.Provider == "" {
return errors.New("provider is empty")
}
if item.MediaID == 0 {
return errors.New("media ID is empty")
}
if item.ChapterNumber == "" {
return errors.New("chapter number is empty")
}
err = db.gormdb.Create(item).Error
if err != nil {
db.Logger.Error().Err(err).Msg("db: Failed to insert chapter download queue item")
return err
}
return nil
}
func (db *Database) UpdateChapterDownloadQueueItemStatus(provider string, mId int, chapterId string, status string) error {
err := db.gormdb.Model(&models.ChapterDownloadQueueItem{}).
Where("provider = ? AND media_id = ? AND chapter_id = ?", provider, mId, chapterId).
Update("status", status).Error
if err != nil {
db.Logger.Error().Err(err).Msg("db: Failed to update chapter download queue item status")
return err
}
return nil
}
func (db *Database) GetMediaQueuedChapters(mediaId int) ([]*models.ChapterDownloadQueueItem, error) {
var res []*models.ChapterDownloadQueueItem
err := db.gormdb.Where("media_id = ?", mediaId).Find(&res).Error
if err != nil {
db.Logger.Error().Err(err).Msg("db: Failed to get media queued chapters")
return nil, err
}
return res, nil
}
func (db *Database) ClearAllChapterDownloadQueueItems() error {
err := db.gormdb.
Where("status = ? OR status = ? OR status = ?", "not_started", "downloading", "errored").
Delete(&models.ChapterDownloadQueueItem{}).
Error
if err != nil {
db.Logger.Error().Err(err).Msg("db: Failed to clear all chapter download queue items")
return err
}
return nil
}
func (db *Database) ResetErroredChapterDownloadQueueItems() error {
err := db.gormdb.Model(&models.ChapterDownloadQueueItem{}).
Where("status = ?", "errored").
Update("status", "not_started").Error
if err != nil {
db.Logger.Error().Err(err).Msg("db: Failed to reset errored chapter download queue items")
return err
}
return nil
}
func (db *Database) ResetDownloadingChapterDownloadQueueItems() error {
err := db.gormdb.Model(&models.ChapterDownloadQueueItem{}).
Where("status = ?", "downloading").
Update("status", "not_started").Error
if err != nil {
db.Logger.Error().Err(err).Msg("db: Failed to reset downloading chapter download queue items")
return err
}
return nil
}
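// Hedged usage sketch (assumed caller, not part of the original file): the
// queue lifecycle implied by the methods above. Field names are taken from the
// checks in InsertChapterDownloadQueueItem; concrete values are placeholders.
func exampleChapterQueueLifecycle(database *Database) error {
    // Enqueue a chapter.
    err := database.InsertChapterDownloadQueueItem(&models.ChapterDownloadQueueItem{
        Provider:      "example-provider",
        MediaID:       101,
        ChapterID:     "chapter-1",
        ChapterNumber: "1",
    })
    if err != nil {
        return err
    }
    // A downloader picks up the next queued item...
    next, err := database.GetNextChapterDownloadQueueItem()
    if err != nil || next == nil {
        return err
    }
    // ...marks it as downloading...
    if err := database.UpdateChapterDownloadQueueItemStatus(next.Provider, next.MediaID, next.ChapterID, "downloading"); err != nil {
        return err
    }
    // ...and removes it from the queue once handled.
    _, err = database.DequeueChapterDownloadQueueItem()
    return err
}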

View File

@@ -0,0 +1,102 @@
package db
import (
"fmt"
"log"
"os"
"path/filepath"
"seanime/internal/database/models"
"time"
"github.com/glebarez/sqlite"
"github.com/rs/zerolog"
"github.com/samber/mo"
"gorm.io/gorm"
gormlogger "gorm.io/gorm/logger"
)
type Database struct {
gormdb *gorm.DB
Logger *zerolog.Logger
CurrMediaFillers mo.Option[map[int]*MediaFillerItem]
}
func (db *Database) Gorm() *gorm.DB {
return db.gormdb
}
func NewDatabase(appDataDir, dbName string, logger *zerolog.Logger) (*Database, error) {
// Set the SQLite database path
var sqlitePath string
if os.Getenv("TEST_ENV") == "true" {
sqlitePath = ":memory:"
} else {
sqlitePath = filepath.Join(appDataDir, dbName+".db")
}
// Connect to the SQLite database
db, err := gorm.Open(sqlite.Open(sqlitePath), &gorm.Config{
Logger: gormlogger.New(
log.New(os.Stdout, "\r\n", log.LstdFlags),
gormlogger.Config{
SlowThreshold: time.Second,
LogLevel: gormlogger.Error,
IgnoreRecordNotFoundError: true,
ParameterizedQueries: false,
Colorful: true,
},
),
})
if err != nil {
return nil, err
}
// Migrate tables
err = migrateTables(db)
if err != nil {
logger.Fatal().Err(err).Msg("db: Failed to perform auto migration")
return nil, err
}
logger.Info().Str("name", fmt.Sprintf("%s.db", dbName)).Msg("db: Database instantiated")
return &Database{
gormdb: db,
Logger: logger,
CurrMediaFillers: mo.None[map[int]*MediaFillerItem](),
}, nil
}
// MigrateTables performs auto migration on the database
func migrateTables(db *gorm.DB) error {
err := db.AutoMigrate(
&models.LocalFiles{},
&models.Settings{},
&models.Account{},
&models.Mal{},
&models.ScanSummary{},
&models.AutoDownloaderRule{},
&models.AutoDownloaderItem{},
&models.SilencedMediaEntry{},
&models.Theme{},
&models.PlaylistEntry{},
&models.ChapterDownloadQueueItem{},
&models.TorrentstreamSettings{},
&models.TorrentstreamHistory{},
&models.MediastreamSettings{},
&models.MediaFiller{},
&models.MangaMapping{},
&models.OnlinestreamMapping{},
&models.DebridSettings{},
&models.DebridTorrentItem{},
&models.PluginData{},
//&models.MangaChapterContainer{},
)
if err != nil {
return err
}
return nil
}
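// Hedged usage sketch (assumed caller, not part of the original file): opening
// the database at startup. The data directory and database name are placeholders.
func exampleOpenDatabase(appDataDir string, logger *zerolog.Logger) (*Database, error) {
    database, err := NewDatabase(appDataDir, "seanime", logger)
    if err != nil {
        return nil, err
    }
    _ = database.Gorm() // the underlying *gorm.DB, exposed for lower-level queries
    return database, nil
}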

View File

@@ -0,0 +1,56 @@
package db
import (
"seanime/internal/database/models"
)
func (db *Database) GetDebridTorrentItems() ([]*models.DebridTorrentItem, error) {
var res []*models.DebridTorrentItem
err := db.gormdb.Find(&res).Error
if err != nil {
return nil, err
}
return res, nil
}
func (db *Database) GetDebridTorrentItemByDbId(dbId uint) (*models.DebridTorrentItem, error) {
var res models.DebridTorrentItem
err := db.gormdb.First(&res, dbId).Error
if err != nil {
return nil, err
}
return &res, nil
}
func (db *Database) GetDebridTorrentItemByTorrentItemId(tId string) (*models.DebridTorrentItem, error) {
var res *models.DebridTorrentItem
err := db.gormdb.Where("torrent_item_id = ?", tId).First(&res).Error
if err != nil {
return nil, err
}
return res, nil
}
func (db *Database) InsertDebridTorrentItem(item *models.DebridTorrentItem) error {
err := db.gormdb.Create(item).Error
if err != nil {
return err
}
return nil
}
func (db *Database) DeleteDebridTorrentItemByDbId(dbId uint) error {
return db.gormdb.Delete(&models.DebridTorrentItem{}, dbId).Error
}
func (db *Database) DeleteDebridTorrentItemByTorrentItemId(tId string) error {
return db.gormdb.Where("torrent_item_id = ?", tId).Delete(&models.DebridTorrentItem{}).Error
}
func (db *Database) UpdateDebridTorrentItemByDbId(dbId uint, item *models.DebridTorrentItem) error {
// Save the data
return db.gormdb.Model(&models.DebridTorrentItem{}).Where("id = ?", dbId).Updates(item).Error
}

View File

@@ -0,0 +1,51 @@
package db
import (
"seanime/internal/database/models"
"gorm.io/gorm/clause"
)
// TrimLocalFileEntries will trim the local file entries if there are more than 10 entries.
// This is run in a goroutine.
func (db *Database) TrimLocalFileEntries() {
go func() {
var count int64
err := db.gormdb.Model(&models.LocalFiles{}).Count(&count).Error
if err != nil {
db.Logger.Error().Err(err).Msg("database: Failed to count local file entries")
return
}
if count > 10 {
// Leave 5 entries
err = db.gormdb.Delete(&models.LocalFiles{}, "id IN (SELECT id FROM local_files ORDER BY id ASC LIMIT ?)", count-5).Error
if err != nil {
db.Logger.Error().Err(err).Msg("database: Failed to delete old local file entries")
return
}
}
}()
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
func (db *Database) UpsertLocalFiles(lfs *models.LocalFiles) (*models.LocalFiles, error) {
err := db.gormdb.Clauses(clause.OnConflict{
Columns: []clause.Column{{Name: "id"}},
UpdateAll: true,
}).Create(lfs).Error
if err != nil {
return nil, err
}
return lfs, nil
}
func (db *Database) InsertLocalFiles(lfs *models.LocalFiles) (*models.LocalFiles, error) {
err := db.gormdb.Create(lfs).Error
if err != nil {
return nil, err
}
return lfs, nil
}

View File

@@ -0,0 +1,50 @@
package db
import (
"errors"
"gorm.io/gorm"
"gorm.io/gorm/clause"
"seanime/internal/database/models"
)
func (db *Database) GetMalInfo() (*models.Mal, error) {
// Get the first entry
var res models.Mal
err := db.gormdb.First(&res, 1).Error
if err != nil && errors.Is(err, gorm.ErrRecordNotFound) {
return nil, errors.New("MAL not connected")
} else if err != nil {
return nil, err
}
return &res, nil
}
func (db *Database) UpsertMalInfo(info *models.Mal) (*models.Mal, error) {
err := db.gormdb.Clauses(clause.OnConflict{
Columns: []clause.Column{{Name: "id"}},
UpdateAll: true,
}).Create(info).Error
if err != nil {
return nil, err
}
return info, nil
}
func (db *Database) InsertMalInfo(info *models.Mal) (*models.Mal, error) {
err := db.gormdb.Create(info).Error
if err != nil {
return nil, err
}
return info, nil
}
func (db *Database) DeleteMalInfo() error {
err := db.gormdb.Delete(&models.Mal{}, 1).Error
if err != nil {
return err
}
return nil
}

View File

@@ -0,0 +1,102 @@
package db
import (
"fmt"
"seanime/internal/database/models"
"seanime/internal/util/result"
)
var mangaMappingCache = result.NewResultMap[string, *models.MangaMapping]()
func formatMangaMappingCacheKey(provider string, mediaId int) string {
return fmt.Sprintf("%s$%d", provider, mediaId)
}
func (db *Database) GetMangaMapping(provider string, mediaId int) (*models.MangaMapping, bool) {
if res, ok := mangaMappingCache.Get(formatMangaMappingCacheKey(provider, mediaId)); ok {
return res, true
}
var res models.MangaMapping
err := db.gormdb.Where("provider = ? AND media_id = ?", provider, mediaId).First(&res).Error
if err != nil {
return nil, false
}
mangaMappingCache.Set(formatMangaMappingCacheKey(provider, mediaId), &res)
return &res, true
}
func (db *Database) InsertMangaMapping(provider string, mediaId int, mangaId string) error {
mapping := models.MangaMapping{
Provider: provider,
MediaID: mediaId,
MangaID: mangaId,
}
mangaMappingCache.Set(formatMangaMappingCacheKey(provider, mediaId), &mapping)
return db.gormdb.Save(&mapping).Error
}
func (db *Database) DeleteMangaMapping(provider string, mediaId int) error {
err := db.gormdb.Where("provider = ? AND media_id = ?", provider, mediaId).Delete(&models.MangaMapping{}).Error
if err != nil {
return err
}
mangaMappingCache.Delete(formatMangaMappingCacheKey(provider, mediaId))
return nil
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
var mangaChapterContainerCache = result.NewResultMap[string, *models.MangaChapterContainer]()
func formatMangaChapterContainerCacheKey(provider string, mediaId int, chapterId string) string {
return fmt.Sprintf("%s$%d$%s", provider, mediaId, chapterId)
}
func (db *Database) GetMangaChapterContainer(provider string, mediaId int, chapterId string) (*models.MangaChapterContainer, bool) {
if res, ok := mangaChapterContainerCache.Get(formatMangaChapterContainerCacheKey(provider, mediaId, chapterId)); ok {
return res, true
}
var res models.MangaChapterContainer
err := db.gormdb.Where("provider = ? AND media_id = ? AND chapter_id = ?", provider, mediaId, chapterId).First(&res).Error
if err != nil {
return nil, false
}
mangaChapterContainerCache.Set(formatMangaChapterContainerCacheKey(provider, mediaId, chapterId), &res)
return &res, true
}
func (db *Database) InsertMangaChapterContainer(provider string, mediaId int, chapterId string, chapterContainer []byte) error {
container := models.MangaChapterContainer{
Provider: provider,
MediaID: mediaId,
ChapterID: chapterId,
Data: chapterContainer,
}
mangaChapterContainerCache.Set(formatMangaChapterContainerCacheKey(provider, mediaId, chapterId), &container)
return db.gormdb.Save(&container).Error
}
func (db *Database) DeleteMangaChapterContainer(provider string, mediaId int, chapterId string) error {
err := db.gormdb.Where("provider = ? AND media_id = ? AND chapter_id = ?", provider, mediaId, chapterId).Delete(&models.MangaChapterContainer{}).Error
if err != nil {
return err
}
mangaChapterContainerCache.Delete(formatMangaChapterContainerCacheKey(provider, mediaId, chapterId))
return nil
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
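// Hedged usage sketch (assumed caller, not part of the original file): the
// read-through pattern implied by the mapping methods above. The provider name
// and IDs are placeholders.
func exampleResolveMangaMapping(database *Database) string {
    if mapping, ok := database.GetMangaMapping("example-provider", 101); ok {
        return mapping.MangaID // served from the cache or the database
    }
    // Not mapped yet: persist a new mapping, which also primes the cache.
    _ = database.InsertMangaMapping("example-provider", 101, "provider-manga-id")
    return "provider-manga-id"
}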

View File

@@ -0,0 +1,182 @@
package db
import (
"github.com/goccy/go-json"
"github.com/samber/mo"
"seanime/internal/api/filler"
"seanime/internal/database/models"
"time"
)
type MediaFillerItem struct {
DbId uint `json:"dbId"`
Provider string `json:"provider"`
Slug string `json:"slug"`
MediaId int `json:"mediaId"`
LastFetchedAt time.Time `json:"lastFetchedAt"`
FillerEpisodes []string `json:"fillerEpisodes"`
}
// GetCachedMediaFillers will return all the media fillers (cache-first).
// If the cache is empty, it will fetch the media fillers from the database.
func (db *Database) GetCachedMediaFillers() (map[int]*MediaFillerItem, error) {
if db.CurrMediaFillers.IsPresent() {
return db.CurrMediaFillers.MustGet(), nil
}
var res []*models.MediaFiller
err := db.gormdb.Find(&res).Error
if err != nil {
return nil, err
}
// Unmarshal the media fillers
mediaFillers := make(map[int]*MediaFillerItem)
for _, mf := range res {
var fillerData filler.Data
if err := json.Unmarshal(mf.Data, &fillerData); err != nil {
return nil, err
}
// Get the filler episodes
var fillerEpisodes []string
if len(fillerData.FillerEpisodes) > 0 {
fillerEpisodes = fillerData.FillerEpisodes
}
mediaFillers[mf.MediaID] = &MediaFillerItem{
DbId: mf.ID,
Provider: mf.Provider,
MediaId: mf.MediaID,
Slug: mf.Slug,
LastFetchedAt: mf.LastFetchedAt,
FillerEpisodes: fillerEpisodes,
}
}
// Cache the media fillers
db.CurrMediaFillers = mo.Some(mediaFillers)
return db.CurrMediaFillers.MustGet(), nil
}
func (db *Database) GetMediaFillerItem(mediaId int) (*MediaFillerItem, bool) {
mediaFillers, err := db.GetCachedMediaFillers()
if err != nil {
return nil, false
}
item, ok := mediaFillers[mediaId]
return item, ok
}
func (db *Database) InsertMediaFiller(
provider string,
mediaId int,
slug string,
lastFetchedAt time.Time,
fillerEpisodes []string,
) error {
// Marshal the filler data
fillerData := filler.Data{
FillerEpisodes: fillerEpisodes,
}
fillerDataBytes, err := json.Marshal(fillerData)
if err != nil {
return err
}
// Delete the existing media filler
_ = db.DeleteMediaFiller(mediaId)
// Save the media filler
err = db.gormdb.Create(&models.MediaFiller{
Provider: provider,
MediaID: mediaId,
Slug: slug,
LastFetchedAt: lastFetchedAt,
Data: fillerDataBytes,
}).Error
if err != nil {
return err
}
// Invalidate the cache so it is rebuilt on the next read
db.CurrMediaFillers = mo.None[map[int]*MediaFillerItem]()
return nil
}
// SaveCachedMediaFillerItems will save the cached media filler items in the database.
// Call this function after editing the cached media filler items.
func (db *Database) SaveCachedMediaFillerItems() error {
if db.CurrMediaFillers.IsAbsent() {
return nil
}
mediaFillers, err := db.GetCachedMediaFillers()
if err != nil {
return err
}
for _, mf := range mediaFillers {
if len(mf.FillerEpisodes) == 0 {
continue
}
// Marshal the filler data
fillerData := filler.Data{
FillerEpisodes: mf.FillerEpisodes,
}
fillerDataBytes, err := json.Marshal(fillerData)
if err != nil {
return err
}
// Save the media filler
err = db.gormdb.Model(&models.MediaFiller{}).
Where("id = ?", mf.DbId).
Updates(map[string]interface{}{
"last_fetched_at": mf.LastFetchedAt,
"data": fillerDataBytes,
}).Error
if err != nil {
return err
}
}
// Invalidate the cache so it is rebuilt on the next read
db.CurrMediaFillers = mo.None[map[int]*MediaFillerItem]()
return nil
}
func (db *Database) DeleteMediaFiller(mediaId int) error {
mediaFillers, err := db.GetCachedMediaFillers()
if err != nil {
return err
}
item, ok := mediaFillers[mediaId]
if !ok {
return nil
}
err = db.gormdb.Delete(&models.MediaFiller{}, item.DbId).Error
if err != nil {
return err
}
// Invalidate the cache so it is rebuilt on the next read
db.CurrMediaFillers = mo.None[map[int]*MediaFillerItem]()
return nil
}
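// Hedged usage sketch (assumed caller, not part of the original file): editing
// a cached filler item in place, then persisting the change as the
// SaveCachedMediaFillerItems doc comment prescribes. The media ID and episode
// numbers are placeholders.
func exampleUpdateFillerEpisodes(database *Database) error {
    item, ok := database.GetMediaFillerItem(101)
    if !ok {
        return nil
    }
    item.FillerEpisodes = []string{"3", "7"} // mutate the cached item
    item.LastFetchedAt = time.Now()
    return database.SaveCachedMediaFillerItems()
}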

View File

@@ -0,0 +1,24 @@
package db
import (
"seanime/internal/database/models"
)
func (db *Database) UpsertNakamaSettings(nakamaSettings *models.NakamaSettings) (*models.NakamaSettings, error) {
// Get current settings
currentSettings, err := db.GetSettings()
if err != nil {
return nil, err
}
// Update the settings
*(currentSettings.Nakama) = *nakamaSettings
_, err = db.UpsertSettings(currentSettings)
if err != nil {
return nil, err
}
return nakamaSettings, nil
}

View File

@@ -0,0 +1,52 @@
package db
import (
"fmt"
"seanime/internal/database/models"
"seanime/internal/util/result"
)
var onlinestreamMappingCache = result.NewResultMap[string, *models.OnlinestreamMapping]()
func formatOnlinestreamMappingCacheKey(provider string, mediaId int) string {
return fmt.Sprintf("%s$%d", provider, mediaId)
}
func (db *Database) GetOnlinestreamMapping(provider string, mediaId int) (*models.OnlinestreamMapping, bool) {
if res, ok := onlinestreamMappingCache.Get(formatOnlinestreamMappingCacheKey(provider, mediaId)); ok {
return res, true
}
var res models.OnlinestreamMapping
err := db.gormdb.Where("provider = ? AND media_id = ?", provider, mediaId).First(&res).Error
if err != nil {
return nil, false
}
onlinestreamMappingCache.Set(formatOnlinestreamMappingCacheKey(provider, mediaId), &res)
return &res, true
}
func (db *Database) InsertOnlinestreamMapping(provider string, mediaId int, animeId string) error {
mapping := models.OnlinestreamMapping{
Provider: provider,
MediaID: mediaId,
AnimeID: animeId,
}
onlinestreamMappingCache.Set(formatOnlinestreamMappingCacheKey(provider, mediaId), &mapping)
return db.gormdb.Save(&mapping).Error
}
func (db *Database) DeleteOnlinestreamMapping(provider string, mediaId int) error {
err := db.gormdb.Where("provider = ? AND media_id = ?", provider, mediaId).Delete(&models.OnlinestreamMapping{}).Error
if err != nil {
return err
}
onlinestreamMappingCache.Delete(formatOnlinestreamMappingCacheKey(provider, mediaId))
return nil
}

View File

@@ -0,0 +1,24 @@
package db
import (
"seanime/internal/database/models"
)
func (db *Database) TrimScanSummaryEntries() {
go func() {
var count int64
err := db.gormdb.Model(&models.ScanSummary{}).Count(&count).Error
if err != nil {
db.Logger.Error().Err(err).Msg("Failed to count scan summary entries")
return
}
if count > 10 {
// Leave 5 entries
err = db.gormdb.Delete(&models.ScanSummary{}, "id IN (SELECT id FROM scan_summaries ORDER BY id ASC LIMIT ?)", count-5).Error
if err != nil {
db.Logger.Error().Err(err).Msg("Failed to delete old scan summary entries")
return
}
}
}()
}

View File

@@ -0,0 +1,200 @@
package db
import (
"seanime/internal/database/models"
"gorm.io/gorm/clause"
)
var CurrSettings *models.Settings
func (db *Database) UpsertSettings(settings *models.Settings) (*models.Settings, error) {
err := db.gormdb.Clauses(clause.OnConflict{
Columns: []clause.Column{{Name: "id"}},
UpdateAll: true,
}).Create(settings).Error
if err != nil {
db.Logger.Error().Err(err).Msg("db: Failed to save settings in the database")
return nil, err
}
CurrSettings = settings
db.Logger.Debug().Msg("db: Settings saved")
return settings, nil
}
func (db *Database) GetSettings() (*models.Settings, error) {
if CurrSettings != nil {
return CurrSettings, nil
}
var settings models.Settings
err := db.gormdb.Where("id = ?", 1).Find(&settings).Error
if err != nil {
return nil, err
}
return &settings, nil
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
func (db *Database) GetLibraryPathFromSettings() (string, error) {
settings, err := db.GetSettings()
if err != nil {
return "", err
}
return settings.Library.LibraryPath, nil
}
func (db *Database) GetAdditionalLibraryPathsFromSettings() ([]string, error) {
settings, err := db.GetSettings()
if err != nil {
return []string{}, err
}
return settings.Library.LibraryPaths, nil
}
func (db *Database) GetAllLibraryPathsFromSettings() ([]string, error) {
settings, err := db.GetSettings()
if err != nil {
return []string{}, err
}
if settings.Library == nil {
return []string{}, nil
}
return append([]string{settings.Library.LibraryPath}, settings.Library.LibraryPaths...), nil
}
func (db *Database) AllLibraryPathsFromSettings(settings *models.Settings) *[]string {
if settings.Library == nil {
return &[]string{}
}
r := append([]string{settings.Library.LibraryPath}, settings.Library.LibraryPaths...)
return &r
}
func (db *Database) AutoUpdateProgressIsEnabled() (bool, error) {
settings, err := db.GetSettings()
if err != nil {
return false, err
}
return settings.Library.AutoUpdateProgress, nil
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
var CurrMediastreamSettings *models.MediastreamSettings
func (db *Database) UpsertMediastreamSettings(settings *models.MediastreamSettings) (*models.MediastreamSettings, error) {
err := db.gormdb.Clauses(clause.OnConflict{
Columns: []clause.Column{{Name: "id"}},
UpdateAll: true,
}).Create(settings).Error
if err != nil {
db.Logger.Error().Err(err).Msg("db: Failed to save media streaming settings in the database")
return nil, err
}
CurrMediastreamSettings = settings
db.Logger.Debug().Msg("db: Media streaming settings saved")
return settings, nil
}
func (db *Database) GetMediastreamSettings() (*models.MediastreamSettings, bool) {
if CurrMediastreamSettings != nil {
return CurrMediastreamSettings, true
}
var settings models.MediastreamSettings
err := db.gormdb.Where("id = ?", 1).First(&settings).Error
if err != nil {
return nil, false
}
return &settings, true
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
var CurrTorrentstreamSettings *models.TorrentstreamSettings
func (db *Database) UpsertTorrentstreamSettings(settings *models.TorrentstreamSettings) (*models.TorrentstreamSettings, error) {
err := db.gormdb.Clauses(clause.OnConflict{
Columns: []clause.Column{{Name: "id"}},
UpdateAll: true,
}).Create(settings).Error
if err != nil {
db.Logger.Error().Err(err).Msg("db: Failed to save torrent streaming settings in the database")
return nil, err
}
CurrTorrentstreamSettings = settings
db.Logger.Debug().Msg("db: Torrent streaming settings saved")
return settings, nil
}
func (db *Database) GetTorrentstreamSettings() (*models.TorrentstreamSettings, bool) {
if CurrTorrentstreamSettings != nil {
return CurrTorrentstreamSettings, true
}
var settings models.TorrentstreamSettings
err := db.gormdb.Where("id = ?", 1).First(&settings).Error
if err != nil {
return nil, false
}
return &settings, true
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
var CurrentDebridSettings *models.DebridSettings
func (db *Database) UpsertDebridSettings(settings *models.DebridSettings) (*models.DebridSettings, error) {
err := db.gormdb.Clauses(clause.OnConflict{
Columns: []clause.Column{{Name: "id"}},
UpdateAll: true,
}).Create(settings).Error
if err != nil {
db.Logger.Error().Err(err).Msg("db: Failed to save debrid settings in the database")
return nil, err
}
CurrentDebridSettings = settings
db.Logger.Debug().Msg("db: Debrid settings saved")
return settings, nil
}
func (db *Database) GetDebridSettings() (*models.DebridSettings, bool) {
if CurrentDebridSettings != nil {
return CurrentDebridSettings, true
}
var settings models.DebridSettings
err := db.gormdb.Where("id = ?", 1).First(&settings).Error
if err != nil {
return nil, false
}
return &settings, true
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

View File

@@ -0,0 +1,70 @@
package db
import (
"gorm.io/gorm/clause"
"seanime/internal/database/models"
)
func (db *Database) GetSilencedMediaEntries() ([]*models.SilencedMediaEntry, error) {
var res []*models.SilencedMediaEntry
err := db.gormdb.Find(&res).Error
if err != nil {
return nil, err
}
return res, nil
}
// GetSilencedMediaEntryIds returns the ids of all silenced media entries.
// It returns an empty slice if there is an error.
func (db *Database) GetSilencedMediaEntryIds() ([]int, error) {
var res []*models.SilencedMediaEntry
err := db.gormdb.Find(&res).Error
if err != nil {
return make([]int, 0), err
}
if len(res) == 0 {
return make([]int, 0), nil
}
mIds := make([]int, len(res))
for i, v := range res {
mIds[i] = int(v.ID)
}
return mIds, nil
}
func (db *Database) GetSilencedMediaEntry(mId uint) (*models.SilencedMediaEntry, error) {
var res models.SilencedMediaEntry
err := db.gormdb.First(&res, mId).Error
if err != nil {
return nil, err
}
return &res, nil
}
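// InsertSilencedMediaEntry marks the media entry with the given id as silenced, upserting by id.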
func (db *Database) InsertSilencedMediaEntry(mId uint) error {
err := db.gormdb.Clauses(clause.OnConflict{
Columns: []clause.Column{{Name: "id"}},
UpdateAll: true,
}).Create(&models.SilencedMediaEntry{
BaseModel: models.BaseModel{
ID: mId,
},
}).Error
if err != nil {
return err
}
return nil
}
func (db *Database) DeleteSilencedMediaEntry(id uint) error {
err := db.gormdb.Delete(&models.SilencedMediaEntry{}, id).Error
if err != nil {
return err
}
return nil
}

View File

@@ -0,0 +1,47 @@
package db
import (
"gorm.io/gorm/clause"
"seanime/internal/database/models"
)
var themeCache *models.Theme
func (db *Database) GetTheme() (*models.Theme, error) {
if themeCache != nil {
return themeCache, nil
}
var theme models.Theme
err := db.gormdb.Where("id = ?", 1).Find(&theme).Error
if err != nil {
return nil, err
}
themeCache = &theme
return &theme, nil
}
// UpsertTheme inserts or updates the theme settings.
func (db *Database) UpsertTheme(settings *models.Theme) (*models.Theme, error) {
err := db.gormdb.Clauses(clause.OnConflict{
Columns: []clause.Column{{Name: "id"}},
UpdateAll: true,
}).Create(settings).Error
if err != nil {
db.Logger.Error().Err(err).Msg("db: Failed to save theme in the database")
return nil, err
}
db.Logger.Debug().Msg("db: Theme saved")
themeCache = settings
return settings, nil
}

View File

@@ -0,0 +1,21 @@
package db
import (
"gorm.io/gorm/clause"
"seanime/internal/database/models"
)
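// UpsertToken inserts the token or, if a token with the same id already exists, updates its value.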
func (db *Database) UpsertToken(token *models.Token) (*models.Token, error) {
err := db.gormdb.Clauses(clause.OnConflict{
Columns: []clause.Column{{Name: "id"}},
DoUpdates: clause.AssignmentColumns([]string{"value", "updated_at"}),
}).Create(token).Error
if err != nil {
db.Logger.Error().Err(err).Msg("Failed to save token in the database")
return nil, err
}
return token, nil
}

View File

@@ -0,0 +1,24 @@
package db
import (
"seanime/internal/database/models"
)
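// TrimTorrentstreamHistory asynchronously deletes the 10 oldest torrent stream history entries
// once the table holds more than 50 rows.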
func (db *Database) TrimTorrentstreamHistory() {
go func() {
var count int64
err := db.gormdb.Model(&models.TorrentstreamHistory{}).Count(&count).Error
if err != nil {
db.Logger.Error().Err(err).Msg("database: Failed to count torrent stream history entries")
return
}
if count > 50 {
// Delete the 10 oldest entries (by updated_at)
err = db.gormdb.Delete(&models.TorrentstreamHistory{}, "id IN (SELECT id FROM torrentstream_histories ORDER BY updated_at ASC LIMIT ?)", 10).Error
if err != nil {
db.Logger.Error().Err(err).Msg("database: Failed to delete old torrent stream history entries")
return
}
}
}()
}

View File

@@ -0,0 +1,2 @@
The database stores some structs that are defined outside the `models` package as raw `[]byte` values.
To avoid circular dependencies, this package defines helper methods that marshal those structs to `[]byte` before storing them and unmarshal them back when retrieving them from the database.
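
A minimal sketch of the pattern, for illustration only (the `Example` type is hypothetical; `models.ScanSummary` is reused here purely because it exposes a raw `Value []byte` column):

```go
package db_bridge_example

import (
	"github.com/goccy/go-json"

	"seanime/internal/database/db"
	"seanime/internal/database/models"
)

// Example is a hypothetical struct defined outside the models package.
type Example struct {
	Name string `json:"name"`
}

// InsertExample marshals the struct and stores only its raw JSON bytes.
func InsertExample(database *db.Database, e *Example) error {
	bytes, err := json.Marshal(e)
	if err != nil {
		return err
	}
	return database.Gorm().Create(&models.ScanSummary{Value: bytes}).Error
}

// GetExamples reads the rows back and unmarshals the bytes into the outside struct.
func GetExamples(database *db.Database) ([]*Example, error) {
	var rows []*models.ScanSummary
	if err := database.Gorm().Find(&rows).Error; err != nil {
		return nil, err
	}
	out := make([]*Example, 0, len(rows))
	for _, r := range rows {
		var e Example
		if err := json.Unmarshal(r.Value, &e); err != nil {
			return nil, err
		}
		out = append(out, &e)
	}
	return out, nil
}
```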

View File

@@ -0,0 +1,109 @@
package db_bridge
import (
"github.com/goccy/go-json"
"seanime/internal/database/db"
"seanime/internal/database/models"
"seanime/internal/library/anime"
)
var CurrAutoDownloaderRules []*anime.AutoDownloaderRule
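// GetAutoDownloaderRules returns all auto downloader rules, unmarshaled from their stored JSON values.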
func GetAutoDownloaderRules(db *db.Database) ([]*anime.AutoDownloaderRule, error) {
//if CurrAutoDownloaderRules != nil {
// return CurrAutoDownloaderRules, nil
//}
var res []*models.AutoDownloaderRule
err := db.Gorm().Find(&res).Error
if err != nil {
return nil, err
}
// Unmarshal the data
var rules []*anime.AutoDownloaderRule
for _, r := range res {
smBytes := r.Value
var sm anime.AutoDownloaderRule
if err := json.Unmarshal(smBytes, &sm); err != nil {
return nil, err
}
sm.DbID = r.ID
rules = append(rules, &sm)
}
//CurrAutoDownloaderRules = rules
return rules, nil
}
func GetAutoDownloaderRule(db *db.Database, id uint) (*anime.AutoDownloaderRule, error) {
var res models.AutoDownloaderRule
err := db.Gorm().First(&res, id).Error
if err != nil {
return nil, err
}
// Unmarshal the data
smBytes := res.Value
var sm anime.AutoDownloaderRule
if err := json.Unmarshal(smBytes, &sm); err != nil {
return nil, err
}
sm.DbID = res.ID
return &sm, nil
}
func GetAutoDownloaderRulesByMediaId(db *db.Database, mediaId int) (ret []*anime.AutoDownloaderRule) {
rules, err := GetAutoDownloaderRules(db)
if err != nil {
return
}
for _, rule := range rules {
if rule.MediaId == mediaId {
ret = append(ret, rule)
}
}
return
}
func InsertAutoDownloaderRule(db *db.Database, sm *anime.AutoDownloaderRule) error {
CurrAutoDownloaderRules = nil
// Marshal the data
bytes, err := json.Marshal(sm)
if err != nil {
return err
}
// Save the data
return db.Gorm().Create(&models.AutoDownloaderRule{
Value: bytes,
}).Error
}
func DeleteAutoDownloaderRule(db *db.Database, id uint) error {
CurrAutoDownloaderRules = nil
return db.Gorm().Delete(&models.AutoDownloaderRule{}, id).Error
}
func UpdateAutoDownloaderRule(db *db.Database, id uint, sm *anime.AutoDownloaderRule) error {
CurrAutoDownloaderRules = nil
// Marshal the data
bytes, err := json.Marshal(sm)
if err != nil {
return err
}
// Save the data
return db.Gorm().Model(&models.AutoDownloaderRule{}).Where("id = ?", id).Update("value", bytes).Error
}

View File

@@ -0,0 +1,97 @@
package db_bridge
import (
"github.com/goccy/go-json"
"github.com/samber/mo"
"seanime/internal/database/db"
"seanime/internal/database/models"
"seanime/internal/library/anime"
)
var CurrLocalFilesDbId uint
var CurrLocalFiles mo.Option[[]*anime.LocalFile]
// GetLocalFiles will return the latest local files and the id of the entry.
func GetLocalFiles(db *db.Database) ([]*anime.LocalFile, uint, error) {
if CurrLocalFiles.IsPresent() {
return CurrLocalFiles.MustGet(), CurrLocalFilesDbId, nil
}
// Get the latest entry
var res models.LocalFiles
err := db.Gorm().Last(&res).Error
if err != nil {
return nil, 0, err
}
// Unmarshal the local files
lfsBytes := res.Value
var lfs []*anime.LocalFile
if err := json.Unmarshal(lfsBytes, &lfs); err != nil {
return nil, 0, err
}
db.Logger.Debug().Msg("db: Local files retrieved")
CurrLocalFiles = mo.Some(lfs)
CurrLocalFilesDbId = res.ID
return lfs, res.ID, nil
}
// SaveLocalFiles will save the local files in the database at the given id.
func SaveLocalFiles(db *db.Database, lfsId uint, lfs []*anime.LocalFile) ([]*anime.LocalFile, error) {
// Marshal the local files
marshaledLfs, err := json.Marshal(lfs)
if err != nil {
return nil, err
}
// Save the local files
ret, err := db.UpsertLocalFiles(&models.LocalFiles{
BaseModel: models.BaseModel{
ID: lfsId,
},
Value: marshaledLfs,
})
if err != nil {
return nil, err
}
// Unmarshal the saved local files; if this fails, fall back to returning the input slice
var retLfs []*anime.LocalFile
if err := json.Unmarshal(ret.Value, &retLfs); err != nil {
return lfs, nil
}
CurrLocalFiles = mo.Some(retLfs)
CurrLocalFilesDbId = ret.ID
return retLfs, nil
}
// InsertLocalFiles will insert the local files into the database as a new entry.
func InsertLocalFiles(db *db.Database, lfs []*anime.LocalFile) ([]*anime.LocalFile, error) {
// Marshal the local files
bytes, err := json.Marshal(lfs)
if err != nil {
return nil, err
}
// Save the local files to the database
ret, err := db.InsertLocalFiles(&models.LocalFiles{
Value: bytes,
})
if err != nil {
return nil, err
}
CurrLocalFiles = mo.Some(lfs)
CurrLocalFilesDbId = ret.ID
return lfs, nil
}

View File

@@ -0,0 +1,82 @@
package db_bridge
import (
"github.com/goccy/go-json"
"seanime/internal/database/db"
"seanime/internal/database/models"
"seanime/internal/library/anime"
)
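// GetPlaylists returns all playlists; entries whose local files cannot be unmarshaled are skipped.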
func GetPlaylists(db *db.Database) ([]*anime.Playlist, error) {
var res []*models.PlaylistEntry
err := db.Gorm().Find(&res).Error
if err != nil {
return nil, err
}
playlists := make([]*anime.Playlist, 0)
for _, p := range res {
var localFiles []*anime.LocalFile
if err := json.Unmarshal(p.Value, &localFiles); err == nil {
playlist := anime.NewPlaylist(p.Name)
playlist.SetLocalFiles(localFiles)
playlist.DbId = p.ID
playlists = append(playlists, playlist)
}
}
return playlists, nil
}
func SavePlaylist(db *db.Database, playlist *anime.Playlist) error {
data, err := json.Marshal(playlist.LocalFiles)
if err != nil {
return err
}
playlistEntry := &models.PlaylistEntry{
Name: playlist.Name,
Value: data,
}
return db.Gorm().Save(playlistEntry).Error
}
func DeletePlaylist(db *db.Database, id uint) error {
return db.Gorm().Where("id = ?", id).Delete(&models.PlaylistEntry{}).Error
}
func UpdatePlaylist(db *db.Database, playlist *anime.Playlist) error {
data, err := json.Marshal(playlist.LocalFiles)
if err != nil {
return err
}
// Get the playlist entry
playlistEntry := &models.PlaylistEntry{}
if err := db.Gorm().Where("id = ?", playlist.DbId).First(playlistEntry).Error; err != nil {
return err
}
// Update the playlist entry
playlistEntry.Name = playlist.Name
playlistEntry.Value = data
return db.Gorm().Save(playlistEntry).Error
}
func GetPlaylist(db *db.Database, id uint) (*anime.Playlist, error) {
playlistEntry := &models.PlaylistEntry{}
if err := db.Gorm().Where("id = ?", id).First(playlistEntry).Error; err != nil {
return nil, err
}
var localFiles []*anime.LocalFile
if err := json.Unmarshal(playlistEntry.Value, &localFiles); err != nil {
return nil, err
}
playlist := anime.NewPlaylist(playlistEntry.Name)
playlist.SetLocalFiles(localFiles)
playlist.DbId = playlistEntry.ID
return playlist, nil
}

View File

@@ -0,0 +1,50 @@
package db_bridge
import (
"seanime/internal/database/db"
"seanime/internal/database/models"
"seanime/internal/library/summary"
"github.com/goccy/go-json"
)
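// GetScanSummaries returns all stored scan summaries along with their creation timestamps.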
func GetScanSummaries(database *db.Database) ([]*summary.ScanSummaryItem, error) {
var res []*models.ScanSummary
err := database.Gorm().Find(&res).Error
if err != nil {
return nil, err
}
// Unmarshal the data
var items []*summary.ScanSummaryItem
for _, r := range res {
smBytes := r.Value
var sm summary.ScanSummary
if err := json.Unmarshal(smBytes, &sm); err != nil {
return nil, err
}
items = append(items, &summary.ScanSummaryItem{
CreatedAt: r.CreatedAt,
ScanSummary: &sm,
})
}
return items, nil
}
func InsertScanSummary(db *db.Database, sm *summary.ScanSummary) error {
if sm == nil {
return nil
}
// Marshal the data
bytes, err := json.Marshal(sm)
if err != nil {
return err
}
// Save the data
return db.Gorm().Create(&models.ScanSummary{
Value: bytes,
}).Error
}

View File

@@ -0,0 +1,46 @@
package db_bridge
import (
"github.com/goccy/go-json"
"seanime/internal/database/db"
"seanime/internal/database/models"
hibiketorrent "seanime/internal/extension/hibike/torrent"
)
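// GetTorrentstreamHistory returns the stored torrent for the given media id, if any.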
func GetTorrentstreamHistory(db *db.Database, mId int) (*hibiketorrent.AnimeTorrent, error) {
var history models.TorrentstreamHistory
if err := db.Gorm().Where("media_id = ?", mId).First(&history).Error; err != nil {
return nil, err
}
var torrent hibiketorrent.AnimeTorrent
if err := json.Unmarshal(history.Torrent, &torrent); err != nil {
return nil, err
}
return &torrent, nil
}
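// InsertTorrentstreamHistory stores the torrent for the given media id, replacing any existing entry.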
func InsertTorrentstreamHistory(db *db.Database, mId int, torrent *hibiketorrent.AnimeTorrent) error {
if torrent == nil {
return nil
}
// Marshal the data
bytes, err := json.Marshal(torrent)
if err != nil {
return err
}
// Get current history
var history models.TorrentstreamHistory
if err := db.Gorm().Where("media_id = ?", mId).First(&history).Error; err == nil {
// Update the history
history.Torrent = bytes
return db.Gorm().Save(&history).Error
}
return db.Gorm().Create(&models.TorrentstreamHistory{
MediaId: mId,
Torrent: bytes,
}).Error
}

View File

@@ -0,0 +1,511 @@
package models
import (
"database/sql/driver"
"errors"
"strconv"
"strings"
"time"
)
type BaseModel struct {
ID uint `gorm:"primarykey" json:"id"`
CreatedAt time.Time `json:"createdAt"`
UpdatedAt time.Time `json:"updatedAt"`
}
type Token struct {
BaseModel
Value string `json:"value"`
}
type Account struct {
BaseModel
Username string `gorm:"column:username" json:"username"`
Token string `gorm:"column:token" json:"token"`
Viewer []byte `gorm:"column:viewer" json:"viewer"`
}
// +---------------------+
// | LocalFiles |
// +---------------------+
type LocalFiles struct {
BaseModel
Value []byte `gorm:"column:value" json:"value"`
}
// +---------------------+
// | Settings |
// +---------------------+
type Settings struct {
BaseModel
Library *LibrarySettings `gorm:"embedded" json:"library"`
MediaPlayer *MediaPlayerSettings `gorm:"embedded" json:"mediaPlayer"`
Torrent *TorrentSettings `gorm:"embedded" json:"torrent"`
Manga *MangaSettings `gorm:"embedded" json:"manga"`
Anilist *AnilistSettings `gorm:"embedded" json:"anilist"`
ListSync *ListSyncSettings `gorm:"embedded" json:"listSync"`
AutoDownloader *AutoDownloaderSettings `gorm:"embedded" json:"autoDownloader"`
Discord *DiscordSettings `gorm:"embedded" json:"discord"`
Notifications *NotificationSettings `gorm:"embedded" json:"notifications"`
Nakama *NakamaSettings `gorm:"embedded;embeddedPrefix:nakama_" json:"nakama"`
}
type AnilistSettings struct {
//AnilistClientId string `gorm:"column:anilist_client_id" json:"anilistClientId"`
HideAudienceScore bool `gorm:"column:hide_audience_score" json:"hideAudienceScore"`
EnableAdultContent bool `gorm:"column:enable_adult_content" json:"enableAdultContent"`
BlurAdultContent bool `gorm:"column:blur_adult_content" json:"blurAdultContent"`
}
type LibrarySettings struct {
LibraryPath string `gorm:"column:library_path" json:"libraryPath"`
AutoUpdateProgress bool `gorm:"column:auto_update_progress" json:"autoUpdateProgress"`
DisableUpdateCheck bool `gorm:"column:disable_update_check" json:"disableUpdateCheck"`
TorrentProvider string `gorm:"column:torrent_provider" json:"torrentProvider"`
AutoScan bool `gorm:"column:auto_scan" json:"autoScan"`
EnableOnlinestream bool `gorm:"column:enable_onlinestream" json:"enableOnlinestream"`
IncludeOnlineStreamingInLibrary bool `gorm:"column:include_online_streaming_in_library" json:"includeOnlineStreamingInLibrary"`
DisableAnimeCardTrailers bool `gorm:"column:disable_anime_card_trailers" json:"disableAnimeCardTrailers"`
EnableManga bool `gorm:"column:enable_manga" json:"enableManga"`
DOHProvider string `gorm:"column:doh_provider" json:"dohProvider"`
OpenTorrentClientOnStart bool `gorm:"column:open_torrent_client_on_start" json:"openTorrentClientOnStart"`
OpenWebURLOnStart bool `gorm:"column:open_web_url_on_start" json:"openWebURLOnStart"`
RefreshLibraryOnStart bool `gorm:"column:refresh_library_on_start" json:"refreshLibraryOnStart"`
// v2.1+
AutoPlayNextEpisode bool `gorm:"column:auto_play_next_episode" json:"autoPlayNextEpisode"`
// v2.2+
EnableWatchContinuity bool `gorm:"column:enable_watch_continuity" json:"enableWatchContinuity"`
LibraryPaths LibraryPaths `gorm:"column:library_paths;type:text" json:"libraryPaths"`
AutoSyncOfflineLocalData bool `gorm:"column:auto_sync_offline_local_data" json:"autoSyncOfflineLocalData"`
// v2.6+
ScannerMatchingThreshold float64 `gorm:"column:scanner_matching_threshold" json:"scannerMatchingThreshold"`
ScannerMatchingAlgorithm string `gorm:"column:scanner_matching_algorithm" json:"scannerMatchingAlgorithm"`
// v2.9+
AutoSyncToLocalAccount bool `gorm:"column:auto_sync_to_local_account" json:"autoSyncToLocalAccount"`
AutoSaveCurrentMediaOffline bool `gorm:"column:auto_save_current_media_offline" json:"autoSaveCurrentMediaOffline"`
}
func (o *LibrarySettings) GetLibraryPaths() (ret []string) {
ret = make([]string, len(o.LibraryPaths)+1)
ret[0] = o.LibraryPath
if len(o.LibraryPaths) > 0 {
copy(ret[1:], o.LibraryPaths)
}
return
}
type LibraryPaths []string
func (o *LibraryPaths) Scan(src interface{}) error {
str, ok := src.(string)
if !ok {
return errors.New("src value cannot cast to string")
}
*o = strings.Split(str, ",")
return nil
}
func (o LibraryPaths) Value() (driver.Value, error) {
if len(o) == 0 {
return nil, nil
}
return strings.Join(o, ","), nil
}
type NakamaSettings struct {
Enabled bool `gorm:"column:enabled" json:"enabled"`
// Username is the name used to identify a peer or host.
Username string `gorm:"column:username" json:"username"`
// IsHost allows the server to act as a host for other clients. This requires a password to be set.
IsHost bool `gorm:"column:is_host" json:"isHost"`
HostPassword string `gorm:"column:host_password" json:"hostPassword"`
RemoteServerURL string `gorm:"column:remote_server_url" json:"remoteServerURL"`
RemoteServerPassword string `gorm:"column:remote_server_password" json:"remoteServerPassword"`
// IncludeNakamaAnimeLibrary adds the local anime library of the host to the connected clients.
IncludeNakamaAnimeLibrary bool `gorm:"column:include_nakama_anime_library" json:"includeNakamaAnimeLibrary"`
// HostShareLocalAnimeLibrary shares the local anime library to connected clients
HostShareLocalAnimeLibrary bool `gorm:"column:host_share_local_anime_library" json:"hostShareLocalAnimeLibrary"`
// HostUnsharedAnimeIds is a list of anime IDs that should not be shared with connected clients.
HostUnsharedAnimeIds IntSlice `gorm:"column:host_unshared_anime_ids;type:text" json:"hostUnsharedAnimeIds"`
// HostEnablePortForwarding enables port forwarding.
HostEnablePortForwarding bool `gorm:"column:host_enable_port_forwarding" json:"hostEnablePortForwarding"`
}
type IntSlice []int
func (o *IntSlice) Scan(src interface{}) error {
str, ok := src.(string)
if !ok {
return errors.New("src value cannot cast to string")
}
ids := strings.Split(str, ",")
*o = make(IntSlice, len(ids))
for i, id := range ids {
(*o)[i], _ = strconv.Atoi(id)
}
return nil
}
func (o IntSlice) Value() (driver.Value, error) {
if len(o) == 0 {
return nil, nil
}
strs := make([]string, len(o))
for i, id := range o {
strs[i] = strconv.Itoa(id)
}
return strings.Join(strs, ","), nil
}
type MangaSettings struct {
DefaultProvider string `gorm:"column:default_manga_provider" json:"defaultMangaProvider"`
AutoUpdateProgress bool `gorm:"column:manga_auto_update_progress" json:"mangaAutoUpdateProgress"`
LocalSourceDirectory string `gorm:"column:manga_local_source_directory" json:"mangaLocalSourceDirectory"`
}
type MediaPlayerSettings struct {
Default string `gorm:"column:default_player" json:"defaultPlayer"` // "vlc" or "mpc-hc"
Host string `gorm:"column:player_host" json:"host"`
VlcUsername string `gorm:"column:vlc_username" json:"vlcUsername"`
VlcPassword string `gorm:"column:vlc_password" json:"vlcPassword"`
VlcPort int `gorm:"column:vlc_port" json:"vlcPort"`
VlcPath string `gorm:"column:vlc_path" json:"vlcPath"`
MpcPort int `gorm:"column:mpc_port" json:"mpcPort"`
MpcPath string `gorm:"column:mpc_path" json:"mpcPath"`
MpvSocket string `gorm:"column:mpv_socket" json:"mpvSocket"`
MpvPath string `gorm:"column:mpv_path" json:"mpvPath"`
MpvArgs string `gorm:"column:mpv_args" json:"mpvArgs"`
IinaSocket string `gorm:"column:iina_socket" json:"iinaSocket"`
IinaPath string `gorm:"column:iina_path" json:"iinaPath"`
IinaArgs string `gorm:"column:iina_args" json:"iinaArgs"`
}
type TorrentSettings struct {
Default string `gorm:"column:default_torrent_client" json:"defaultTorrentClient"`
QBittorrentPath string `gorm:"column:qbittorrent_path" json:"qbittorrentPath"`
QBittorrentHost string `gorm:"column:qbittorrent_host" json:"qbittorrentHost"`
QBittorrentPort int `gorm:"column:qbittorrent_port" json:"qbittorrentPort"`
QBittorrentUsername string `gorm:"column:qbittorrent_username" json:"qbittorrentUsername"`
QBittorrentPassword string `gorm:"column:qbittorrent_password" json:"qbittorrentPassword"`
QBittorrentTags string `gorm:"column:qbittorrent_tags" json:"qbittorrentTags"`
TransmissionPath string `gorm:"column:transmission_path" json:"transmissionPath"`
TransmissionHost string `gorm:"column:transmission_host" json:"transmissionHost"`
TransmissionPort int `gorm:"column:transmission_port" json:"transmissionPort"`
TransmissionUsername string `gorm:"column:transmission_username" json:"transmissionUsername"`
TransmissionPassword string `gorm:"column:transmission_password" json:"transmissionPassword"`
// v2.1+
ShowActiveTorrentCount bool `gorm:"column:show_active_torrent_count" json:"showActiveTorrentCount"`
// v2.2+
HideTorrentList bool `gorm:"column:hide_torrent_list" json:"hideTorrentList"`
}
type ListSyncSettings struct {
Automatic bool `gorm:"column:automatic_sync" json:"automatic"`
Origin string `gorm:"column:sync_origin" json:"origin"`
}
type DiscordSettings struct {
EnableRichPresence bool `gorm:"column:enable_rich_presence" json:"enableRichPresence"`
EnableAnimeRichPresence bool `gorm:"column:enable_anime_rich_presence" json:"enableAnimeRichPresence"`
EnableMangaRichPresence bool `gorm:"column:enable_manga_rich_presence" json:"enableMangaRichPresence"`
RichPresenceHideSeanimeRepositoryButton bool `gorm:"column:rich_presence_hide_seanime_repository_button" json:"richPresenceHideSeanimeRepositoryButton"`
RichPresenceShowAniListMediaButton bool `gorm:"column:rich_presence_show_anilist_media_button" json:"richPresenceShowAniListMediaButton"`
RichPresenceShowAniListProfileButton bool `gorm:"column:rich_presence_show_anilist_profile_button" json:"richPresenceShowAniListProfileButton"`
RichPresenceUseMediaTitleStatus bool `gorm:"column:rich_presence_use_media_title_status;default:true" json:"richPresenceUseMediaTitleStatus"`
}
type NotificationSettings struct {
DisableNotifications bool `gorm:"column:disable_notifications" json:"disableNotifications"`
DisableAutoDownloaderNotifications bool `gorm:"column:disable_auto_downloader_notifications" json:"disableAutoDownloaderNotifications"`
DisableAutoScannerNotifications bool `gorm:"column:disable_auto_scanner_notifications" json:"disableAutoScannerNotifications"`
}
// +---------------------+
// | MAL |
// +---------------------+
type Mal struct {
BaseModel
Username string `gorm:"column:username" json:"username"`
AccessToken string `gorm:"column:access_token" json:"accessToken"`
RefreshToken string `gorm:"column:refresh_token" json:"refreshToken"`
TokenExpiresAt time.Time `gorm:"column:token_expires_at" json:"tokenExpiresAt"`
}
// +---------------------+
// | Scan Summary |
// +---------------------+
type ScanSummary struct {
BaseModel
Value []byte `gorm:"column:value" json:"value"`
}
// +---------------------+
// | Auto downloader |
// +---------------------+
type AutoDownloaderRule struct {
BaseModel
Value []byte `gorm:"column:value" json:"value"`
}
type AutoDownloaderItem struct {
BaseModel
RuleID uint `gorm:"column:rule_id" json:"ruleId"`
MediaID int `gorm:"column:media_id" json:"mediaId"`
Episode int `gorm:"column:episode" json:"episode"`
Link string `gorm:"column:link" json:"link"`
Hash string `gorm:"column:hash" json:"hash"`
Magnet string `gorm:"column:magnet" json:"magnet"`
TorrentName string `gorm:"column:torrent_name" json:"torrentName"`
Downloaded bool `gorm:"column:downloaded" json:"downloaded"`
}
type AutoDownloaderSettings struct {
Provider string `gorm:"column:auto_downloader_provider" json:"provider"`
Interval int `gorm:"column:auto_downloader_interval" json:"interval"`
Enabled bool `gorm:"column:auto_downloader_enabled" json:"enabled"`
DownloadAutomatically bool `gorm:"column:auto_downloader_download_automatically" json:"downloadAutomatically"`
EnableEnhancedQueries bool `gorm:"column:auto_downloader_enable_enhanced_queries" json:"enableEnhancedQueries"`
EnableSeasonCheck bool `gorm:"column:auto_downloader_enable_season_check" json:"enableSeasonCheck"`
UseDebrid bool `gorm:"column:auto_downloader_use_debrid" json:"useDebrid"`
}
// +---------------------+
// | Media Entry |
// +---------------------+
type SilencedMediaEntry struct {
BaseModel
}
// +---------------------+
// | Theme |
// +---------------------+
type Theme struct {
BaseModel
// Main
EnableColorSettings bool `gorm:"column:enable_color_settings" json:"enableColorSettings"`
BackgroundColor string `gorm:"column:background_color" json:"backgroundColor"`
AccentColor string `gorm:"column:accent_color" json:"accentColor"`
SidebarBackgroundColor string `gorm:"column:sidebar_background_color" json:"sidebarBackgroundColor"` // DEPRECATED
AnimeEntryScreenLayout string `gorm:"column:anime_entry_screen_layout" json:"animeEntryScreenLayout"` // DEPRECATED
ExpandSidebarOnHover bool `gorm:"column:expand_sidebar_on_hover" json:"expandSidebarOnHover"`
HideTopNavbar bool `gorm:"column:hide_top_navbar" json:"hideTopNavbar"`
EnableMediaCardBlurredBackground bool `gorm:"column:enable_media_card_blurred_background" json:"enableMediaCardBlurredBackground"`
// Note: These are named "libraryScreen" but are used on all pages
LibraryScreenCustomBackgroundImage string `gorm:"column:library_screen_custom_background_image" json:"libraryScreenCustomBackgroundImage"`
LibraryScreenCustomBackgroundOpacity int `gorm:"column:library_screen_custom_background_opacity" json:"libraryScreenCustomBackgroundOpacity"`
// Anime
SmallerEpisodeCarouselSize bool `gorm:"column:smaller_episode_carousel_size" json:"smallerEpisodeCarouselSize"`
// Library Screen (Anime & Manga)
// LibraryScreenBannerType: "dynamic", "custom"
LibraryScreenBannerType string `gorm:"column:library_screen_banner_type" json:"libraryScreenBannerType"`
LibraryScreenCustomBannerImage string `gorm:"column:library_screen_custom_banner_image" json:"libraryScreenCustomBannerImage"`
LibraryScreenCustomBannerPosition string `gorm:"column:library_screen_custom_banner_position" json:"libraryScreenCustomBannerPosition"`
LibraryScreenCustomBannerOpacity int `gorm:"column:library_screen_custom_banner_opacity" json:"libraryScreenCustomBannerOpacity"`
DisableLibraryScreenGenreSelector bool `gorm:"column:disable_library_screen_genre_selector" json:"disableLibraryScreenGenreSelector"`
LibraryScreenCustomBackgroundBlur string `gorm:"column:library_screen_custom_background_blur" json:"libraryScreenCustomBackgroundBlur"`
EnableMediaPageBlurredBackground bool `gorm:"column:enable_media_page_blurred_background" json:"enableMediaPageBlurredBackground"`
DisableSidebarTransparency bool `gorm:"column:disable_sidebar_transparency" json:"disableSidebarTransparency"`
UseLegacyEpisodeCard bool `gorm:"column:use_legacy_episode_card" json:"useLegacyEpisodeCard"` // DEPRECATED
DisableCarouselAutoScroll bool `gorm:"column:disable_carousel_auto_scroll" json:"disableCarouselAutoScroll"`
// v2.6+
MediaPageBannerType string `gorm:"column:media_page_banner_type" json:"mediaPageBannerType"`
MediaPageBannerSize string `gorm:"column:media_page_banner_size" json:"mediaPageBannerSize"`
MediaPageBannerInfoBoxSize string `gorm:"column:media_page_banner_info_box_size" json:"mediaPageBannerInfoBoxSize"`
// v2.7+
ShowEpisodeCardAnimeInfo bool `gorm:"column:show_episode_card_anime_info" json:"showEpisodeCardAnimeInfo"`
ContinueWatchingDefaultSorting string `gorm:"column:continue_watching_default_sorting" json:"continueWatchingDefaultSorting"`
AnimeLibraryCollectionDefaultSorting string `gorm:"column:anime_library_collection_default_sorting" json:"animeLibraryCollectionDefaultSorting"`
MangaLibraryCollectionDefaultSorting string `gorm:"column:manga_library_collection_default_sorting" json:"mangaLibraryCollectionDefaultSorting"`
ShowAnimeUnwatchedCount bool `gorm:"column:show_anime_unwatched_count" json:"showAnimeUnwatchedCount"`
ShowMangaUnreadCount bool `gorm:"column:show_manga_unread_count" json:"showMangaUnreadCount"`
// v2.8+
HideEpisodeCardDescription bool `gorm:"column:hide_episode_card_description" json:"hideEpisodeCardDescription"`
HideDownloadedEpisodeCardFilename bool `gorm:"column:hide_downloaded_episode_card_filename" json:"hideDownloadedEpisodeCardFilename"`
CustomCSS string `gorm:"column:custom_css" json:"customCSS"`
MobileCustomCSS string `gorm:"column:mobile_custom_css" json:"mobileCustomCSS"`
// v2.9+
UnpinnedMenuItems StringSlice `gorm:"column:unpinned_menu_items;type:text" json:"unpinnedMenuItems"`
}
// +---------------------+
// | Playlist |
// +---------------------+
type PlaylistEntry struct {
BaseModel
Name string `gorm:"column:name" json:"name"`
Value []byte `gorm:"column:value" json:"value"`
}
// +------------------------+
// | Chapter Download Queue |
// +------------------------+
type ChapterDownloadQueueItem struct {
BaseModel
Provider string `gorm:"column:provider" json:"provider"`
MediaID int `gorm:"column:media_id" json:"mediaId"`
ChapterID string `gorm:"column:chapter_id" json:"chapterId"`
ChapterNumber string `gorm:"column:chapter_number" json:"chapterNumber"`
PageData []byte `gorm:"column:page_data" json:"pageData"` // Contains map of page index to page details
Status string `gorm:"column:status" json:"status"`
}
// +---------------------+
// | MediaStream |
// +---------------------+
type MediastreamSettings struct {
BaseModel
// DEVNOTE: Should really be "Enabled"
TranscodeEnabled bool `gorm:"column:transcode_enabled" json:"transcodeEnabled"`
TranscodeHwAccel string `gorm:"column:transcode_hw_accel" json:"transcodeHwAccel"`
TranscodeThreads int `gorm:"column:transcode_threads" json:"transcodeThreads"`
TranscodePreset string `gorm:"column:transcode_preset" json:"transcodePreset"`
DisableAutoSwitchToDirectPlay bool `gorm:"column:disable_auto_switch_to_direct_play" json:"disableAutoSwitchToDirectPlay"`
DirectPlayOnly bool `gorm:"column:direct_play_only" json:"directPlayOnly"`
PreTranscodeEnabled bool `gorm:"column:pre_transcode_enabled" json:"preTranscodeEnabled"`
PreTranscodeLibraryDir string `gorm:"column:pre_transcode_library_dir" json:"preTranscodeLibraryDir"`
FfmpegPath string `gorm:"column:ffmpeg_path" json:"ffmpegPath"`
FfprobePath string `gorm:"column:ffprobe_path" json:"ffprobePath"`
// v2.2+
TranscodeHwAccelCustomSettings string `gorm:"column:transcode_hw_accel_custom_settings" json:"transcodeHwAccelCustomSettings"`
//TranscodeTempDir string `gorm:"column:transcode_temp_dir" json:"transcodeTempDir"` // DEPRECATED
}
// +---------------------+
// | TorrentStream |
// +---------------------+
type TorrentstreamSettings struct {
BaseModel
Enabled bool `gorm:"column:enabled" json:"enabled"`
AutoSelect bool `gorm:"column:auto_select" json:"autoSelect"`
PreferredResolution string `gorm:"column:preferred_resolution" json:"preferredResolution"`
DisableIPV6 bool `gorm:"column:disable_ipv6" json:"disableIPV6"`
DownloadDir string `gorm:"column:download_dir" json:"downloadDir"`
AddToLibrary bool `gorm:"column:add_to_library" json:"addToLibrary"`
TorrentClientHost string `gorm:"column:torrent_client_host" json:"torrentClientHost"`
TorrentClientPort int `gorm:"column:torrent_client_port" json:"torrentClientPort"`
StreamingServerHost string `gorm:"column:streaming_server_host" json:"streamingServerHost"`
StreamingServerPort int `gorm:"column:streaming_server_port" json:"streamingServerPort"`
//FallbackToTorrentStreamingView bool `gorm:"column:fallback_to_torrent_streaming_view" json:"fallbackToTorrentStreamingView"` // DEPRECATED
IncludeInLibrary bool `gorm:"column:include_in_library" json:"includeInLibrary"`
// v2.6+
StreamUrlAddress string `gorm:"column:stream_url_address" json:"streamUrlAddress"`
// v2.7+
SlowSeeding bool `gorm:"column:slow_seeding" json:"slowSeeding"`
}
type TorrentstreamHistory struct {
BaseModel
MediaId int `gorm:"column:media_id" json:"mediaId"`
Torrent []byte `gorm:"column:torrent" json:"torrent"`
}
// +---------------------+
// | Filler |
// +---------------------+
type MediaFiller struct {
BaseModel
Provider string `gorm:"column:provider" json:"provider"`
Slug string `gorm:"column:slug" json:"slug"`
MediaID int `gorm:"column:media_id" json:"mediaId"`
LastFetchedAt time.Time `gorm:"column:last_fetched_at" json:"lastFetchedAt"`
Data []byte `gorm:"column:data" json:"data"`
}
// +---------------------+
// | Manga |
// +---------------------+
type MangaMapping struct {
BaseModel
Provider string `gorm:"column:provider" json:"provider"`
MediaID int `gorm:"column:media_id" json:"mediaId"`
MangaID string `gorm:"column:manga_id" json:"mangaId"` // ID from search result, used to fetch chapters
}
type MangaChapterContainer struct {
BaseModel
Provider string `gorm:"column:provider" json:"provider"`
MediaID int `gorm:"column:media_id" json:"mediaId"`
ChapterID string `gorm:"column:chapter_id" json:"chapterId"`
Data []byte `gorm:"column:data" json:"data"`
}
// +---------------------+
// | Online streaming |
// +---------------------+
type OnlinestreamMapping struct {
BaseModel
Provider string `gorm:"column:provider" json:"provider"`
MediaID int `gorm:"column:media_id" json:"mediaId"`
AnimeID string `gorm:"column:anime_id" json:"anime_id"` // ID from search result, used to fetch episodes
}
// +---------------------+
// | Debrid |
// +---------------------+
type DebridSettings struct {
BaseModel
Enabled bool `gorm:"column:enabled" json:"enabled"`
Provider string `gorm:"column:provider" json:"provider"`
ApiKey string `gorm:"column:api_key" json:"apiKey"`
//FallbackToDebridStreamingView bool `gorm:"column:fallback_to_debrid_streaming_view" json:"fallbackToDebridStreamingView"` // DEPRECATED
IncludeDebridStreamInLibrary bool `gorm:"column:include_debrid_stream_in_library" json:"includeDebridStreamInLibrary"`
StreamAutoSelect bool `gorm:"column:stream_auto_select" json:"streamAutoSelect"`
StreamPreferredResolution string `gorm:"column:stream_preferred_resolution" json:"streamPreferredResolution"`
}
type DebridTorrentItem struct {
BaseModel
TorrentItemID string `gorm:"column:torrent_item_id" json:"torrentItemId"`
Destination string `gorm:"column:destination" json:"destination"`
Provider string `gorm:"column:provider" json:"provider"`
MediaId int `gorm:"column:media_id" json:"mediaId"`
}
// +---------------------+
// | Plugin |
// +---------------------+
type PluginData struct {
BaseModel
PluginID string `gorm:"column:plugin_id;index" json:"pluginId"`
Data []byte `gorm:"column:data" json:"data"`
}
///////////////////////////////////////////////////////////////////////////
type StringSlice []string
func (o *StringSlice) Scan(src interface{}) error {
str, ok := src.(string)
if !ok {
return errors.New("src value cannot cast to string")
}
*o = strings.Split(str, ",")
return nil
}
func (o StringSlice) Value() (driver.Value, error) {
if len(o) == 0 {
return nil, nil
}
return strings.Join(o, ","), nil
}

Some files were not shown because too many files have changed in this diff.