node build fixed

This commit is contained in:
ra_ma
2025-09-20 14:08:38 +01:00
parent c6ebbe069d
commit 3d298fa434
1516 changed files with 535727 additions and 2 deletions

View File

@@ -0,0 +1,293 @@
package torrent_analyzer
import (
"errors"
"path/filepath"
"seanime/internal/api/anilist"
"seanime/internal/api/metadata"
"seanime/internal/library/anime"
"seanime/internal/library/scanner"
"seanime/internal/platforms/platform"
"seanime/internal/util"
"seanime/internal/util/limiter"
"github.com/rs/zerolog"
lop "github.com/samber/lo/parallel"
)
type (
	// Analyzer is a service similar to the scanner, but it is used to analyze torrent files.
	// i.e. torrent files instead of local files.
	Analyzer struct {
		files            []*File                // one entry per filepath passed to NewAnalyzer, same order
		media            *anilist.CompleteAnime // the media the torrent files are compared against
		platform         platform.Platform      // used to reach the AniList client during scanning
		logger           *zerolog.Logger
		metadataProvider metadata.Provider
		forceMatch       bool // when true, matching is bypassed and every file gets the media's ID
	}
	// Analysis contains the results of the analysis.
	Analysis struct {
		files         []*File // Hydrated after scanFiles is called
		selectedFiles []*File // Hydrated after findCorrespondingFiles is called
		media         *anilist.CompleteAnime
	}
	// File represents a torrent file and contains its metadata.
	File struct {
		index     int              // position of the file in the original filepath slice
		path      string           // slash-normalized path of the torrent file
		localFile *anime.LocalFile // scanner representation; metadata hydrated by scanFiles
	}
)
type (
	// NewAnalyzerOptions contains the dependencies and inputs for NewAnalyzer.
	NewAnalyzerOptions struct {
		Logger           *zerolog.Logger
		Filepaths        []string               // Filepath of the torrent files
		Media            *anilist.CompleteAnime // The media to compare the files with
		Platform         platform.Platform
		MetadataProvider metadata.Provider
		// This basically skips the matching process and forces the media ID to be set.
		// Used for the auto-select feature because the media is already known.
		ForceMatch bool
	}
)
// NewAnalyzer builds an Analyzer from the given options.
// Each filepath is wrapped in a File whose index mirrors its position
// in opts.Filepaths.
func NewAnalyzer(opts *NewAnalyzerOptions) *Analyzer {
	// Wrap each path in parallel; newFile is pure so order/indices are stable.
	wrapped := lop.Map(opts.Filepaths, func(p string, i int) *File {
		return newFile(i, p)
	})
	analyzer := &Analyzer{
		files:            wrapped,
		media:            opts.Media,
		platform:         opts.Platform,
		logger:           opts.Logger,
		metadataProvider: opts.MetadataProvider,
		forceMatch:       opts.ForceMatch,
	}
	return analyzer
}
// AnalyzeTorrentFiles scans the files and returns an Analysis struct containing methods to get the results.
//
// It returns an error when the platform dependency is missing or when
// scanning/matching the files fails.
func (a *Analyzer) AnalyzeTorrentFiles() (*Analysis, error) {
	if a.platform == nil {
		// BUGFIX: the message previously said "anilist client wrapper is nil",
		// which no longer matches the field actually being checked.
		return nil, errors.New("platform is nil")
	}
	if err := a.scanFiles(); err != nil {
		return nil, err
	}
	analysis := &Analysis{
		files: a.files,
		media: a.media,
	}
	return analysis, nil
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// GetCorrespondingFiles returns every file whose local file was matched to the
// analyzed media, keyed by the file's index in the original filepath slice.
func (a *Analysis) GetCorrespondingFiles() map[int]*File {
	selected, _ := a.getCorrespondingFiles(func(*File) bool { return true })
	return selected
}
// GetCorrespondingMainFiles returns the matched files whose local file is a
// "main" episode file, keyed by index.
func (a *Analysis) GetCorrespondingMainFiles() map[int]*File {
	mains, _ := a.getCorrespondingFiles(func(file *File) bool {
		return file.localFile.IsMain()
	})
	return mains
}
// GetMainFileByEpisode returns the matched "main" file for the given episode
// number, along with whether one was found.
func (a *Analysis) GetMainFileByEpisode(episodeNumber int) (*File, bool) {
	mains, _ := a.getCorrespondingFiles(func(file *File) bool {
		return file.localFile.IsMain()
	})
	for _, file := range mains {
		if file.localFile.Metadata.Episode == episodeNumber {
			return file, true
		}
	}
	return nil, false
}
// GetFileByAniDBEpisode returns the first file whose hydrated metadata carries
// the given AniDB episode string, along with whether one was found.
func (a *Analysis) GetFileByAniDBEpisode(episode string) (*File, bool) {
	for _, file := range a.files {
		if file.localFile.Metadata.AniDBEpisode != episode {
			continue
		}
		return file, true
	}
	return nil, false
}
// GetUnselectedFiles returns every file that was NOT matched to the analyzed
// media, keyed by index. (With an always-true filter, only media-ID mismatches
// end up unselected.)
func (a *Analysis) GetUnselectedFiles() map[int]*File {
	_, unselected := a.getCorrespondingFiles(func(*File) bool { return true })
	return unselected
}
// getCorrespondingFiles partitions the analyzed files into (selected, unselected)
// maps keyed by file index. A file is selected when its local file was matched
// to the analyzed media AND it passes the supplied filter; everything else is
// unselected.
func (a *Analysis) getCorrespondingFiles(filter func(f *File) bool) (map[int]*File, map[int]*File) {
	selected := make(map[int]*File)
	unselected := make(map[int]*File)
	for _, file := range a.files {
		matchesMedia := file.localFile.MediaId == a.media.ID
		if matchesMedia && filter(file) {
			selected[file.index] = file
			continue
		}
		unselected[file.index] = file
	}
	return selected, unselected
}
// GetIndices returns the indices of the files.
//
// Example:
//
//	selectedFilesMap := analysis.GetCorrespondingMainFiles()
//	selectedIndices := analysis.GetIndices(selectedFilesMap)
func (a *Analysis) GetIndices(files map[int]*File) []int {
	indices := make([]int, 0, len(files))
	for index := range files {
		indices = append(indices, index)
	}
	return indices
}
// GetFiles returns all analyzed files, in their original order.
func (a *Analysis) GetFiles() []*File {
	return a.files
}
// GetUnselectedIndices takes a map of selected files and returns the indices of the unselected files.
//
// Example:
//
//	analysis, _ := analyzer.AnalyzeTorrentFiles()
//	selectedFiles := analysis.GetCorrespondingMainFiles()
//	indicesToRemove := analysis.GetUnselectedIndices(selectedFiles)
func (a *Analysis) GetUnselectedIndices(files map[int]*File) []int {
	unselected := make([]int, 0)
	// Iterate slice positions so every file index is considered exactly once.
	for index := range a.files {
		if _, isSelected := files[index]; isSelected {
			continue
		}
		unselected = append(unselected, index)
	}
	return unselected
}
// GetLocalFile returns the scanner-side local file backing this torrent file.
func (f *File) GetLocalFile() *anime.LocalFile {
	return f.localFile
}
// GetIndex returns the file's position in the original filepath slice.
func (f *File) GetIndex() int {
	return f.index
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// scanFiles scans the files and matches them with the media.
//
// Pipeline: fetch the media's relation tree -> match each torrent-derived
// local file against the tree -> optionally force the media ID -> hydrate
// episode metadata -> write the hydrated local files back onto a.files.
// Returns an error if fetching the tree or matching fails.
func (a *Analyzer) scanFiles() error {
	completeAnimeCache := anilist.NewCompleteAnimeCache()
	anilistRateLimiter := limiter.NewAnilistLimiter()
	lfs := a.getLocalFiles() // Extract local files from the Files
	// +---------------------+
	// |   MediaContainer    |
	// +---------------------+
	tree := anilist.NewCompleteAnimeRelationTree()
	if err := a.media.FetchMediaTree(anilist.FetchMediaTreeAll, a.platform.GetAnilistClient(), anilistRateLimiter, tree, completeAnimeCache); err != nil {
		return err
	}
	allMedia := tree.Values()
	mc := scanner.NewMediaContainer(&scanner.MediaContainerOptions{
		AllMedia: allMedia,
	})
	//scanLogger, _ := scanner.NewScanLogger("./logs")
	// +---------------------+
	// |       Matcher       |
	// +---------------------+
	matcher := &scanner.Matcher{
		LocalFiles:         lfs,
		MediaContainer:     mc,
		CompleteAnimeCache: completeAnimeCache,
		Logger:             util.NewLogger(),
		ScanLogger:         nil,
		ScanSummaryLogger:  nil,
	}
	err := matcher.MatchLocalFilesWithMedia()
	if err != nil {
		return err
	}
	// When matching is intentionally skipped (auto-select), assign the known
	// media ID to every local file regardless of the matcher's result.
	if a.forceMatch {
		for _, lf := range lfs {
			lf.MediaId = a.media.GetID()
		}
	}
	// +---------------------+
	// |    FileHydrator     |
	// +---------------------+
	fh := &scanner.FileHydrator{
		LocalFiles:         lfs,
		AllMedia:           mc.NormalizedMedia,
		CompleteAnimeCache: completeAnimeCache,
		Platform:           a.platform,
		MetadataProvider:   a.metadataProvider,
		AnilistRateLimiter: anilistRateLimiter,
		Logger:             a.logger,
		ScanLogger:         nil,
		ScanSummaryLogger:  nil,
		// Branchless select: a.media.GetID() when forceMatch, else 0 (no forcing).
		ForceMediaId: map[bool]int{true: a.media.GetID(), false: 0}[a.forceMatch],
	}
	fh.HydrateMetadata()
	// Copy the hydrated local files back onto the File wrappers, matched by path.
	// NOTE(review): O(n^2) in the number of files — fine for typical torrents,
	// revisit if very large batches become common.
	for _, af := range a.files {
		for _, lf := range lfs {
			if lf.Path == af.localFile.Path {
				af.localFile = lf // Update the local file in the File
				break
			}
		}
	}
	return nil
}
// newFile creates a new File from a file path.
// The path is normalized to forward slashes before being stored and before
// the backing local file is created.
func newFile(idx int, path string) *File {
	normalized := filepath.ToSlash(path)
	file := &File{
		index:     idx,
		path:      normalized,
		localFile: anime.NewLocalFile(normalized, ""),
	}
	return file
}
// getLocalFiles collects the local files backing each analyzed File,
// preserving order.
func (a *Analyzer) getLocalFiles() []*anime.LocalFile {
	lfs := make([]*anime.LocalFile, 0, len(a.files))
	for _, file := range a.files {
		lfs = append(lfs, file.localFile)
	}
	return lfs
}

View File

@@ -0,0 +1,110 @@
package torrent_analyzer
import (
"seanime/internal/api/anilist"
"seanime/internal/api/metadata"
"seanime/internal/platforms/anilist_platform"
"seanime/internal/test_utils"
"seanime/internal/util"
"testing"
"github.com/stretchr/testify/assert"
)
// TestSelectFilesFromSeason tests the selection of the accurate season files from a list of files from all seasons.
// Each case feeds a mixed-season filepath list plus the AniList media ID of one
// season, and expects the analyzer to select exactly the indices that belong to
// that season.
func TestSelectFilesFromSeason(t *testing.T) {
	test_utils.InitTestProvider(t, test_utils.Anilist())
	logger := util.NewLogger()
	anilistClient := anilist.TestGetMockAnilistClient()
	anilistPlatform := anilist_platform.NewAnilistPlatform(anilistClient, logger)
	metadataProvider := metadata.GetMockProvider(t)
	tests := []struct {
		name            string
		mediaId         int      // The media ID of the season
		filepaths       []string // All filepaths from all seasons
		expectedIndices []int    // The indices of the selected files
	}{
		{
			name: "Kakegurui xx",
			filepaths: []string{
				"Kakegurui [BD][1080p][HEVC 10bit x265][Dual Audio][Tenrai-Sensei]/Season 1/Kakegurui - S01E01 - The Woman Called Yumeko Jabami.mkv", // should be selected
				"Kakegurui [BD][1080p][HEVC 10bit x265][Dual Audio][Tenrai-Sensei]/Season 2/Kakegurui xx - S02E01 - The Woman Called Yumeko Jabami.mkv",
			},
			mediaId:         98314,
			expectedIndices: []int{0},
		},
		{
			name: "Kimi ni Todoke Season 2",
			filepaths: []string{
				"[Judas] Kimi ni Todoke (Seasons 1-2) [BD 1080p][HEVC x265 10bit][Eng-Subs]/[Judas] Kimi ni Todoke S1/[Judas] Kimi ni Todoke - S01E01.mkv",
				"[Judas] Kimi ni Todoke (Seasons 1-2) [BD 1080p][HEVC x265 10bit][Eng-Subs]/[Judas] Kimi ni Todoke S1/[Judas] Kimi ni Todoke - S01E02.mkv",
				"[Judas] Kimi ni Todoke (Seasons 1-2) [BD 1080p][HEVC x265 10bit][Eng-Subs]/[Judas] Kimi ni Todoke S2/[Judas] Kimi ni Todoke - S02E01.mkv", // should be selected
				"[Judas] Kimi ni Todoke (Seasons 1-2) [BD 1080p][HEVC x265 10bit][Eng-Subs]/[Judas] Kimi ni Todoke S2/[Judas] Kimi ni Todoke - S02E02.mkv", // should be selected
			},
			mediaId:         9656,
			expectedIndices: []int{2, 3},
		},
		{
			name: "Spy x Family Part 2",
			filepaths: []string{
				"[SubsPlease] Spy x Family (01-25) (1080p) [Batch]/[SubsPlease] Spy x Family - 10v2 (1080p) [F9F5C62B].mkv",
				"[SubsPlease] Spy x Family (01-25) (1080p) [Batch]/[SubsPlease] Spy x Family - 11v2 (1080p) [F9F5C62B].mkv",
				"[SubsPlease] Spy x Family (01-25) (1080p) [Batch]/[SubsPlease] Spy x Family - 12v2 (1080p) [F9F5C62B].mkv",
				"[SubsPlease] Spy x Family (01-25) (1080p) [Batch]/[SubsPlease] Spy x Family - 13v2 (1080p) [F9F5C62B].mkv", // should be selected
				"[SubsPlease] Spy x Family (01-25) (1080p) [Batch]/[SubsPlease] Spy x Family - 14v2 (1080p) [F9F5C62B].mkv", // should be selected
				"[SubsPlease] Spy x Family (01-25) (1080p) [Batch]/[SubsPlease] Spy x Family - 15v2 (1080p) [F9F5C62B].mkv", // should be selected
			},
			mediaId:         142838,
			expectedIndices: []int{3, 4, 5},
		},
		{
			name: "Mushoku Tensei: Jobless Reincarnation Season 2 Part 2",
			filepaths: []string{
				"[EMBER] Mushoku Tensei S2 - 13.mkv", // should be selected
				"[EMBER] Mushoku Tensei S2 - 14.mkv", // should be selected
				"[EMBER] Mushoku Tensei S2 - 15.mkv", // should be selected
				"[EMBER] Mushoku Tensei S2 - 16.mkv", // should be selected
			},
			mediaId:         166873,
			expectedIndices: []int{0, 1, 2, 3},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// Get media
			media, err := anilistPlatform.GetAnimeWithRelations(t.Context(), tt.mediaId)
			if err != nil {
				t.Fatal("expected result, got error:", err.Error())
			}
			analyzer := NewAnalyzer(&NewAnalyzerOptions{
				Logger:           logger,
				Filepaths:        tt.filepaths,
				Media:            media,
				Platform:         anilistPlatform,
				MetadataProvider: metadataProvider,
				ForceMatch:       false,
			})
			// AnalyzeTorrentFiles
			analysis, err := analyzer.AnalyzeTorrentFiles()
			if assert.NoError(t, err) {
				selectedFilesMap := analysis.GetCorrespondingMainFiles()
				selectedIndices := analysis.GetIndices(selectedFilesMap)
				// Check selected files (order-insensitive: indices come from a map)
				assert.ElementsMatch(t, tt.expectedIndices, selectedIndices)
			}
		})
	}
}

View File

@@ -0,0 +1,32 @@
package animetosho
type (
	// Torrent mirrors a single entry of the Animetosho JSON feed.
	// NOTE(review): `omitempty` on int/interface{} fields affects marshalling
	// only; the feed is unmarshalled into this struct, so missing fields simply
	// stay at their zero value.
	Torrent struct {
		Id                   int         `json:"id"`
		Title                string      `json:"title"`
		Link                 string      `json:"link"`
		Timestamp            int         `json:"timestamp"` // Unix seconds
		Status               string      `json:"status"`
		ToshoId              int         `json:"tosho_id,omitempty"`
		NyaaId               int         `json:"nyaa_id,omitempty"` // cross-reference used for Sneedex "best release" lookups
		NyaaSubdom           interface{} `json:"nyaa_subdom,omitempty"`
		AniDexId             int         `json:"anidex_id,omitempty"`
		TorrentUrl           string      `json:"torrent_url"`
		InfoHash             string      `json:"info_hash"`
		InfoHashV2           string      `json:"info_hash_v2,omitempty"`
		MagnetUri            string      `json:"magnet_uri"`
		Seeders              int         `json:"seeders"`
		Leechers             int         `json:"leechers"`
		TorrentDownloadCount int         `json:"torrent_download_count"`
		TrackerUpdated       interface{} `json:"tracker_updated,omitempty"`
		NzbUrl               string      `json:"nzb_url,omitempty"`
		TotalSize            int64       `json:"total_size"` // bytes
		NumFiles             int         `json:"num_files"`  // >1 is treated as a batch elsewhere
		AniDbAid             int         `json:"anidb_aid"`
		AniDbEid             int         `json:"anidb_eid"`
		AniDbFid             int         `json:"anidb_fid"`
		ArticleUrl           string      `json:"article_url"`
		ArticleTitle         string      `json:"article_title"`
		WebsiteUrl           string      `json:"website_url"`
	}
)

View File

@@ -0,0 +1,731 @@
package animetosho
import (
"bytes"
"fmt"
"io"
"net/http"
"net/url"
"regexp"
"seanime/internal/api/anilist"
hibiketorrent "seanime/internal/extension/hibike/torrent"
"seanime/internal/util"
"strings"
"sync"
"time"
"github.com/5rahim/habari"
"github.com/goccy/go-json"
"github.com/rs/zerolog"
"github.com/samber/lo"
)
var (
	// JsonFeedUrl is the Animetosho JSON feed endpoint
	// ("https://feed.animetosho.org/json"), stored base64-encoded and decoded at init.
	JsonFeedUrl = util.Decode("aHR0cHM6Ly9mZWVkLmFuaW1ldG9zaG8ub3JnL2pzb24=")
	// ProviderName identifies this torrent provider to the rest of the app.
	ProviderName = "animetosho"
)
type (
	// Provider implements hibiketorrent.AnimeProvider for the Animetosho feed.
	Provider struct {
		logger *zerolog.Logger
		// sneedexNyaaIDs is the set of Nyaa IDs listed on Sneedex ("best releases").
		// NOTE(review): it is populated by a background goroutine (loadSneedex)
		// while other methods read it — confirm whether synchronization is needed.
		sneedexNyaaIDs map[int]struct{}
	}
)
// NewProvider returns an Animetosho-backed AnimeProvider.
// The Sneedex "best release" ID set is fetched asynchronously so construction
// never blocks on the network.
// NOTE(review): the spawned goroutine writes sneedexNyaaIDs while searches may
// already be reading it — potential data race; confirm and guard if needed.
func NewProvider(logger *zerolog.Logger) hibiketorrent.AnimeProvider {
	ret := &Provider{
		logger:         logger,
		sneedexNyaaIDs: make(map[int]struct{}),
	}
	go ret.loadSneedex()
	return ret
}
// GetSettings describes this provider's capabilities: it is a "main" provider
// with smart search supporting batch, episode-number, resolution and
// best-release filters, and it does not serve adult content.
func (at *Provider) GetSettings() hibiketorrent.AnimeProviderSettings {
	return hibiketorrent.AnimeProviderSettings{
		Type:           hibiketorrent.AnimeProviderTypeMain,
		CanSmartSearch: true,
		SmartSearchFilters: []hibiketorrent.AnimeProviderSmartSearchFilter{
			hibiketorrent.AnimeProviderSmartSearchFilterBatch,
			hibiketorrent.AnimeProviderSmartSearchFilterEpisodeNumber,
			hibiketorrent.AnimeProviderSmartSearchFilterResolution,
			hibiketorrent.AnimeProviderSmartSearchFilterBestReleases,
		},
		SupportsAdult: false,
	}
}
// GetLatest returns all the latest torrents currently visible on the site.
// The results are not tied to any particular media, so an empty Media is used
// for conversion.
func (at *Provider) GetLatest() ([]*hibiketorrent.AnimeTorrent, error) {
	at.logger.Debug().Msg("animetosho: Fetching latest torrents")
	latest, err := at.fetchTorrents("?q=")
	if err != nil {
		return nil, err
	}
	return at.torrentSliceToAnimeTorrentSlice(latest, false, &hibiketorrent.Media{}), nil
}
// Search returns the torrents matching the (sanitized, URL-escaped) free-text query.
func (at *Provider) Search(opts hibiketorrent.AnimeSearchOptions) ([]*hibiketorrent.AnimeTorrent, error) {
	at.logger.Debug().Str("query", opts.Query).Msg("animetosho: Searching for torrents")
	feedQuery := fmt.Sprintf("?q=%s", url.QueryEscape(sanitizeTitle(opts.Query)))
	found, err := at.fetchTorrents(feedQuery)
	if err != nil {
		return nil, err
	}
	return at.torrentSliceToAnimeTorrentSlice(found, false, &opts.Media), nil
}
// SmartSearch dispatches to the appropriate search strategy:
// best releases take precedence over batch, which takes precedence over
// single-episode search.
func (at *Provider) SmartSearch(opts hibiketorrent.AnimeSmartSearchOptions) ([]*hibiketorrent.AnimeTorrent, error) {
	switch {
	case opts.BestReleases:
		return at.smartSearchBestReleases(&opts)
	case opts.Batch:
		return at.smartSearchBatch(&opts)
	default:
		return at.smartSearchSingleEpisode(&opts)
	}
}
// smartSearchSingleEpisode finds single-file torrents for one episode.
// It first tries the AniDB episode ID; when that is unavailable it fans out
// over generated text queries in parallel and merges the results.
// Returns a deduplicated slice (by torrent link).
func (at *Provider) smartSearchSingleEpisode(opts *hibiketorrent.AnimeSmartSearchOptions) (ret []*hibiketorrent.AnimeTorrent, err error) {
	ret = make([]*hibiketorrent.AnimeTorrent, 0)
	at.logger.Debug().Int("aid", opts.AnidbAID).Msg("animetosho: Searching batches by Episode ID")
	foundByID := false
	atTorrents := make([]*Torrent, 0)
	if opts.AnidbEID > 0 {
		// Get all torrents by Episode ID
		atTorrents, err = at.searchByEID(opts.AnidbEID, opts.Resolution)
		if err != nil {
			return nil, err
		}
		foundByID = true
	}
	if foundByID {
		// Keep only single-file torrents (one episode = one file)
		atTorrents = lo.Filter(atTorrents, func(t *Torrent, _ int) bool {
			return t.NumFiles == 1
		})
		ret = at.torrentSliceToAnimeTorrentSlice(atTorrents, true, &opts.Media)
		return
	}
	at.logger.Debug().Msg("animetosho: Searching batches by query")
	// If we couldn't find torrents by AniDB Episode ID, use the query builder
	queries := buildSmartSearchQueries(opts)
	wg := sync.WaitGroup{}
	mu := sync.Mutex{}
	for _, query := range queries {
		wg.Add(1)
		go func(query string) {
			defer wg.Done()
			at.logger.Trace().Str("query", query).Msg("animetosho: Searching by query")
			torrents, err := at.fetchTorrents(fmt.Sprintf("?only_tor=1&q=%s&qx=1", url.QueryEscape(query)))
			if err != nil {
				// Best-effort fan-out: log and drop this query's results
				// instead of failing the whole search.
				at.logger.Warn().Err(err).Str("query", query).Msg("animetosho: Failed to fetch torrents for query")
				return
			}
			for _, t := range torrents {
				// Skip if torrent has more than 1 file, unless the media is a single-episode movie
				if t.NumFiles > 1 && !(opts.Media.Format == string(anilist.MediaFormatMovie) && opts.Media.EpisodeCount == 1) {
					continue
				}
				mu.Lock()
				ret = append(ret, t.toAnimeTorrent(&opts.Media))
				mu.Unlock()
			}
		}(query)
	}
	wg.Wait()
	// BUGFIX: lo.UniqBy returns the deduplicated slice; the result was
	// previously discarded, so duplicates were never removed.
	ret = lo.UniqBy(ret, func(t *hibiketorrent.AnimeTorrent) string {
		return t.Link
	})
	return
}
// smartSearchBatch finds multi-file (batch) torrents for a whole media.
// It first tries the AniDB anime ID, preferring multi-file results unless the
// media is a movie/single-episode; otherwise it fans out over generated text
// queries in parallel. Returns a deduplicated slice (by torrent link).
func (at *Provider) smartSearchBatch(opts *hibiketorrent.AnimeSmartSearchOptions) (ret []*hibiketorrent.AnimeTorrent, err error) {
	ret = make([]*hibiketorrent.AnimeTorrent, 0)
	at.logger.Debug().Int("aid", opts.AnidbAID).Msg("animetosho: Searching batches by Anime ID")
	foundByID := false
	atTorrents := make([]*Torrent, 0)
	if opts.AnidbAID > 0 {
		// Get all torrents by Anime ID
		atTorrents, err = at.searchByAID(opts.AnidbAID, opts.Resolution)
		if err != nil {
			return nil, err
		}
		// Retain batches ONLY if the media is NOT a movie or single-episode
		// i.e. if the media is a movie or single-episode return all torrents
		if !(opts.Media.Format == string(anilist.MediaFormatMovie) || opts.Media.EpisodeCount == 1) {
			batchTorrents := lo.Filter(atTorrents, func(t *Torrent, _ int) bool {
				return t.NumFiles > 1
			})
			if len(batchTorrents) > 0 {
				atTorrents = batchTorrents
			}
		}
		if len(atTorrents) > 0 {
			foundByID = true
		}
	}
	if foundByID {
		ret = at.torrentSliceToAnimeTorrentSlice(atTorrents, true, &opts.Media)
		return
	}
	at.logger.Debug().Msg("animetosho: Searching batches by query")
	// If we couldn't find batches by AniDB Anime ID, use the query builder
	queries := buildSmartSearchQueries(opts)
	wg := sync.WaitGroup{}
	mu := sync.Mutex{}
	for _, query := range queries {
		wg.Add(1)
		go func(query string) {
			defer wg.Done()
			at.logger.Trace().Str("query", query).Msg("animetosho: Searching by query")
			torrents, err := at.fetchTorrents(fmt.Sprintf("?only_tor=1&q=%s&order=size-d", url.QueryEscape(query)))
			if err != nil {
				// Best-effort fan-out: log and drop this query's results
				// instead of failing the whole search.
				at.logger.Warn().Err(err).Str("query", query).Msg("animetosho: Failed to fetch torrents for query")
				return
			}
			for _, t := range torrents {
				// Skip single-file torrents, unless the media is a single-episode movie
				if t.NumFiles == 1 && !(opts.Media.Format == string(anilist.MediaFormatMovie) && opts.Media.EpisodeCount == 1) {
					continue
				}
				mu.Lock()
				ret = append(ret, t.toAnimeTorrent(&opts.Media))
				mu.Unlock()
			}
		}(query)
	}
	wg.Wait()
	// BUGFIX: lo.UniqBy returns the deduplicated slice; the result was
	// previously discarded, so duplicates were never removed.
	ret = lo.UniqBy(ret, func(t *hibiketorrent.AnimeTorrent) string {
		return t.Link
	})
	return
}
// sneedexItem mirrors one entry of the public Sneedex Nyaa API response.
type sneedexItem struct {
	NyaaIDs []int  `json:"nyaaIDs"`
	EntryID string `json:"entryID"`
}
// loadSneedex fetches the public Sneedex Nyaa ID list and populates
// at.sneedexNyaaIDs. It runs as a background goroutine from NewProvider;
// failures are logged and leave the set empty (best-effort).
//
// NOTE(review): at.sneedexNyaaIDs is written here while other goroutines may
// read it — confirm whether this needs synchronization.
func (at *Provider) loadSneedex() {
	// Bounded client so a stalled request cannot hang the goroutine forever.
	client := &http.Client{Timeout: 30 * time.Second}
	resp, err := client.Get("https://sneedex.moe/api/public/nyaa")
	if err != nil {
		at.logger.Error().Err(err).Msg("animetosho: Failed to fetch Sneedex Nyaa IDs")
		return
	}
	defer resp.Body.Close()
	// BUGFIX: check the status code before unmarshalling — an error page would
	// previously be fed straight into json.Unmarshal.
	if resp.StatusCode != http.StatusOK {
		at.logger.Error().Str("status", resp.Status).Msg("animetosho: Failed to fetch Sneedex Nyaa IDs")
		return
	}
	b, err := io.ReadAll(resp.Body)
	if err != nil {
		at.logger.Error().Err(err).Msg("animetosho: Failed to read Sneedex Nyaa IDs response")
		return
	}
	var sneedexItems []*sneedexItem
	if err := json.Unmarshal(b, &sneedexItems); err != nil {
		at.logger.Error().Err(err).Msg("animetosho: Failed to unmarshal Sneedex Nyaa IDs")
		return
	}
	for _, item := range sneedexItems {
		for _, nyaaID := range item.NyaaIDs {
			at.sneedexNyaaIDs[nyaaID] = struct{}{}
		}
	}
	at.logger.Debug().Int("count", len(at.sneedexNyaaIDs)).Msg("animetosho: Loaded Sneedex Nyaa IDs")
}
// smartSearchBestReleases returns "best release" torrents, currently sourced
// exclusively from the Sneedex list.
func (at *Provider) smartSearchBestReleases(opts *hibiketorrent.AnimeSmartSearchOptions) ([]*hibiketorrent.AnimeTorrent, error) {
	return at.findSneedexBestReleases(opts)
}
// findSneedexBestReleases returns the torrents for the media's AniDB anime ID
// whose Nyaa ID appears in the Sneedex "best release" set. Without an AniDB
// anime ID the result is empty.
func (at *Provider) findSneedexBestReleases(opts *hibiketorrent.AnimeSmartSearchOptions) ([]*hibiketorrent.AnimeTorrent, error) {
	results := make([]*hibiketorrent.AnimeTorrent, 0)
	at.logger.Debug().Int("aid", opts.AnidbAID).Msg("animetosho: Searching best releases by Anime ID")
	if opts.AnidbAID <= 0 {
		return results, nil
	}
	candidates, err := at.searchByAID(opts.AnidbAID, opts.Resolution)
	if err != nil {
		return nil, err
	}
	// Keep only torrents listed on Sneedex
	sneedexed := lo.Filter(candidates, func(t *Torrent, _ int) bool {
		_, listed := at.sneedexNyaaIDs[t.NyaaId]
		return listed
	})
	return at.torrentSliceToAnimeTorrentSlice(sneedexed, true, &opts.Media), nil
}
//--------------------------------------------------------------------------------------------------------------------------------------------------//
// GetTorrentInfoHash returns the info hash already present on the torrent;
// no network request is needed for this provider.
func (at *Provider) GetTorrentInfoHash(torrent *hibiketorrent.AnimeTorrent) (string, error) {
	return torrent.InfoHash, nil
}
// GetTorrentMagnetLink returns the magnet link already present on the torrent;
// no network request is needed for this provider.
func (at *Provider) GetTorrentMagnetLink(torrent *hibiketorrent.AnimeTorrent) (string, error) {
	return torrent.MagnetLink, nil
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// buildSmartSearchQueries builds one or more Animetosho query strings from the
// smart-search options. Single-episode media get one title-group query;
// multi-episode media get either an episode query (optionally OR-ed with an
// absolute-episode variant when an offset exists) or a batch query. Each query
// is also emitted with a trailing " -S0" variant to exclude specials.
func buildSmartSearchQueries(opts *hibiketorrent.AnimeSmartSearchOptions) (ret []string) {
	hasSingleEpisode := opts.Media.EpisodeCount == 1 || opts.Media.Format == string(anilist.MediaFormatMovie)
	var queryStr []string // Final search query string, used for caching
	// Collect every known title: romaji, english (if any), and synonyms.
	allTitles := []string{opts.Media.RomajiTitle}
	if opts.Media.EnglishTitle != nil {
		allTitles = append(allTitles, *opts.Media.EnglishTitle)
	}
	for _, title := range opts.Media.Synonyms {
		allTitles = append(allTitles, title)
	}
	//
	// Media only has 1 episode
	//
	if hasSingleEpisode {
		str := ""
		// 1. Build a query string: "(" + each sanitized title joined by " | " + ")"
		qTitles := "("
		for _, title := range allTitles {
			qTitles += fmt.Sprintf("%s | ", sanitizeTitle(title))
		}
		// Trim the trailing " | " (3 chars) and close the group.
		qTitles = qTitles[:len(qTitles)-3] + ")"
		str += qTitles
		// 2. Add resolution
		if opts.Resolution != "" {
			str += " " + opts.Resolution
		}
		// e.g. (Attack on Titan|Shingeki no Kyojin) 1080p
		queryStr = []string{str}
	} else {
		//
		// Media has multiple episodes
		//
		if !opts.Batch { // Single episode search
			qTitles := buildTitleString(opts)
			qEpisodes := buildEpisodeString(opts)
			str := ""
			// 1. Add titles
			str += qTitles
			// 2. Add episodes
			if qEpisodes != "" {
				str += " " + qEpisodes
			}
			// 3. Add resolution
			if opts.Resolution != "" {
				str += " " + opts.Resolution
			}
			queryStr = append(queryStr, str)
			// If we can also search for absolute episodes (there is an offset)
			if opts.Media.AbsoluteSeasonOffset > 0 {
				// Parse a good title
				metadata := habari.Parse(opts.Media.RomajiTitle)
				// 1. Start building a new query string
				absoluteQueryStr := metadata.Title
				// 2. Add episodes (absolute numbering)
				ep := opts.EpisodeNumber + opts.Media.AbsoluteSeasonOffset
				absoluteQueryStr += fmt.Sprintf(` ("%d"|"e%d"|"ep%d")`, ep, ep, ep)
				// 3. Add resolution
				if opts.Resolution != "" {
					absoluteQueryStr += " " + opts.Resolution
				}
				// Overwrite queryStr by adding the absolute query string
				queryStr = append(queryStr, fmt.Sprintf("(%s) | (%s)", absoluteQueryStr, str))
			}
		} else {
			// Batch search
			// e.g. "(Shingeki No Kyojin | Attack on Titan) ("Batch"|"Complete Series") 1080"
			str := fmt.Sprintf(`(%s)`, opts.Media.RomajiTitle)
			if opts.Media.EnglishTitle != nil {
				str = fmt.Sprintf(`(%s | %s)`, opts.Media.RomajiTitle, *opts.Media.EnglishTitle)
			}
			str += " " + buildBatchGroup(&opts.Media)
			if opts.Resolution != "" {
				str += " " + opts.Resolution
			}
			queryStr = []string{str}
		}
	}
	// Emit each query twice: as-is and with " -S0" appended (exclude specials).
	for _, q := range queryStr {
		ret = append(ret, q)
		ret = append(ret, q+" -S0")
	}
	return
}
// searchByAID searches the feed for torrents belonging to the given AniDB
// anime ID, largest first, optionally filtered by quality (e.g. "1080p").
func (at *Provider) searchByAID(aid int, quality string) ([]*Torrent, error) {
	escaped := url.QueryEscape(formatQuality(quality))
	return at.fetchTorrents(fmt.Sprintf(`?order=size-d&aid=%d&q=%s`, aid, escaped))
}
// searchByEID searches the feed for torrents belonging to the given AniDB
// episode ID, optionally filtered by quality (e.g. "1080p").
func (at *Provider) searchByEID(eid int, quality string) ([]*Torrent, error) {
	escaped := url.QueryEscape(formatQuality(quality))
	return at.fetchTorrents(fmt.Sprintf(`?eid=%d&q=%s`, eid, escaped))
}
// feedClient is shared by all feed requests; the timeout prevents a stalled
// request from blocking a search indefinitely (http.Get has none).
var feedClient = &http.Client{Timeout: 60 * time.Second}

// fetchTorrents fetches and decodes the JSON feed at JsonFeedUrl+suffix.
// Non-200 responses are returned as errors. Obviously bogus peer counts
// (>100000) reported by the feed are zeroed out.
func (at *Provider) fetchTorrents(suffix string) (torrents []*Torrent, err error) {
	furl := JsonFeedUrl + suffix
	at.logger.Debug().Str("url", furl).Msg("animetosho: Fetching torrents")
	resp, err := feedClient.Get(furl)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	// Check if the request was successful (status code 200)
	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("failed to fetch torrents, %s", resp.Status)
	}
	b, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}
	// Parse the feed
	var ret []*Torrent
	if err := json.Unmarshal(b, &ret); err != nil {
		return nil, err
	}
	// Sanity-cap peer counts the feed sometimes reports as absurd values.
	for _, t := range ret {
		if t.Seeders > 100000 {
			t.Seeders = 0
		}
		if t.Leechers > 100000 {
			t.Leechers = 0
		}
	}
	return ret, nil
}
// formatQuality normalizes a resolution filter for the feed query by stripping
// a trailing "p" (e.g. "1080p" -> "1080"). Empty input stays empty.
// (The previous version wrapped the result in a no-op fmt.Sprintf.)
func formatQuality(quality string) string {
	return strings.TrimSuffix(quality, "p")
}
// Compiled once at package scope: sanitizeTitle runs for every title and
// synonym on every search, so per-call regexp.MustCompile was wasteful.
var (
	nonAlphanumRe = regexp.MustCompile(`[^a-zA-Z0-9\s]`)
	multiSpaceRe  = regexp.MustCompile(`\s+`)
)

// sanitizeTitle removes characters that impact the search query:
// hyphens become spaces, everything outside [a-zA-Z0-9\s] is dropped,
// and runs of whitespace collapse to a single space.
func sanitizeTitle(t string) string {
	// Replace hyphens with spaces
	t = strings.ReplaceAll(t, "-", " ")
	// Remove everything except alphanumeric characters and spaces
	t = nonAlphanumRe.ReplaceAllString(t, "")
	// Collapse whitespace runs
	return multiSpaceRe.ReplaceAllString(t, " ")
}
// getAllTitles collects every known title for the media: romaji, english
// (when present), and all synonyms, in that order.
func getAllTitles(media *hibiketorrent.Media) []string {
	titles := make([]string, 0, 2+len(media.Synonyms))
	titles = append(titles, media.RomajiTitle)
	if media.EnglishTitle != nil {
		titles = append(titles, *media.EnglishTitle)
	}
	titles = append(titles, media.Synonyms...)
	return titles
}
// buildEpisodeString returns an episode filter such as `("01"|"e1") -S0`
// (zero-padded plain number OR "e"+number, excluding specials), or "" when
// no episode number is set (-1).
func buildEpisodeString(opts *hibiketorrent.AnimeSmartSearchOptions) string {
	if opts.EpisodeNumber == -1 {
		return ""
	}
	return fmt.Sprintf(`("%s"|"e%d") -S0`, zeropad(opts.EpisodeNumber), opts.EpisodeNumber)
}
// buildTitleString builds the title portion of a search query, e.g.
// `("jujutsu kaisen" | "jjk" | "jujutsu kaisen season 2" | ...)`.
//
// It collects the romaji/english titles and synonyms, extracts a season number
// when one is embedded in a title (including " II"/" III" suffixes), strips
// season markers from the titles, dedupes them, and — when a season was found —
// appends `"<shortest title> season N"`-style alternatives.
func buildTitleString(opts *hibiketorrent.AnimeSmartSearchOptions) string {
	romTitle := sanitizeTitle(opts.Media.RomajiTitle)
	engTitle := ""
	if opts.Media.EnglishTitle != nil {
		engTitle = sanitizeTitle(*opts.Media.EnglishTitle)
	}
	season := 0
	// create titles by extracting season/part info
	titles := make([]string, 0)
	for _, title := range getAllTitles(&opts.Media) {
		s, cTitle := util.ExtractSeasonNumber(title)
		if s != 0 { // update season if it got parsed
			season = s
		}
		if cTitle != "" { // add "cleaned" titles
			titles = append(titles, sanitizeTitle(cTitle))
		}
	}
	// Check season from synonyms, only update season if it's still 0
	for _, synonym := range opts.Media.Synonyms {
		s, _ := util.ExtractSeasonNumber(synonym)
		if s != 0 && season == 0 {
			season = s
		}
	}
	// add romaji and english titles to the title list
	titles = append(titles, romTitle)
	if len(engTitle) > 0 {
		titles = append(titles, engTitle)
	}
	// convert III and II to season
	// these will get cleaned later
	if season == 0 && strings.Contains(strings.ToLower(romTitle), " iii") {
		season = 3
	}
	if season == 0 && strings.Contains(strings.ToLower(romTitle), " ii") {
		season = 2
	}
	if engTitle != "" {
		if season == 0 && strings.Contains(strings.ToLower(engTitle), " iii") {
			season = 3
		}
		if season == 0 && strings.Contains(strings.ToLower(engTitle), " ii") {
			season = 2
		}
	}
	// also, split romaji title by colon,
	// if first part is long enough, add it to the title list
	// DEVNOTE maybe we should only do that if the season IS found
	split := strings.Split(romTitle, ":")
	if len(split) > 1 && len(split[0]) > 8 {
		titles = append(titles, split[0])
	}
	if engTitle != "" {
		split = strings.Split(engTitle, ":")
		if len(split) > 1 && len(split[0]) > 8 {
			titles = append(titles, split[0])
		}
	}
	// clean titles: drop punctuation, normalize whitespace, lowercase, and
	// remove roman-numeral season suffixes once a season is known
	for i, title := range titles {
		titles[i] = strings.TrimSpace(strings.ReplaceAll(title, ":", " "))
		titles[i] = strings.TrimSpace(strings.ReplaceAll(titles[i], "-", " "))
		titles[i] = strings.Join(strings.Fields(titles[i]), " ")
		titles[i] = strings.ToLower(titles[i])
		if season != 0 {
			titles[i] = strings.ReplaceAll(titles[i], " iii", "")
			titles[i] = strings.ReplaceAll(titles[i], " ii", "")
		}
	}
	titles = lo.Uniq(titles)
	// The shortest title anchors the "season N" alternatives below.
	shortestTitle := ""
	for _, title := range titles {
		if shortestTitle == "" || len(title) < len(shortestTitle) {
			shortestTitle = title
		}
	}
	/////////////////////// Season
	// e.g. "title season 2" | "title season 02" | "title s2" | "title s02"
	seasonBuff := bytes.NewBufferString("")
	if season > 0 {
		seasonBuff.WriteString(fmt.Sprintf(`"%s %s%d" | `, shortestTitle, "season ", season))
		seasonBuff.WriteString(fmt.Sprintf(`"%s %s%s" | `, shortestTitle, "season ", zeropad(season)))
		seasonBuff.WriteString(fmt.Sprintf(`"%s %s%d" | `, shortestTitle, "s", season))
		seasonBuff.WriteString(fmt.Sprintf(`"%s %s%s"`, shortestTitle, "s", zeropad(season)))
	}
	qTitles := "("
	for idx, title := range titles {
		qTitles += "\"" + title + "\"" + " | "
		if idx == len(titles)-1 {
			qTitles = qTitles[:len(qTitles)-3]
		}
	}
	// BUGFIX: the season alternatives were previously concatenated directly
	// after the last quoted title with no " | " separator, producing a
	// malformed group like `..."title""title season 2"...`.
	if seasonBuff.Len() > 0 {
		qTitles += " | " + seasonBuff.String()
	}
	qTitles += ")"
	return qTitles
}
// zeropad left-pads an int or string to at least two characters with zeros
// (7 -> "07", "1" -> "01", 12 -> "12"). Unsupported types yield "".
//
// BUGFIX: the string case previously used fmt.Sprintf("%02s", ...), which pads
// with SPACES, not zeros (e.g. "1" -> " 1"), corrupting batch range queries.
func zeropad(v interface{}) string {
	switch i := v.(type) {
	case int:
		return fmt.Sprintf("%02d", i)
	case string:
		if len(i) < 2 {
			return strings.Repeat("0", 2-len(i)) + i
		}
		return i
	default:
		return ""
	}
}
// buildBatchGroup builds the batch-marker alternatives for a batch query,
// e.g. ("01 - 12"|"01 ~ 12"|"Batch"|"Complete"|...).
func buildBatchGroup(m *hibiketorrent.Media) string {
	buff := bytes.NewBufferString("")
	buff.WriteString("(")
	// e.g. 01 - 12
	// BUGFIX: 1 is passed as an int so zeropad emits "01"; the previous string
	// argument went through "%02s", which pads with spaces (" 1 - 12").
	s1 := fmt.Sprintf(`"%s%s%s"`, zeropad(1), " - ", zeropad(m.EpisodeCount))
	buff.WriteString(s1)
	buff.WriteString("|")
	// e.g. 01 ~ 12
	s2 := fmt.Sprintf(`"%s%s%s"`, zeropad(1), " ~ ", zeropad(m.EpisodeCount))
	buff.WriteString(s2)
	buff.WriteString("|")
	// Common batch markers used by release groups
	buff.WriteString(`"Batch"|`)
	buff.WriteString(`"Complete"|`)
	buff.WriteString(`"+ OVA"|`)
	buff.WriteString(`"+ Specials"|`)
	buff.WriteString(`"+ Special"|`)
	buff.WriteString(`"Seasons"|`)
	buff.WriteString(`"Parts"`)
	buff.WriteString(")")
	return buff.String()
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// torrentSliceToAnimeTorrentSlice converts feed torrents to AnimeTorrents in
// parallel (one goroutine per torrent), marking each as confirmed per the flag
// and as a best release when its Nyaa ID is in the Sneedex set.
// The output order is nondeterministic.
// NOTE(review): at.sneedexNyaaIDs may still be being populated by loadSneedex's
// goroutine while it is read here — confirm whether this needs synchronization.
func (at *Provider) torrentSliceToAnimeTorrentSlice(torrents []*Torrent, confirmed bool, media *hibiketorrent.Media) []*hibiketorrent.AnimeTorrent {
	wg := sync.WaitGroup{}
	mu := sync.Mutex{} // guards ret across the conversion goroutines
	ret := make([]*hibiketorrent.AnimeTorrent, 0)
	for _, torrent := range torrents {
		wg.Add(1)
		go func(torrent *Torrent) {
			defer wg.Done()
			t := torrent.toAnimeTorrent(media)
			_, isBest := at.sneedexNyaaIDs[torrent.NyaaId]
			t.IsBestRelease = isBest
			t.Confirmed = confirmed
			mu.Lock()
			ret = append(ret, t)
			mu.Unlock()
		}(torrent)
	}
	wg.Wait()
	return ret
}
// toAnimeTorrent converts a feed torrent into the provider-agnostic
// AnimeTorrent shape, parsing resolution/release group/episode number out of
// the torrent title. The episode number is -1 when it cannot be determined.
func (t *Torrent) toAnimeTorrent(media *hibiketorrent.Media) *hibiketorrent.AnimeTorrent {
	metadata := habari.Parse(t.Title)
	// Feed timestamp is Unix seconds; render as RFC3339.
	formattedDate := ""
	parsedDate := time.Unix(int64(t.Timestamp), 0)
	formattedDate = parsedDate.Format(time.RFC3339)
	ret := &hibiketorrent.AnimeTorrent{
		Name:          t.Title,
		Date:          formattedDate,
		Size:          t.TotalSize,
		FormattedSize: util.Bytes(uint64(t.TotalSize)),
		Seeders:       t.Seeders,
		Leechers:      t.Leechers,
		DownloadCount: t.TorrentDownloadCount,
		Link:          t.Link,
		DownloadUrl:   t.TorrentUrl,
		MagnetLink:    t.MagnetUri,
		InfoHash:      t.InfoHash,
		Resolution:    metadata.VideoResolution,
		IsBatch:       t.NumFiles > 1, // multi-file torrents are treated as batches
		EpisodeNumber: 0,              // overwritten below
		ReleaseGroup:  metadata.ReleaseGroup,
		Provider:      ProviderName,
		IsBestRelease: false, // set by the caller from the Sneedex set
		Confirmed:     false, // set by the caller
	}
	// Only trust the parsed episode when the title yields exactly one number.
	episode := -1
	if len(metadata.EpisodeNumber) == 1 {
		episode = util.StringToIntMust(metadata.EpisodeNumber[0])
	}
	// Force set episode number to 1 if it's a movie or single-episode and the torrent isn't a batch
	if !ret.IsBatch && episode == -1 && (media.EpisodeCount == 1 || media.Format == string(anilist.MediaFormatMovie)) {
		episode = 1
	}
	ret.EpisodeNumber = episode
	return ret
}

View File

@@ -0,0 +1,185 @@
package animetosho
import (
"seanime/internal/api/anilist"
"seanime/internal/api/metadata"
hibiketorrent "seanime/internal/extension/hibike/torrent"
"seanime/internal/platforms/anilist_platform"
"seanime/internal/test_utils"
"seanime/internal/util"
"strconv"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestSmartSearch runs the smart-search pipeline for a handful of known media
// against the live AnimeTosho API and logs the returned torrents.
func TestSmartSearch(t *testing.T) {
	test_utils.InitTestProvider(t, test_utils.Anilist())

	anilistClient := anilist.TestGetMockAnilistClient()
	logger := util.NewLogger()
	anilistPlatform := anilist_platform.NewAnilistPlatform(anilistClient, logger)
	toshoPlatform := NewProvider(util.NewLogger())
	metadataProvider := metadata.GetMockProvider(t)

	tests := []struct {
		name           string
		mId            int
		batch          bool
		episodeNumber  int
		absoluteOffset int
		resolution     string
	}{
		{
			name:           "Bungou Stray Dogs 5th Season Episode 11",
			mId:            163263,
			batch:          false,
			episodeNumber:  11,
			absoluteOffset: 45,
			resolution:     "1080",
		},
		{
			name:           "SPY×FAMILY Season 1 Part 2",
			mId:            142838,
			batch:          false,
			episodeNumber:  12,
			absoluteOffset: 12,
			resolution:     "1080",
		},
		{
			name:           "Jujutsu Kaisen Season 2",
			mId:            145064,
			batch:          false,
			episodeNumber:  2,
			absoluteOffset: 24,
			resolution:     "",
		},
		{
			name:           "Violet Evergarden The Movie",
			mId:            103047,
			batch:          true,
			episodeNumber:  1,
			absoluteOffset: 0,
			resolution:     "720",
		},
		{
			name:           "Sousou no Frieren",
			mId:            154587,
			batch:          false,
			episodeNumber:  10,
			absoluteOffset: 0,
			resolution:     "1080",
		},
		{
			name:           "Tokubetsu-hen Hibike! Euphonium: Ensemble",
			mId:            150429,
			batch:          false,
			episodeNumber:  1,
			absoluteOffset: 0,
			resolution:     "1080",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			media, err := anilistPlatform.GetAnime(t.Context(), tt.mId)
			// BUG FIX: this error was previously overwritten by the next call
			// before being checked; a failed GetAnime would have caused a nil
			// pointer dereference when building queryMedia below.
			require.NoError(t, err)
			require.NotNil(t, media)

			animeMetadata, err := metadataProvider.GetAnimeMetadata(metadata.AnilistPlatform, tt.mId)
			require.NoError(t, err)

			queryMedia := hibiketorrent.Media{
				ID:                   media.GetID(),
				IDMal:                media.GetIDMal(),
				Status:               string(*media.GetStatus()),
				Format:               string(*media.GetFormat()),
				EnglishTitle:         media.GetTitle().GetEnglish(),
				RomajiTitle:          media.GetRomajiTitleSafe(),
				EpisodeCount:         media.GetTotalEpisodeCount(),
				AbsoluteSeasonOffset: tt.absoluteOffset,
				Synonyms:             media.GetSynonymsContainingSeason(),
				IsAdult:              *media.GetIsAdult(),
				StartDate: &hibiketorrent.FuzzyDate{
					Year:  *media.GetStartDate().GetYear(),
					Month: media.GetStartDate().GetMonth(),
					Day:   media.GetStartDate().GetDay(),
				},
			}

			// err is always nil here (checked by require above); the assert
			// wrapper is kept to preserve the original control flow and usage.
			if assert.NoError(t, err) {
				episodeMetadata, ok := animeMetadata.FindEpisode(strconv.Itoa(tt.episodeNumber))
				require.True(t, ok)

				torrents, err := toshoPlatform.SmartSearch(hibiketorrent.AnimeSmartSearchOptions{
					Media:         queryMedia,
					Query:         "",
					Batch:         tt.batch,
					EpisodeNumber: tt.episodeNumber,
					Resolution:    tt.resolution,
					AnidbAID:      animeMetadata.Mappings.AnidbId,
					AnidbEID:      episodeMetadata.AnidbEid,
					BestReleases:  false,
				})
				require.NoError(t, err)
				require.GreaterOrEqual(t, len(torrents), 1, "expected at least 1 torrent")

				for _, torrent := range torrents {
					t.Log(torrent.Name)
					t.Logf("\tLink: %s", torrent.Link)
					t.Logf("\tMagnet: %s", torrent.MagnetLink)
					t.Logf("\tEpisodeNumber: %d", torrent.EpisodeNumber)
					t.Logf("\tResolution: %s", torrent.Resolution)
					t.Logf("\tIsBatch: %v", torrent.IsBatch)
					t.Logf("\tConfirmed: %v", torrent.Confirmed)
				}
			}
		})
	}
}
// TestSearch2 runs a free-text search against the live AnimeTosho API and
// logs the returned torrents.
func TestSearch2(t *testing.T) {
	provider := NewProvider(util.NewLogger())

	results, err := provider.Search(hibiketorrent.AnimeSearchOptions{
		Media: hibiketorrent.Media{},
		Query: "Kusuriya no Hitorigoto 05",
	})
	require.NoError(t, err)
	require.GreaterOrEqual(t, len(results), 1, "expected at least 1 torrent")

	for _, tor := range results {
		t.Log(tor.Name)
		t.Logf("\tLink: %s", tor.Link)
		t.Logf("\tMagnet: %s", tor.MagnetLink)
		t.Logf("\tEpisodeNumber: %d", tor.EpisodeNumber)
		t.Logf("\tResolution: %s", tor.Resolution)
		t.Logf("\tIsBatch: %v", tor.IsBatch)
		t.Logf("\tConfirmed: %v", tor.Confirmed)
	}
}
// TestGetLatest fetches the latest torrents from the live AnimeTosho API and
// logs them.
func TestGetLatest(t *testing.T) {
	provider := NewProvider(util.NewLogger())

	results, err := provider.GetLatest()
	require.NoError(t, err)
	require.GreaterOrEqual(t, len(results), 1, "expected at least 1 torrent")

	for _, tor := range results {
		t.Log(tor.Name)
		t.Logf("\tLink: %s", tor.Link)
		t.Logf("\tMagnet: %s", tor.MagnetLink)
		t.Logf("\tEpisodeNumber: %d", tor.EpisodeNumber)
		t.Logf("\tResolution: %s", tor.Resolution)
		t.Logf("\tIsBatch: %v", tor.IsBatch)
		t.Logf("\tConfirmed: %v", tor.Confirmed)
	}
}

View File

@@ -0,0 +1,81 @@
package animetosho
import (
"errors"
"github.com/gocolly/colly"
"strings"
)
// TorrentFile scrapes the .torrent download link from an AnimeTosho view page.
// It returns an error when the page cannot be fetched or no link is found.
func TorrentFile(viewURL string) (string, error) {
	var torrentLink string
	c := colly.NewCollector()
	c.OnHTML("a[href]", func(e *colly.HTMLElement) {
		if strings.HasSuffix(e.Attr("href"), ".torrent") {
			torrentLink = e.Attr("href")
		}
	})
	var scrapeErr error
	c.OnError(func(r *colly.Response, err error) {
		scrapeErr = err
	})
	// BUG FIX: the scrape error was previously checked BEFORE Visit ran
	// (always nil) and Visit's own error was discarded; failures therefore
	// surfaced as a misleading "download link not found".
	if err := c.Visit(viewURL); err != nil {
		return "", err
	}
	if scrapeErr != nil {
		return "", scrapeErr
	}
	if torrentLink == "" {
		return "", errors.New("download link not found")
	}
	return torrentLink, nil
}
// TorrentMagnet scrapes the magnet link from an AnimeTosho view page.
// It returns an error when the page cannot be fetched or no link is found.
func TorrentMagnet(viewURL string) (string, error) {
	var magnetLink string
	c := colly.NewCollector()
	c.OnHTML("a[href]", func(e *colly.HTMLElement) {
		if strings.HasPrefix(e.Attr("href"), "magnet:?xt=") {
			magnetLink = e.Attr("href")
		}
	})
	var scrapeErr error
	c.OnError(func(r *colly.Response, err error) {
		scrapeErr = err
	})
	// BUG FIX: the scrape error was previously checked BEFORE Visit ran
	// (always nil) and Visit's own error was discarded.
	if err := c.Visit(viewURL); err != nil {
		return "", err
	}
	if scrapeErr != nil {
		return "", scrapeErr
	}
	if magnetLink == "" {
		return "", errors.New("magnet link not found")
	}
	return magnetLink, nil
}
// TorrentHash derives the torrent hash from the .torrent storage URL of an
// AnimeTosho view page; the path has the form
// "<host>/storage/torrent/<hash>/<name>".
func TorrentHash(viewURL string) (string, error) {
	file, err := TorrentFile(viewURL)
	if err != nil {
		return "", err
	}
	trimmed := strings.Replace(file, "https://", "", 1)
	parts := strings.Split(trimmed, "/")
	if len(parts) < 4 {
		return "", errors.New("hash not found")
	}
	// parts: [host, "storage", "torrent", hash, ...]
	return parts[3], nil
}

View File

@@ -0,0 +1,47 @@
package animetosho
import (
"seanime/internal/util"
"testing"
"github.com/stretchr/testify/assert"
)
// TestMagnet scrapes the magnet link from a live AnimeTosho view page.
// The URL is base64-obfuscated in the test fixture (decoded by util.Decode).
func TestMagnet(t *testing.T) {
	url := util.Decode("aHR0cHM6Ly9hbmltZXRvc2hvLm9yZy92aWV3L2thaXpva3UtanVqdXRzdS1rYWlzZW4tMjYtYTFjOWJhYjEtc2Vhc29uLTIubjE3MTAxMTY=")
	magnet, err := TorrentMagnet(url)
	if assert.NoError(t, err) {
		if assert.NotEmptyf(t, magnet, "magnet link not found") {
			t.Log(magnet)
		}
	}
}
// TestTorrentFile scrapes the .torrent download link from a live AnimeTosho
// view page (base64-obfuscated fixture URL).
func TestTorrentFile(t *testing.T) {
	url := util.Decode("aHR0cHM6Ly9hbmltZXRvc2hvLm9yZy92aWV3L2thaXpva3UtanVqdXRzdS1rYWlzZW4tMjYtYTFjOWJhYjEtc2Vhc29uLTIubjE3MTAxMTY=")
	link, err := TorrentFile(url)
	if assert.NoError(t, err) {
		if assert.NotEmptyf(t, link, "download link not found") {
			t.Log(link)
		}
	}
}
// TestTorrentHash extracts the torrent hash from a live AnimeTosho view page
// (base64-obfuscated fixture URL).
func TestTorrentHash(t *testing.T) {
	url := util.Decode("aHR0cHM6Ly9hbmltZXRvc2hvLm9yZy92aWV3L2thaXpva3UtanVqdXRzdS1rYWlzZW4tMjYtYTFjOWJhYjEtc2Vhc29uLTIubjE3MTAxMTY=")
	hash, err := TorrentHash(url)
	if assert.NoError(t, err) {
		if assert.NotEmptyf(t, hash, "hash not found") {
			t.Log(hash)
		}
	}
}

View File

@@ -0,0 +1,259 @@
package nyaa
import (
"fmt"
gourl "net/url"
"seanime/internal/util"
)
type (
	// Torrent mirrors one <item> of the nyaa RSS feed.
	// Numeric fields (seeders, leechers, downloads, ...) are kept as strings
	// because that is how the nyaa RSS extension reports them; they are
	// converted on demand (see toAnimeTorrent / GetSizeInBytes).
	Torrent struct {
		Category    string `json:"category"`
		Name        string `json:"name"`
		Description string `json:"description"`
		Date        string `json:"date"`
		Size        string `json:"size"`
		Seeders     string `json:"seeders"`
		Leechers    string `json:"leechers"`
		Downloads   string `json:"downloads"`
		IsTrusted   string `json:"isTrusted"`
		IsRemake    string `json:"isRemake"`
		Comments    string `json:"comments"`
		Link        string `json:"link"`
		GUID        string `json:"guid"`
		CategoryID  string `json:"categoryID"`
		InfoHash    string `json:"infoHash"`
	}
	// BuildURLOptions are the inputs of buildURL.
	BuildURLOptions struct {
		Provider string // "nyaa" or "sukebei"
		Query    string // free-text search query (URL-escaped by buildURL)
		Category string // human-readable category key, e.g. "anime-eng"
		SortBy   string // "seeders", "downloads", "date", ...
		Filter   string // "no-filter", "no-remakes" or "trusted-only"
	}
	// Comment is a single user comment on a torrent view page.
	Comment struct {
		User string `json:"user"`
		Date string `json:"date"`
		Text string `json:"text"`
	}
)
// GetSizeInBytes converts the human-readable size reported by the RSS feed
// into a byte count; it returns 0 when the string cannot be parsed.
func (t *Torrent) GetSizeInBytes() int64 {
	// Local renamed from "bytes", which shadowed the stdlib package name.
	size, _ := util.StringSizeToBytes(t.Size)
	return size
}
// Endpoint URLs are stored base64-encoded and decoded at init time.
var (
	nyaaBaseURL    = util.Decode("aHR0cHM6Ly9ueWFhLnNpLz9wYWdlPXJzcyZxPSs=")
	sukebeiBaseURL = util.Decode("aHR0cHM6Ly9zdWtlYmVpLm55YWEuc2kvP3BhZ2U9cnNzJnE9Kw==")
	nyaaView       = util.Decode("aHR0cHM6Ly9ueWFhLnNpL3ZpZXcv")
	sukebeiView    = util.Decode("aHR0cHM6Ly9zdWtlYmVpLm55YWEuc2kvdmlldy8=")
)

// Query-string fragments appended to the RSS URL by buildURL.
const (
	// Sort options (s = field, o = order).
	sortByComments  = "&s=comments&o=desc"
	sortBySeeders   = "&s=seeders&o=desc"
	sortByLeechers  = "&s=leechers&o=desc"
	sortByDownloads = "&s=downloads&o=desc"
	sortBySizeDsc   = "&s=size&o=desc"
	sortBySizeAsc   = "&s=size&o=asc"
	sortByDate      = "&s=id&o=desc"
	// Filter options.
	filterNoFilter    = "&f=0"
	filterNoRemakes   = "&f=1"
	filterTrustedOnly = "&f=2"
	// nyaa.si categories.
	categoryAll                = "&c=0_0"
	categoryAnime              = "&c=1_0"
	CategoryAnime              = "&c=1_0"
	categoryAnimeAMV           = "&c=1_1"
	categoryAnimeEng           = "&c=1_2"
	CategoryAnimeEng           = "&c=1_2"
	categoryAnimeNonEng        = "&c=1_3"
	CategoryAnimeNonEng        = "&c=1_3"
	categoryAnimeRaw           = "&c=1_4"
	categoryAudio              = "&c=2_0"
	categoryAudioLossless      = "&c=2_1"
	categoryAudioLossy         = "&c=2_2"
	categoryLiterature         = "&c=3_0"
	categoryLiteratureEng      = "&c=3_1"
	categoryLiteratureNonEng   = "&c=3_2"
	categoryLiteratureRaw      = "&c=3_3"
	categoryLiveAction         = "&c=4_0"
	categoryLiveActionRaw      = "&c=4_4"
	categoryLiveActionEng      = "&c=4_1"
	categoryLiveActionNonEng   = "&c=4_3"
	categoryLiveActionIdolProm = "&c=4_2"
	categoryPictures           = "&c=5_0"
	categoryPicturesGraphics   = "&c=5_1"
	categoryPicturesPhotos     = "&c=5_2"
	categorySoftware           = "&c=6_0"
	categorySoftwareApps       = "&c=6_1"
	categorySoftwareGames      = "&c=6_2"
	// sukebei.nyaa.si categories. The numeric values intentionally overlap
	// with the nyaa ones above: sukebei uses its own category numbering.
	categoryArt            = "&c=1_0"
	categoryArtAnime       = "&c=1_1"
	categoryArtDoujinshi   = "&c=1_2"
	categoryArtGames       = "&c=1_3"
	categoryArtManga       = "&c=1_4"
	categoryArtPictures    = "&c=1_5"
	categoryRealLife       = "&c=2_0"
	categoryRealLifePhotos = "&c=2_1"
	categoryRealLifeVideos = "&c=2_2"
)
// buildURL constructs the nyaa/sukebei RSS search URL from the given options.
// When baseUrl is empty, the default endpoint for opts.Provider is used.
// It returns an error for an unknown provider, category, sort or filter option.
func buildURL(baseUrl string, opts BuildURLOptions) (string, error) {
	var url string
	if baseUrl == "" {
		switch opts.Provider {
		case "nyaa":
			url = nyaaBaseURL
		case "sukebei":
			url = sukebeiBaseURL
		default:
			return "", fmt.Errorf("provider option could be nyaa or sukebei")
		}
	} else {
		url = baseUrl
	}

	if opts.Query != "" {
		url += gourl.QueryEscape(opts.Query)
	}

	// Lookup tables replace the previous switch chains; the option -> query
	// parameter mapping is identical.
	nyaaCategories := map[string]string{
		"all":                   categoryAll,
		"anime":                 categoryAnime,
		"anime-amv":             categoryAnimeAMV,
		"anime-eng":             categoryAnimeEng,
		"anime-non-eng":         categoryAnimeNonEng,
		"anime-raw":             categoryAnimeRaw,
		"audio":                 categoryAudio,
		"audio-lossless":        categoryAudioLossless,
		"audio-lossy":           categoryAudioLossy,
		"literature":            categoryLiterature,
		"literature-eng":        categoryLiteratureEng,
		"literature-non-eng":    categoryLiteratureNonEng,
		"literature-raw":        categoryLiteratureRaw,
		"live-action":           categoryLiveAction,
		"live-action-raw":       categoryLiveActionRaw,
		"live-action-eng":       categoryLiveActionEng,
		"live-action-non-eng":   categoryLiveActionNonEng,
		"live-action-idol-prom": categoryLiveActionIdolProm,
		"pictures":              categoryPictures,
		"pictures-graphics":     categoryPicturesGraphics,
		"pictures-photos":       categoryPicturesPhotos,
		"software":              categorySoftware,
		"software-apps":         categorySoftwareApps,
		"software-games":        categorySoftwareGames,
	}
	sukebeiCategories := map[string]string{
		"all":              categoryAll,
		"art":              categoryArt,
		"art-anime":        categoryArtAnime,
		"art-doujinshi":    categoryArtDoujinshi,
		"art-games":        categoryArtGames,
		"art-manga":        categoryArtManga,
		"art-pictures":     categoryArtPictures,
		"real-life":        categoryRealLife,
		"real-life-photos": categoryRealLifePhotos,
		"real-life-videos": categoryRealLifeVideos,
	}
	sortParams := map[string]string{
		"downloads": sortByDownloads,
		"comments":  sortByComments,
		"seeders":   sortBySeeders,
		"leechers":  sortByLeechers,
		"size-asc":  sortBySizeAsc,
		"size-dsc":  sortBySizeDsc,
		"date":      sortByDate,
	}
	filterParams := map[string]string{
		"no-filter":    filterNoFilter,
		"no-remakes":   filterNoRemakes,
		"trusted-only": filterTrustedOnly,
	}

	if opts.Provider == "nyaa" && opts.Category != "" {
		param, ok := nyaaCategories[opts.Category]
		if !ok {
			// Typo fixed: "exitst" -> "exist".
			return "", fmt.Errorf("such nyaa category option does not exist")
		}
		url += param
	}
	if opts.Provider == "sukebei" && opts.Category != "" {
		param, ok := sukebeiCategories[opts.Category]
		if !ok {
			return "", fmt.Errorf("such sukebei category option does not exist")
		}
		url += param
	}
	if opts.SortBy != "" {
		param, ok := sortParams[opts.SortBy]
		if !ok {
			return "", fmt.Errorf("such sort option does not exist")
		}
		url += param
	}
	if opts.Filter != "" {
		param, ok := filterParams[opts.Filter]
		if !ok {
			return "", fmt.Errorf("such filter option does not exist")
		}
		url += param
	}
	return url, nil
}

View File

@@ -0,0 +1,569 @@
package nyaa
import (
"bytes"
"fmt"
"seanime/internal/api/anilist"
"seanime/internal/extension"
hibiketorrent "seanime/internal/extension/hibike/torrent"
"seanime/internal/util"
"seanime/internal/util/comparison"
"strconv"
"strings"
"sync"
"time"
"github.com/5rahim/habari"
"github.com/mmcdole/gofeed"
"github.com/rs/zerolog"
"github.com/samber/lo"
)
const (
	// NyaaProviderName identifies this provider in AnimeTorrent results.
	NyaaProviderName = "nyaa"
)

// Provider implements the hibike AnimeProvider interface for nyaa.si.
type Provider struct {
	logger   *zerolog.Logger
	category string // category query fragment used for every search (e.g. "&c=1_0")
	baseUrl  string // optional endpoint override, set via SetSavedUserConfig
}
// NewProvider returns a nyaa torrent provider restricted to the given category.
func NewProvider(logger *zerolog.Logger, category string) hibiketorrent.AnimeProvider {
	p := &Provider{
		logger:   logger,
		category: category,
	}
	return p
}
// SetSavedUserConfig applies the user's saved extension settings; a missing
// "apiUrl" entry resets the base URL to empty (default endpoint).
func (n *Provider) SetSavedUserConfig(config extension.SavedUserConfig) {
	n.baseUrl = config.Values["apiUrl"]
}
// GetSettings describes the provider's capabilities: nyaa is a main provider
// with smart-search support for batch, episode-number, resolution and
// free-text query filters. Adult content is not supported here (sukebei is a
// separate endpoint).
func (n *Provider) GetSettings() hibiketorrent.AnimeProviderSettings {
	return hibiketorrent.AnimeProviderSettings{
		Type:           hibiketorrent.AnimeProviderTypeMain,
		CanSmartSearch: true,
		SmartSearchFilters: []hibiketorrent.AnimeProviderSmartSearchFilter{
			hibiketorrent.AnimeProviderSmartSearchFilterBatch,
			hibiketorrent.AnimeProviderSmartSearchFilterEpisodeNumber,
			hibiketorrent.AnimeProviderSmartSearchFilterResolution,
			hibiketorrent.AnimeProviderSmartSearchFilterQuery,
		},
		SupportsAdult: false,
	}
}
// GetLatest fetches the most-seeded recent torrents for the provider's
// category from the nyaa RSS feed.
func (n *Provider) GetLatest() (ret []*hibiketorrent.AnimeTorrent, err error) {
	url, err := buildURL(n.baseUrl, BuildURLOptions{
		Provider: "nyaa",
		Query:    "",
		Category: n.category,
		SortBy:   "seeders",
		Filter:   "",
	})
	if err != nil {
		return nil, err
	}

	feed, err := gofeed.NewParser().ParseURL(url)
	if err != nil {
		return nil, err
	}

	return torrentSliceToAnimeTorrentSlice(convertRSS(feed), NyaaProviderName), nil
}
// Search runs a free-text query against the nyaa RSS feed, sorted by seeders.
func (n *Provider) Search(opts hibiketorrent.AnimeSearchOptions) (ret []*hibiketorrent.AnimeTorrent, err error) {
	n.logger.Trace().Str("query", opts.Query).Msg("nyaa: Search query")

	url, err := buildURL(n.baseUrl, BuildURLOptions{
		Provider: "nyaa",
		Query:    opts.Query,
		Category: n.category,
		SortBy:   "seeders",
		Filter:   "",
	})
	if err != nil {
		return nil, err
	}

	feed, err := gofeed.NewParser().ParseURL(url)
	if err != nil {
		return nil, err
	}

	return torrentSliceToAnimeTorrentSlice(convertRSS(feed), NyaaProviderName), nil
}
// SmartSearch runs every generated query concurrently against nyaa, merges
// the results, removes duplicates and — for single-episode searches — keeps
// only torrents whose parsed episode matches the requested (relative or
// absolute) episode number.
func (n *Provider) SmartSearch(opts hibiketorrent.AnimeSmartSearchOptions) (ret []*hibiketorrent.AnimeTorrent, err error) {
	queries, ok := buildSmartSearchQueries(&opts)
	if !ok {
		return nil, fmt.Errorf("could not build queries")
	}

	wg := sync.WaitGroup{}
	mu := sync.Mutex{}
	for _, query := range queries {
		wg.Add(1)
		go func(query string) {
			defer wg.Done()
			fp := gofeed.NewParser()
			n.logger.Trace().Str("query", query).Msg("nyaa: Smart search query")
			url, err := buildURL(n.baseUrl, BuildURLOptions{
				Provider: "nyaa",
				Query:    query,
				Category: n.category,
				SortBy:   "seeders",
				Filter:   "",
			})
			if err != nil {
				return
			}
			n.logger.Trace().Str("url", url).Msg("nyaa: Smart search url")
			feed, err := fp.ParseURL(url)
			if err != nil {
				return
			}
			res := convertRSS(feed)
			mu.Lock()
			// BUG FIX: append instead of overwrite — previously each goroutine
			// replaced ret entirely, so only the last query's results survived.
			ret = append(ret, torrentSliceToAnimeTorrentSlice(res, NyaaProviderName)...)
			mu.Unlock()
		}(query)
	}
	wg.Wait()

	// BUG FIX: lo.UniqBy returns a new slice; the result was previously
	// discarded, so duplicates were never removed.
	ret = lo.UniqBy(ret, func(i *hibiketorrent.AnimeTorrent) string {
		return i.Link
	})

	if !opts.Batch {
		// Single-episode search: filter by the relative or absolute episode.
		ret = lo.Filter(ret, func(i *hibiketorrent.AnimeTorrent, _ int) bool {
			relEp := i.EpisodeNumber
			if relEp == -1 {
				return false
			}
			absEp := opts.Media.AbsoluteSeasonOffset + opts.EpisodeNumber
			return opts.EpisodeNumber == relEp || absEp == relEp
		})
	}
	return
}
// GetTorrentInfoHash returns the info hash already parsed from the RSS feed;
// no scraping is required.
func (n *Provider) GetTorrentInfoHash(torrent *hibiketorrent.AnimeTorrent) (string, error) {
	return torrent.InfoHash, nil
}
// GetTorrentMagnetLink scrapes the magnet link from the torrent's view page
// (the RSS feed does not include it).
func (n *Provider) GetTorrentMagnetLink(torrent *hibiketorrent.AnimeTorrent) (string, error) {
	return TorrentMagnet(torrent.Link)
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// ADVANCED SEARCH
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// buildSmartSearchQueries returns a slice of queries for nyaa.si.
// The second element, when present, is the absolute-episode query.
// It returns false when no query could be built.
// opts.Query, when set, overrides the constructed title section but not the
// other parameters (resolution, batch, episode).
func buildSmartSearchQueries(opts *hibiketorrent.AnimeSmartSearchOptions) ([]string, bool) {
	romTitle := opts.Media.RomajiTitle
	engTitle := opts.Media.EnglishTitle

	allTitles := []*string{&romTitle, engTitle}
	for _, synonym := range opts.Media.Synonyms {
		// BUG FIX: shadow the loop variable before taking its address.
		// Before Go 1.22 all appended pointers aliased the same variable,
		// so every synonym entry pointed at the last synonym.
		synonym := synonym
		allTitles = append(allTitles, &synonym)
	}

	season := 0
	part := 0

	// Create titles by extracting season/part info.
	titles := make([]string, 0)

	// Build titles if no query provided.
	if opts.Query == "" {
		for _, title := range allTitles {
			if title == nil {
				continue
			}
			s, cTitle := util.ExtractSeasonNumber(*title)
			p, cTitle := util.ExtractPartNumber(cTitle)
			if s != 0 { // update season if it got parsed
				season = s
			}
			if p != 0 { // update part if it got parsed
				part = p
			}
			if cTitle != "" { // add "cleaned" titles
				titles = append(titles, cTitle)
			}
		}

		// Check season from synonyms; only update season if it's still 0.
		for _, synonym := range opts.Media.Synonyms {
			s, _ := util.ExtractSeasonNumber(synonym)
			if s != 0 && season == 0 {
				season = s
			}
		}

		// No season or part got parsed, meaning there is no "cleaned" title;
		// add the romaji and english titles to the title list.
		if season == 0 && part == 0 {
			titles = append(titles, romTitle)
			if engTitle != nil {
				if len(*engTitle) > 0 {
					titles = append(titles, *engTitle)
				}
			}
		}

		// Convert "III" and "II" suffixes to a season number; the suffixes
		// are stripped from the titles during cleaning below.
		if season == 0 && (strings.Contains(strings.ToLower(romTitle), " iii")) {
			season = 3
		}
		if season == 0 && (strings.Contains(strings.ToLower(romTitle), " ii")) {
			season = 2
		}
		if engTitle != nil {
			if season == 0 && (strings.Contains(strings.ToLower(*engTitle), " iii")) {
				season = 3
			}
			if season == 0 && (strings.Contains(strings.ToLower(*engTitle), " ii")) {
				season = 2
			}
		}

		// Also split the titles on ":" — if the first part is long enough,
		// add it as a standalone title candidate.
		// DEVNOTE maybe we should only do that if the season IS found.
		split := strings.Split(romTitle, ":")
		if len(split) > 1 && len(split[0]) > 8 {
			titles = append(titles, split[0])
		}
		if engTitle != nil {
			split := strings.Split(*engTitle, ":")
			if len(split) > 1 && len(split[0]) > 8 {
				titles = append(titles, split[0])
			}
		}

		// Clean titles: drop ":" and "-", collapse whitespace, lowercase,
		// and strip roman-numeral season suffixes once a season is known.
		for i, title := range titles {
			titles[i] = strings.TrimSpace(strings.ReplaceAll(title, ":", " "))
			titles[i] = strings.TrimSpace(strings.ReplaceAll(titles[i], "-", " "))
			titles[i] = strings.Join(strings.Fields(titles[i]), " ")
			titles[i] = strings.ToLower(titles[i])
			if season != 0 {
				titles[i] = strings.ReplaceAll(titles[i], " iii", "")
				titles[i] = strings.ReplaceAll(titles[i], " ii", "")
			}
		}
		titles = lo.Uniq(titles)
	} else {
		titles = append(titles, strings.ToLower(opts.Query))
	}

	//
	// Parameters
	//

	// Batches only make sense once the media has finished airing.
	canBatch := false
	if opts.Media.Status == string(anilist.MediaStatusFinished) && opts.Media.EpisodeCount > 0 {
		canBatch = true
	}

	normalBuff := bytes.NewBufferString("")

	// Batch section — stays empty unless:
	// 1. the media is finished and has more than 1 episode,
	// 2. the media is not a movie,
	// 3. the media is not a single episode.
	batchBuff := bytes.NewBufferString("")
	if opts.Batch && canBatch && !(opts.Media.Format == string(anilist.MediaFormatMovie) && opts.Media.EpisodeCount == 1) {
		if season != 0 {
			batchBuff.WriteString(buildSeasonString(season))
		}
		if part != 0 {
			batchBuff.WriteString(buildPartString(part))
		}
		batchBuff.WriteString(buildBatchString(&opts.Media))
	} else {
		normalBuff.WriteString(buildSeasonString(season))
		if part != 0 {
			normalBuff.WriteString(buildPartString(part))
		}
		if !(opts.Media.Format == string(anilist.MediaFormatMovie) && opts.Media.EpisodeCount == 1) {
			normalBuff.WriteString(buildEpisodeString(opts.EpisodeNumber))
		}
	}

	titleStr := buildTitleString(titles)
	batchStr := batchBuff.String()
	normalStr := normalBuff.String()

	// Replace titleStr if the user provided a query.
	if opts.Query != "" {
		titleStr = fmt.Sprintf(`(%s)`, opts.Query)
	}

	query := fmt.Sprintf("%s%s%s", titleStr, batchStr, normalStr)
	if opts.Resolution != "" {
		query = fmt.Sprintf("%s(%s)", query, opts.Resolution)
	} else {
		query = fmt.Sprintf("%s(%s)", query, strings.Join([]string{"360", "480", "720", "1080"}, "|"))
	}

	// Absolute-episode query, e.g. "jujutsu kaisen 25".
	query2 := ""
	if !opts.Batch && opts.Media.AbsoluteSeasonOffset > 0 && !(opts.Media.Format == string(anilist.MediaFormatMovie) && opts.Media.EpisodeCount == 1) {
		// (redundant fmt.Sprintf("%s", ...) wrapper removed)
		query2 = buildAbsoluteGroupString(titleStr, opts.Resolution, opts)
	}

	ret := []string{query}
	if query2 != "" {
		ret = append(ret, query2)
	}
	return ret, true
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//func sanitizeTitle(t string) string {
// return strings.ReplaceAll(strings.ReplaceAll(strings.ReplaceAll(strings.ReplaceAll(strings.ReplaceAll(t, "!", ""), ":", ""), "[", ""), "]", ""), ".", "")
//}
// buildTitleString joins the title candidates into a nyaa query group:
// a single title is emitted bare — (title) — while multiple titles are
// quoted and OR-ed — ("jjk"|"jujutsu kaisen").
func buildTitleString(titles []string) string {
	if len(titles) == 1 {
		return fmt.Sprintf(`(%s)`, titles[0])
	}
	quoted := make([]string, len(titles))
	for i, title := range titles {
		quoted[i] = `"` + title + `"`
	}
	return "(" + strings.Join(quoted, "|") + ")"
}
// buildAbsoluteGroupString builds the query section matching the absolute
// episode numbering, e.g. `(jujutsu kaisen)(25)(1080)`.
func buildAbsoluteGroupString(title, resolution string, opts *hibiketorrent.AnimeSmartSearchOptions) string {
	absoluteEp := opts.EpisodeNumber + opts.Media.AbsoluteSeasonOffset
	return fmt.Sprintf("%s(%d)(%s)", title, absoluteEp, resolution)
}
// (s01e01)
func buildSeasonAndEpisodeGroup(season int, ep int) string {
if season == 0 {
season = 1
}
return fmt.Sprintf(`"s%se%s"`, zeropad(season), zeropad(ep))
}
// buildEpisodeString returns the OR-group of common episode spellings,
// e.g. (01|e01|e01v|01v|ep01|ep1).
func buildEpisodeString(ep int) string {
	padded := zeropad(ep)
	alternatives := []string{
		padded,
		"e" + padded,
		"e" + padded + "v",
		padded + "v",
		"ep" + padded,
		fmt.Sprintf("ep%d", ep),
	}
	return "(" + strings.Join(alternatives, "|") + ")"
}
// (season 1|season 01|s1|s01)
func buildSeasonString(season int) string {
// Season section
seasonBuff := bytes.NewBufferString("")
// e.g. S1, season 1, season 01
if season != 0 {
seasonBuff.WriteString(fmt.Sprintf(`("%s%d"|`, "season ", season))
seasonBuff.WriteString(fmt.Sprintf(`"%s%s"|`, "season ", zeropad(season)))
seasonBuff.WriteString(fmt.Sprintf(`"%s%d"|`, "s", season))
seasonBuff.WriteString(fmt.Sprintf(`"%s%s")`, "s", zeropad(season)))
}
return seasonBuff.String()
}
// buildPartString returns the query group matching a season part,
// e.g. ("part 2"); empty when part is 0.
func buildPartString(part int) string {
	if part == 0 {
		return ""
	}
	return fmt.Sprintf(`("part %d")`, part)
}
// buildBatchString returns the OR-group of markers identifying a batch
// release: an episode range ("01 - 12", "01 ~ 12") or a batch keyword.
func buildBatchString(m *hibiketorrent.Media) string {
	keywords := []string{
		// e.g. "01 - 12"
		`"` + zeropad("1") + " - " + zeropad(m.EpisodeCount) + `"`,
		// e.g. "01 ~ 12"
		`"` + zeropad("1") + " ~ " + zeropad(m.EpisodeCount) + `"`,
		`"Batch"`,
		`"Complete"`,
		`"+ OVA"`,
		`"+ Specials"`,
		`"+ Special"`,
		`"Seasons"`,
		`"Parts"`,
	}
	return "(" + strings.Join(keywords, "|") + ")"
}
// zeropad formats an int or string padded to at least two characters with
// leading zeros ("5" -> "05"). Any other type yields "".
func zeropad(v interface{}) string {
	if n, ok := v.(int); ok {
		return fmt.Sprintf("%02d", n)
	}
	if s, ok := v.(string); ok {
		return fmt.Sprintf("%02s", s)
	}
	return ""
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// convertRSS maps every feed item to a Torrent, reading the nyaa-specific
// RSS extension fields.
func convertRSS(feed *gofeed.Feed) []Torrent {
	// ext safely reads the first value of a "nyaa" RSS extension field.
	// ROBUSTNESS FIX: the previous unconditional [0] indexing panicked when a
	// field was missing from an item; absent fields now yield "".
	ext := func(item *gofeed.Item, key string) string {
		ns, ok := item.Extensions["nyaa"]
		if !ok {
			return ""
		}
		vals := ns[key]
		if len(vals) == 0 {
			return ""
		}
		return vals[0].Value
	}

	var res []Torrent
	for _, item := range feed.Items {
		res = append(
			res,
			Torrent{
				Name:        item.Title,
				Link:        item.Link,
				Date:        item.Published,
				Description: item.Description,
				GUID:        item.GUID,
				Comments:    ext(item, "comments"),
				IsTrusted:   ext(item, "trusted"),
				IsRemake:    ext(item, "remake"),
				Size:        ext(item, "size"),
				Seeders:     ext(item, "seeders"),
				Leechers:    ext(item, "leechers"),
				Downloads:   ext(item, "downloads"),
				Category:    ext(item, "category"),
				CategoryID:  ext(item, "categoryId"),
				InfoHash:    ext(item, "infoHash"),
			},
		)
	}
	return res
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// torrentSliceToAnimeTorrentSlice converts every parsed RSS torrent into an
// AnimeTorrent. The conversion is pure CPU work (title parsing), so it runs
// sequentially: the previous goroutine-per-item + mutex version serialized on
// the lock anyway and produced a nondeterministic result order.
func torrentSliceToAnimeTorrentSlice(torrents []Torrent, providerName string) []*hibiketorrent.AnimeTorrent {
	ret := make([]*hibiketorrent.AnimeTorrent, 0, len(torrents))
	for i := range torrents {
		ret = append(ret, torrents[i].toAnimeTorrent(providerName))
	}
	return ret
}
// toAnimeTorrent converts a parsed nyaa RSS Torrent into the hibike
// AnimeTorrent shape, deriving resolution, release group, batch status and
// episode number from the release title.
func (t *Torrent) toAnimeTorrent(providerName string) *hibiketorrent.AnimeTorrent {
	// Local renamed from "metadata" to avoid shadowing a package name.
	parsed := habari.Parse(t.Name)

	// The feed reports these as strings; 0 on parse failure.
	seeders, _ := strconv.Atoi(t.Seeders)
	leechers, _ := strconv.Atoi(t.Leechers)
	downloads, _ := strconv.Atoi(t.Downloads)

	// The nyaa RSS pubDate uses the RFC 1123Z layout.
	formattedDate := ""
	parsedDate, err := time.Parse(time.RFC1123Z, t.Date)
	if err == nil {
		formattedDate = parsedDate.Format(time.RFC3339)
	}

	ret := &hibiketorrent.AnimeTorrent{
		Name:          t.Name,
		Date:          formattedDate,
		Size:          t.GetSizeInBytes(),
		FormattedSize: t.Size,
		Seeders:       seeders,
		Leechers:      leechers,
		DownloadCount: downloads,
		Link:          t.GUID,
		DownloadUrl:   t.Link,
		InfoHash:      t.InfoHash,
		MagnetLink:    "", // scraped on demand (GetTorrentMagnetLink)
		Resolution:    "", // filled in below
		IsBatch:       false,
		EpisodeNumber: -1,
		ReleaseGroup:  "",
		Provider:      providerName,
		IsBestRelease: false,
		Confirmed:     false,
	}

	// A release is considered a batch when the title carries multiple episode
	// numbers (a range) or contains batch keywords ("Batch", "Complete", ...).
	// (Dead "if ret.IsBatch == false" guard removed: IsBatch was always false
	// at this point.)
	ret.IsBatch = len(parsed.EpisodeNumber) > 1 || comparison.ValueContainsBatchKeywords(t.Name)

	episode := -1
	if len(parsed.EpisodeNumber) == 1 {
		episode = util.StringToIntMust(parsed.EpisodeNumber[0])
	}

	ret.Resolution = parsed.VideoResolution
	ret.ReleaseGroup = parsed.ReleaseGroup
	ret.EpisodeNumber = episode
	return ret
}

View File

@@ -0,0 +1,167 @@
package nyaa
import (
"seanime/internal/api/anilist"
hibiketorrent "seanime/internal/extension/hibike/torrent"
"seanime/internal/platforms/anilist_platform"
"seanime/internal/util"
"seanime/internal/util/limiter"
"testing"
"github.com/stretchr/testify/require"
)
// TestSearch runs a free-text search against the live nyaa endpoint and logs
// the returned torrent names.
func TestSearch(t *testing.T) {
	provider := NewProvider(util.NewLogger(), categoryAnime)

	results, err := provider.Search(hibiketorrent.AnimeSearchOptions{
		Query: "One Piece",
	})
	require.NoError(t, err)

	for _, tor := range results {
		t.Log(tor.Name)
	}
}
// TestSmartSearch runs the nyaa smart-search pipeline for a handful of known
// media against the live endpoint and logs the results. The AniList limiter
// throttles the mock-client calls between cases.
func TestSmartSearch(t *testing.T) {
	anilistLimiter := limiter.NewAnilistLimiter()
	anilistClient := anilist.TestGetMockAnilistClient()
	logger := util.NewLogger()
	anilistPlatform := anilist_platform.NewAnilistPlatform(anilistClient, logger)
	nyaaProvider := NewProvider(util.NewLogger(), categoryAnime)

	tests := []struct {
		name           string
		mId            int
		batch          bool
		episodeNumber  int
		absoluteOffset int
		resolution     string
		scrapeMagnet   bool // additionally exercise GetTorrentMagnetLink (slow: one page scrape per torrent)
	}{
		{
			name:           "Bungou Stray Dogs 5th Season Episode 11",
			mId:            163263,
			batch:          false,
			episodeNumber:  11,
			absoluteOffset: 45,
			resolution:     "1080",
			scrapeMagnet:   true,
		},
		{
			name:           "SPY×FAMILY Season 1 Part 2",
			mId:            142838,
			batch:          false,
			episodeNumber:  12,
			absoluteOffset: 12,
			resolution:     "1080",
			scrapeMagnet:   false,
		},
		{
			name:           "Jujutsu Kaisen Season 2",
			mId:            145064,
			batch:          false,
			episodeNumber:  2,
			absoluteOffset: 24,
			resolution:     "1080",
			scrapeMagnet:   false,
		},
		{
			name:           "Violet Evergarden The Movie",
			mId:            103047,
			batch:          true,
			episodeNumber:  1,
			absoluteOffset: 0,
			resolution:     "720",
			scrapeMagnet:   false,
		},
		{
			name:           "Sousou no Frieren",
			mId:            154587,
			batch:          false,
			episodeNumber:  10,
			absoluteOffset: 0,
			resolution:     "1080",
			scrapeMagnet:   false,
		},
		{
			name:           "Tokubetsu-hen Hibike! Euphonium: Ensemble",
			mId:            150429,
			batch:          false,
			episodeNumber:  1,
			absoluteOffset: 0,
			resolution:     "1080",
			scrapeMagnet:   false,
		},
	}

	for _, tt := range tests {
		anilistLimiter.Wait()
		t.Run(tt.name, func(t *testing.T) {
			media, err := anilistPlatform.GetAnime(t.Context(), tt.mId)
			require.NoError(t, err)
			require.NotNil(t, media)

			// Build the provider-agnostic media descriptor from the AniList data.
			queryMedia := hibiketorrent.Media{
				ID:                   media.GetID(),
				IDMal:                media.GetIDMal(),
				Status:               string(*media.GetStatus()),
				Format:               string(*media.GetFormat()),
				EnglishTitle:         media.GetTitle().GetEnglish(),
				RomajiTitle:          media.GetRomajiTitleSafe(),
				EpisodeCount:         media.GetTotalEpisodeCount(),
				AbsoluteSeasonOffset: tt.absoluteOffset,
				Synonyms:             media.GetSynonymsContainingSeason(),
				IsAdult:              *media.GetIsAdult(),
				StartDate: &hibiketorrent.FuzzyDate{
					Year:  *media.GetStartDate().GetYear(),
					Month: media.GetStartDate().GetMonth(),
					Day:   media.GetStartDate().GetDay(),
				},
			}

			torrents, err := nyaaProvider.SmartSearch(hibiketorrent.AnimeSmartSearchOptions{
				Media:         queryMedia,
				Query:         "",
				Batch:         tt.batch,
				EpisodeNumber: tt.episodeNumber,
				Resolution:    tt.resolution,
				AnidbAID:      0,     // Not supported
				AnidbEID:      0,     // Not supported
				BestReleases:  false, // Not supported
			})
			require.NoError(t, err, "error searching nyaa")

			for _, torrent := range torrents {
				scrapedMagnet := ""
				if tt.scrapeMagnet {
					magn, err := nyaaProvider.GetTorrentMagnetLink(torrent)
					if err == nil {
						scrapedMagnet = magn
					}
				}
				t.Log(torrent.Name)
				t.Logf("\tMagnet: %s", torrent.MagnetLink)
				if scrapedMagnet != "" {
					t.Logf("\tMagnet (Scraped): %s", scrapedMagnet)
				}
				t.Logf("\tEpisodeNumber: %d", torrent.EpisodeNumber)
				t.Logf("\tResolution: %s", torrent.Resolution)
				t.Logf("\tIsBatch: %v", torrent.IsBatch)
				t.Logf("\tConfirmed: %v", torrent.Confirmed)
			}
		})
	}
}

View File

@@ -0,0 +1,159 @@
package nyaa
import (
"errors"
"github.com/gocolly/colly"
"regexp"
"strconv"
"strings"
)
// TorrentFiles scrapes the file list from a nyaa view page. It returns the
// folder entries when the torrent contains folders, otherwise the flat list
// of files.
func TorrentFiles(viewURL string) ([]string, error) {
	var folders []string
	var files []string
	c := colly.NewCollector()
	c.OnHTML(".folder", func(e *colly.HTMLElement) {
		folders = append(folders, e.Text)
	})
	c.OnHTML(".torrent-file-list", func(e *colly.HTMLElement) {
		files = append(files, e.ChildText("li"))
	})
	var scrapeErr error
	c.OnError(func(r *colly.Response, err error) {
		scrapeErr = err
	})
	// BUG FIX: the scrape error was previously checked BEFORE Visit ran
	// (always nil) and Visit's own error was discarded.
	if err := c.Visit(viewURL); err != nil {
		return nil, err
	}
	if scrapeErr != nil {
		return nil, scrapeErr
	}
	if len(folders) == 0 {
		return files, nil
	}
	return folders, nil
}
// TorrentMagnet scrapes the magnet link from a nyaa view page.
// It returns an error when the page cannot be fetched or no link is found.
func TorrentMagnet(viewURL string) (string, error) {
	var magnetLink string
	c := colly.NewCollector()
	c.OnHTML("a.card-footer-item", func(e *colly.HTMLElement) {
		magnetLink = e.Attr("href")
	})
	var scrapeErr error
	c.OnError(func(r *colly.Response, err error) {
		scrapeErr = err
	})
	// BUG FIX: the scrape error was previously checked BEFORE Visit ran
	// (always nil) and Visit's own error was discarded.
	if err := c.Visit(viewURL); err != nil {
		return "", err
	}
	if scrapeErr != nil {
		return "", scrapeErr
	}
	if magnetLink == "" {
		return "", errors.New("magnet link not found")
	}
	return magnetLink, nil
}
// TorrentInfo scrapes the title, peer counts, completion count, formatted
// size, info hash and magnet link from a nyaa view page in a single visit.
func TorrentInfo(viewURL string) (title string, seeders int, leechers int, completed int, formattedSize string, infoHash string, magnetLink string, err error) {
	c := colly.NewCollector()
	c.OnHTML("a.card-footer-item", func(e *colly.HTMLElement) {
		magnetLink = e.Attr("href")
	})
	c.OnHTML(".panel-title", func(e *colly.HTMLElement) {
		if title == "" {
			title = strings.TrimSpace(e.Text)
		}
	})
	// Extract the remaining fields from the stats panel. Each field is only
	// read once (guarded by its zero value) since the selectors can match
	// multiple panels.
	c.OnHTML(".panel-body", func(e *colly.HTMLElement) {
		if seeders == 0 {
			// Seeders are rendered in green.
			e.ForEach("div:contains('Seeders:') span", func(_ int, el *colly.HTMLElement) {
				if el.Attr("style") == "color: green;" {
					seeders, _ = strconv.Atoi(el.Text)
				}
			})
		}
		if leechers == 0 {
			// Leechers are rendered in red.
			e.ForEach("div:contains('Leechers:') span", func(_ int, el *colly.HTMLElement) {
				if el.Attr("style") == "color: red;" {
					leechers, _ = strconv.Atoi(el.Text)
				}
			})
		}
		if completed == 0 {
			// Completed count sits three sibling divs after its label.
			e.ForEach("div:contains('Completed:')", func(_ int, el *colly.HTMLElement) {
				completed, _ = strconv.Atoi(el.DOM.Parent().Find("div").Next().Next().Next().Text())
			})
		}
		if formattedSize == "" {
			// File size: skip cells containing layout tabs.
			e.ForEach("div:contains('File size:')", func(_ int, el *colly.HTMLElement) {
				text := el.DOM.Parent().ChildrenFiltered("div:nth-child(2)").Text()
				if !strings.Contains(text, "\t") {
					formattedSize = text
				}
			})
		}
		if infoHash == "" {
			e.ForEach("div:contains('Info hash:') kbd", func(_ int, el *colly.HTMLElement) {
				infoHash = el.Text
			})
		}
	})
	var scrapeErr error
	c.OnError(func(r *colly.Response, err error) {
		scrapeErr = err
	})
	// BUG FIX: the scrape error was previously checked BEFORE Visit ran
	// (always nil) and Visit's return value was discarded.
	if vErr := c.Visit(viewURL); vErr != nil {
		err = vErr
		return
	}
	if scrapeErr != nil {
		err = scrapeErr
		return
	}
	if magnetLink == "" {
		err = errors.New("magnet link not found")
		return
	}
	return
}
// TorrentHash resolves the BitTorrent info hash of a torrent by scraping
// its magnet link from the nyaa view page and extracting the btih value.
func TorrentHash(viewURL string) (string, error) {
	magnet, err := TorrentMagnet(viewURL)
	if err != nil {
		return "", err
	}

	// Pull the hash out of the "xt=urn:btih:<hash>" magnet parameter.
	hashRe := regexp.MustCompile(`magnet:\?xt=urn:btih:([^&]+)`)
	groups := hashRe.FindStringSubmatch(magnet)
	if len(groups) <= 1 {
		return "", errors.New("could not extract hash")
	}
	return groups[1], nil
}

View File

@@ -0,0 +1,54 @@
package nyaa
import (
"seanime/internal/util"
"testing"
"github.com/davecgh/go-spew/spew"
"github.com/stretchr/testify/assert"
)
// TestTorrentFiles scrapes a real nyaa view page (URL is base64-obfuscated)
// and expects a non-empty file/folder list. Requires network access.
func TestTorrentFiles(t *testing.T) {
	files, err := TorrentFiles(util.Decode("aHR0cHM6Ly9ueWFhLnNpL3ZpZXcvMTU0MjA1Nw==")) // durarara complete series
	assert.NoError(t, err)
	t.Log(spew.Sdump(files))
	assert.NotEmpty(t, files)
}
// TestTorrentMagnet scrapes the magnet link from a real nyaa view page.
// Requires network access.
func TestTorrentMagnet(t *testing.T) {
	magnet, err := TorrentMagnet(util.Decode("aHR0cHM6Ly9ueWFhLnNpL3ZpZXcvMTg4Njg4Ng=="))
	assert.NoError(t, err)
	t.Log(magnet)
	assert.NotEmpty(t, magnet)
}
// TestTorrentInfo scrapes all info fields from a real nyaa view page and
// logs them; no assertions on values. Requires network access.
func TestTorrentInfo(t *testing.T) {
	title, a, b, c, fs, d, e, err := TorrentInfo(util.Decode("aHR0cHM6Ly9ueWFhLnNpL3ZpZXcvMTcyNzkyMg=="))
	assert.NoError(t, err)
	t.Logf("Title: %s\n", title)
	t.Logf("Seeders: %d\n", a)
	t.Logf("Leechers: %d\n", b)
	t.Logf("Downloads: %d\n", c)
	t.Logf("Formatted Size: %s\n", fs)
	t.Logf("Info Hash: %s\n", d)
	t.Logf("Download link: %s\n", e)
}
// TestTorrentHash extracts the info hash from a real nyaa view page.
// Requires network access.
func TestTorrentHash(t *testing.T) {
	hash, err := TorrentHash(util.Decode("aHR0cHM6Ly9ueWFhLnNpL3ZpZXcvMTc0MTY5MQ=="))
	assert.NoError(t, err)
	t.Log(hash)
	assert.NotEmpty(t, hash)
}

View File

@@ -0,0 +1,133 @@
package nyaa
import (
"seanime/internal/extension"
hibiketorrent "seanime/internal/extension/hibike/torrent"
"sync"
"github.com/mmcdole/gofeed"
"github.com/rs/zerolog"
)
const (
	// SukebeiProviderName is the extension ID used for sukebei torrents.
	SukebeiProviderName = "nyaa-sukebei"
)

// SukebeiProvider implements hibiketorrent.AnimeProvider for nyaa's
// sukebei (adult content) mirror.
type SukebeiProvider struct {
	logger  *zerolog.Logger
	baseUrl string // set via SetSavedUserConfig ("apiUrl"); may be empty
}
// NewSukebeiProvider builds the sukebei torrent provider.
// The base URL is populated later through SetSavedUserConfig.
func NewSukebeiProvider(logger *zerolog.Logger) hibiketorrent.AnimeProvider {
	provider := &SukebeiProvider{
		logger: logger,
	}
	return provider
}
// SetSavedUserConfig applies the user-saved extension config.
// A missing "apiUrl" key leaves baseUrl empty; presumably buildURL falls
// back to a default endpoint in that case — confirm against buildURL.
func (n *SukebeiProvider) SetSavedUserConfig(config extension.SavedUserConfig) {
	// A map read with a missing key already yields ""; the two-value
	// form with a discarded ok was unnecessary (staticcheck S1005).
	n.baseUrl = config.Values["apiUrl"]
}
// GetSettings reports this provider's capabilities: a "special" provider
// that cannot smart-search and serves adult content.
func (n *SukebeiProvider) GetSettings() hibiketorrent.AnimeProviderSettings {
	return hibiketorrent.AnimeProviderSettings{
		Type:           hibiketorrent.AnimeProviderTypeSpecial,
		CanSmartSearch: false,
		SupportsAdult:  true,
	}
}
// GetLatest fetches the sukebei RSS feed (sorted by seeders, empty query)
// and converts every entry to an AnimeTorrent.
func (n *SukebeiProvider) GetLatest() (ret []*hibiketorrent.AnimeTorrent, err error) {
	fp := gofeed.NewParser()

	url, err := buildURL(n.baseUrl, BuildURLOptions{
		Provider: "sukebei",
		Query:    "",
		Category: "art-anime",
		SortBy:   "seeders",
		Filter:   "",
	})
	if err != nil {
		return nil, err
	}

	// get content
	feed, err := fp.ParseURL(url)
	if err != nil {
		return nil, err
	}

	// parse content. The previous goroutine-per-item version held a mutex
	// across each conversion, so it was fully serialized anyway — and it
	// scrambled the feed order. A plain loop is equivalent and deterministic.
	res := convertRSS(feed)
	ret = make([]*hibiketorrent.AnimeTorrent, 0, len(res))
	for _, torrent := range res {
		ret = append(ret, torrent.toAnimeTorrent(SukebeiProviderName))
	}
	return ret, nil
}
// Search queries the sukebei RSS feed with the given text query and
// converts every entry to an AnimeTorrent.
func (n *SukebeiProvider) Search(opts hibiketorrent.AnimeSearchOptions) (ret []*hibiketorrent.AnimeTorrent, err error) {
	fp := gofeed.NewParser()

	n.logger.Trace().Str("query", opts.Query).Msg("nyaa: Search query")

	url, err := buildURL(n.baseUrl, BuildURLOptions{
		Provider: "sukebei",
		Query:    opts.Query,
		Category: "art-anime",
		SortBy:   "seeders",
		Filter:   "",
	})
	if err != nil {
		return nil, err
	}

	// get content
	feed, err := fp.ParseURL(url)
	if err != nil {
		return nil, err
	}

	// parse content. The previous goroutine-per-item version held a mutex
	// across each conversion, so it was fully serialized anyway — and it
	// scrambled the feed order. A plain loop is equivalent and deterministic.
	res := convertRSS(feed)
	ret = make([]*hibiketorrent.AnimeTorrent, 0, len(res))
	for _, torrent := range res {
		ret = append(ret, torrent.toAnimeTorrent(SukebeiProviderName))
	}
	return ret, nil
}
// SmartSearch is not supported by the sukebei provider (CanSmartSearch is
// false in GetSettings); it always returns a nil slice and a nil error.
func (n *SukebeiProvider) SmartSearch(opts hibiketorrent.AnimeSmartSearchOptions) (ret []*hibiketorrent.AnimeTorrent, err error) {
	return
}
// GetTorrentInfoHash scrapes the info hash from the torrent's view page.
func (n *SukebeiProvider) GetTorrentInfoHash(torrent *hibiketorrent.AnimeTorrent) (string, error) {
	return TorrentHash(torrent.Link)
}
// GetTorrentMagnetLink scrapes the magnet link from the torrent's view page.
func (n *SukebeiProvider) GetTorrentMagnetLink(torrent *hibiketorrent.AnimeTorrent) (string, error) {
	return TorrentMagnet(torrent.Link)
}

View File

@@ -0,0 +1,149 @@
package seadex
import (
"context"
"github.com/5rahim/habari"
"github.com/rs/zerolog"
"net/http"
"seanime/internal/torrents/nyaa"
"sync"
"time"
hibiketorrent "seanime/internal/extension/hibike/torrent"
)
const (
	// ProviderName is the extension ID used for SeaDex torrents.
	ProviderName = "seadex"
)

// Provider implements hibiketorrent.AnimeProvider on top of the SeaDex
// (releases.moe) best-release database.
type Provider struct {
	logger *zerolog.Logger
	seadex *SeaDex // client for the SeaDex records API
}
// NewProvider returns the SeaDex torrent provider backed by a fresh
// SeaDex API client.
func NewProvider(logger *zerolog.Logger) hibiketorrent.AnimeProvider {
	p := &Provider{
		logger: logger,
		seadex: New(logger),
	}
	return p
}
// GetSettings reports this provider's capabilities: a "special" provider,
// no adult content; smart search is enabled only so previews are shown.
func (n *Provider) GetSettings() hibiketorrent.AnimeProviderSettings {
	return hibiketorrent.AnimeProviderSettings{
		Type:           hibiketorrent.AnimeProviderTypeSpecial,
		CanSmartSearch: true, // Setting to true to allow previews
		SupportsAdult:  false,
	}
}
// GetType identifies SeaDex as a "special" provider.
func (n *Provider) GetType() hibiketorrent.AnimeProviderType {
	return hibiketorrent.AnimeProviderTypeSpecial
}
// GetLatest is a no-op: SeaDex has no "latest torrents" feed, so this
// always returns a nil slice and a nil error.
func (n *Provider) GetLatest() (ret []*hibiketorrent.AnimeTorrent, err error) {
	return
}
// Search ignores the text query and returns the SeaDex entries for the
// media; SeaDex is keyed by AniList ID, not free text.
func (n *Provider) Search(opts hibiketorrent.AnimeSearchOptions) (ret []*hibiketorrent.AnimeTorrent, err error) {
	return n.findTorrents(&opts.Media)
}
// SmartSearch behaves identically to Search: episode/resolution filters do
// not apply to SeaDex entries, which are whole-series best releases.
func (n *Provider) SmartSearch(opts hibiketorrent.AnimeSmartSearchOptions) (ret []*hibiketorrent.AnimeTorrent, err error) {
	return n.findTorrents(&opts.Media)
}
// findTorrents fetches the SeaDex entries for the media and converts each
// one to an AnimeTorrent concurrently; toAnimeTorrent may scrape nyaa, so
// the conversions are the expensive part.
func (n *Provider) findTorrents(media *hibiketorrent.Media) (ret []*hibiketorrent.AnimeTorrent, err error) {
	seadexTorrents, err := n.seadex.FetchTorrents(media.ID, media.RomajiTitle)
	if err != nil {
		return nil, err
	}

	wg := sync.WaitGroup{}
	mu := sync.Mutex{}
	wg.Add(len(seadexTorrents))
	for _, t := range seadexTorrents {
		go func(t *Torrent) {
			defer wg.Done()
			// Convert OUTSIDE the lock: the original held the mutex across
			// toAnimeTorrent (which performs network scraping), which
			// serialized all the work and defeated the goroutines entirely.
			at := t.toAnimeTorrent(ProviderName)
			mu.Lock()
			ret = append(ret, at)
			mu.Unlock()
		}(t)
	}
	wg.Wait()
	return
}
//--------------------------------------------------------------------------------------------------------------------------------------------------//
// GetTorrentInfoHash returns the torrent's MagnetLink field as-is.
// NOTE(review): this returns the magnet link rather than the InfoHash
// field that FetchTorrents populates — confirm callers expect a magnet
// string here and extract the hash themselves.
func (n *Provider) GetTorrentInfoHash(torrent *hibiketorrent.AnimeTorrent) (string, error) {
	return torrent.MagnetLink, nil
}
// GetTorrentMagnetLink scrapes the magnet link from the torrent's nyaa
// view page (SeaDex entries link to nyaa).
func (n *Provider) GetTorrentMagnetLink(torrent *hibiketorrent.AnimeTorrent) (string, error) {
	return nyaa.TorrentMagnet(torrent.Link)
}
// toAnimeTorrent converts a SeaDex Torrent into a hibike AnimeTorrent.
// SeaDex entries lack stats, so the function best-effort enriches the
// result by scraping the entry's nyaa page; on any failure the placeholder
// values below are kept. Entries are treated as confirmed batch best
// releases.
func (t *Torrent) toAnimeTorrent(providerName string) *hibiketorrent.AnimeTorrent {
	metadata := habari.Parse(t.Name)

	ret := &hibiketorrent.AnimeTorrent{
		Name:          t.Name,
		Date:          t.Date,
		Size:          0,  // Should be scraped
		FormattedSize: "", // Should be scraped
		Seeders:       0,  // Should be scraped
		Leechers:      0,  // Should be scraped
		DownloadCount: 0,  // Should be scraped
		Link:          t.Link,
		DownloadUrl:   "", // Should be scraped
		InfoHash:      t.InfoHash,
		MagnetLink:    "", // Should be scraped
		Resolution:    "", // Should be parsed
		IsBatch:       true, // Should be parsed
		EpisodeNumber: -1, // Should be parsed
		ReleaseGroup:  "", // Should be parsed
		Provider:      providerName,
		IsBestRelease: true,
		Confirmed:     true,
	}

	var seeders, leechers, downloads int
	var title, downloadUrl, formattedSize string

	// Try scraping from Nyaa
	// Since nyaa tends to be blocked, try for a few seconds only.
	// NOTE(review): the context timeout only bounds the probe request below;
	// nyaa.TorrentInfo performs its own, un-bounded fetch of the same page
	// (the page is downloaded twice) — consider passing a deadline through.
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	if t.Link != "" {
		downloadUrl = t.Link
		client := http.DefaultClient
		// Probe the page first with the 5s deadline; only scrape if reachable.
		req, err := http.NewRequestWithContext(ctx, http.MethodGet, ret.Link, nil)
		if err == nil {
			resp, err := client.Do(req)
			if err == nil {
				defer resp.Body.Close()
				title, seeders, leechers, downloads, formattedSize, _, _, err = nyaa.TorrentInfo(ret.Link)
				if err == nil && title != "" {
					ret.Name = title // Override title
					ret.Seeders = seeders
					ret.Leechers = leechers
					ret.DownloadCount = downloads
					ret.DownloadUrl = downloadUrl
					// NOTE(review): sentinel non-zero size marks "scraped OK";
					// the real byte count is not available here — confirm.
					ret.Size = 1
					ret.FormattedSize = formattedSize
				}
			}
		}
	}

	ret.Resolution = metadata.VideoResolution
	ret.ReleaseGroup = metadata.ReleaseGroup

	return ret
}

View File

@@ -0,0 +1,109 @@
package seadex
import (
"fmt"
"net/http"
"seanime/internal/extension"
"seanime/internal/util"
"strings"
"github.com/goccy/go-json"
"github.com/rs/zerolog"
)
type (
	// SeaDex is a minimal client for the releases.moe records API.
	SeaDex struct {
		logger *zerolog.Logger
		uri    string // records API endpoint; overridable via SetSavedUserConfig
	}

	// Torrent is a normalized SeaDex entry pointing at a nyaa torrent.
	Torrent struct {
		Name         string `json:"name"`
		Date         string `json:"date"`
		Size         int64  `json:"size"` // sum of file lengths, in bytes
		Link         string `json:"link"`
		InfoHash     string `json:"infoHash"`
		ReleaseGroup string `json:"releaseGroup,omitempty"`
	}
)
// New creates a SeaDex client pointing at the default (base64-obfuscated)
// records API endpoint. The endpoint can be overridden with
// SetSavedUserConfig.
func New(logger *zerolog.Logger) *SeaDex {
	return &SeaDex{
		logger: logger,
		uri:    util.Decode("aHR0cHM6Ly9yZWxlYXNlcy5tb2UvYXBpL2NvbGxlY3Rpb25zL2VudHJpZXMvcmVjb3Jkcw=="),
	}
}
// SetSavedUserConfig overrides the API endpoint with the user-configured
// "apiUrl" value; an absent or empty value keeps the default endpoint.
func (s *SeaDex) SetSavedUserConfig(savedConfig *extension.SavedUserConfig) {
	// A map read with a missing key already yields ""; the two-value form
	// with a discarded ok was unnecessary (staticcheck S1005).
	if url := savedConfig.Values["apiUrl"]; url != "" {
		s.uri = url
	}
}
// FetchTorrents returns the SeaDex torrents for the given AniList media ID.
// Only nyaa-hosted entries with a usable info hash are kept; the returned
// slice is never nil.
func (s *SeaDex) FetchTorrents(mediaId int, title string) (ret []*Torrent, err error) {
	ret = make([]*Torrent, 0)

	records, err := s.fetchRecords(mediaId)
	if err != nil {
		return nil, err
	}
	// No record, or a record with no torrent entries: empty result.
	if len(records) == 0 || len(records[0].Expand.Trs) == 0 {
		return ret, nil
	}

	for _, tr := range records[0].Expand.Trs {
		// Skip entries without a real info hash or not hosted on nyaa.
		if tr.InfoHash == "" || tr.InfoHash == "<redacted>" || tr.Tracker != "Nyaa" || !strings.Contains(tr.URL, "nyaa.si") {
			continue
		}
		suffix := ""
		if tr.DualAudio {
			suffix = " [Dual-Audio]"
		}
		ret = append(ret, &Torrent{
			Name:         fmt.Sprintf("[%s] %s%s", tr.ReleaseGroup, title, suffix),
			Date:         tr.Created,
			Size:         int64(s.getTorrentSize(tr.Files)),
			Link:         tr.URL,
			InfoHash:     tr.InfoHash,
			ReleaseGroup: tr.ReleaseGroup,
		})
	}
	return ret, nil
}
// fetchRecords queries the records API for the entry whose AniList ID
// matches mediaId (one record max, torrent relations expanded).
func (s *SeaDex) fetchRecords(mediaId int) (ret []*RecordItem, err error) {
	uri := fmt.Sprintf("%s?page=1&perPage=1&filter=alID%%3D%%22%d%%22&skipTotal=1&expand=trs", s.uri, mediaId)

	resp, err := http.Get(uri)
	if err != nil {
		s.logger.Error().Err(err).Msgf("seadex: error getting media records: %v", mediaId)
		return nil, err
	}
	defer resp.Body.Close()

	// Guard against decoding an HTML error page as JSON.
	if resp.StatusCode != http.StatusOK {
		err = fmt.Errorf("seadex: unexpected status %d fetching records for media %d", resp.StatusCode, mediaId)
		s.logger.Error().Err(err).Msg("seadex: request failed")
		return nil, err
	}

	var res RecordsResponse
	if err = json.NewDecoder(resp.Body).Decode(&res); err != nil {
		s.logger.Error().Err(err).Msgf("seadex: error decoding response: %v", mediaId)
		return nil, err
	}
	return res.Items, nil
}
// getTorrentSize sums the lengths (bytes) of all files in a torrent entry.
// A nil or empty file list yields 0.
func (s *SeaDex) getTorrentSize(fls []*TrFile) int {
	// Ranging over a nil slice is a no-op, so the explicit nil/empty
	// guard of the original was redundant (staticcheck S1009).
	var size int
	for _, f := range fls {
		size += f.Length
	}
	return size
}

View File

@@ -0,0 +1,48 @@
package seadex
import (
"context"
"github.com/davecgh/go-spew/spew"
"github.com/stretchr/testify/assert"
"seanime/internal/api/anilist"
"seanime/internal/test_utils"
"seanime/internal/util"
"testing"
)
// TestSeaDex fetches real SeaDex entries for known media IDs and dumps
// them; requires the AniList test provider and network access.
func TestSeaDex(t *testing.T) {
	test_utils.InitTestProvider(t, test_utils.Anilist())

	anilistClient := anilist.TestGetMockAnilistClient()

	tests := []struct {
		name    string
		mediaId int
	}{
		{
			name:    "86 - Eighty Six Part 2",
			mediaId: 131586,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			mediaF, err := anilistClient.BaseAnimeByID(context.Background(), &tt.mediaId)
			if assert.NoErrorf(t, err, "error getting media: %v", tt.mediaId) {
				media := mediaF.GetMedia()
				torrents, err := New(util.NewLogger()).FetchTorrents(tt.mediaId, media.GetRomajiTitleSafe())
				if assert.NoErrorf(t, err, "error fetching records: %v", tt.mediaId) {
					spew.Dump(torrents)
				}
			}
		})
	}
}

View File

@@ -0,0 +1,42 @@
package seadex
type (
	// RecordsResponse is the paginated envelope of the records API.
	RecordsResponse struct {
		Items []*RecordItem `json:"items"`
	}

	// RecordItem is one SeaDex database entry for a media.
	RecordItem struct {
		AlID           int    `json:"alID"` // AniList media ID
		CollectionID   string `json:"collectionId"`
		CollectionName string `json:"collectionName"`
		Comparison     string `json:"comparison"`
		Created        string `json:"created"`
		// Expand holds the torrent relations when requested with expand=trs.
		Expand struct {
			Trs []*Tr `json:"trs"`
		} `json:"expand"`
		Trs             []string `json:"trs"` // raw relation IDs
		Updated         string   `json:"updated"`
		ID              string   `json:"id"`
		Incomplete      bool     `json:"incomplete"`
		Notes           string   `json:"notes"`
		TheoreticalBest string   `json:"theoreticalBest"`
	}

	// Tr is a single torrent relation of a record.
	Tr struct {
		Created        string    `json:"created"`
		CollectionID   string    `json:"collectionId"`
		CollectionName string    `json:"collectionName"`
		DualAudio      bool      `json:"dualAudio"`
		Files          []*TrFile `json:"files"`
		ID             string    `json:"id"`
		InfoHash       string    `json:"infoHash"` // may be "<redacted>"
		IsBest         bool      `json:"isBest"`
		ReleaseGroup   string    `json:"releaseGroup"`
		Tracker        string    `json:"tracker"` // e.g. "Nyaa"
		URL            string    `json:"url"`
	}

	// TrFile is one file inside a torrent relation.
	TrFile struct {
		Length int    `json:"length"` // bytes
		Name   string `json:"name"`
	}
)

View File

@@ -0,0 +1,2 @@
Do not import:
- torrent_client

View File

@@ -0,0 +1,142 @@
package torrent
import (
"seanime/internal/api/metadata"
"seanime/internal/extension"
"seanime/internal/util/result"
"sync"
"github.com/rs/zerolog"
)
type (
	// Repository coordinates anime torrent provider extensions and caches
	// their search results per provider.
	Repository struct {
		logger                         *zerolog.Logger
		extensionBank                  *extension.UnifiedBank
		animeProviderSearchCaches      *result.Map[string, *result.Cache[string, *SearchData]] // simple-search cache, keyed by provider ID
		animeProviderSmartSearchCaches *result.Map[string, *result.Cache[string, *SearchData]] // smart-search cache, keyed by provider ID
		settings                       RepositorySettings
		metadataProvider               metadata.Provider
		mu                             sync.Mutex // guards settings and extensionBank
	}

	RepositorySettings struct {
		DefaultAnimeProvider string // Default torrent provider
	}
)
// NewRepositoryOptions carries the dependencies for NewRepository.
type NewRepositoryOptions struct {
	Logger           *zerolog.Logger
	MetadataProvider metadata.Provider
}
// NewRepository creates a torrent Repository with an empty extension bank,
// fresh caches and zero-value settings. InitExtensionBank and SetSettings
// are expected to be called afterwards.
func NewRepository(opts *NewRepositoryOptions) *Repository {
	ret := &Repository{
		logger:                         opts.Logger,
		metadataProvider:               opts.MetadataProvider,
		extensionBank:                  extension.NewUnifiedBank(),
		animeProviderSearchCaches:      result.NewResultMap[string, *result.Cache[string, *SearchData]](),
		animeProviderSmartSearchCaches: result.NewResultMap[string, *result.Cache[string, *SearchData]](),
		settings:                       RepositorySettings{},
		mu:                             sync.Mutex{},
	}
	return ret
}
// InitExtensionBank installs the shared extension bank and starts two
// background listeners that reload the provider caches whenever an
// extension is added or removed.
// NOTE(review): the listener goroutines run for the lifetime of the
// process; if a Repository can be re-initialized, the previous listeners
// leak — confirm callers initialize once.
func (r *Repository) InitExtensionBank(bank *extension.UnifiedBank) {
	r.mu.Lock()
	defer r.mu.Unlock()

	r.extensionBank = bank

	// Subscribe once and range over the channel (staticcheck S1000): the
	// original `for { select { case <-bank.OnExtensionAdded(): } }` form
	// re-invoked the subscription method on every iteration and would spin
	// forever if the channel were ever closed.
	added := bank.OnExtensionAdded()
	go func() {
		for range added {
			//r.logger.Debug().Msg("torrent repo: Anime provider extension added")
			r.OnExtensionReloaded()
		}
	}()

	removed := bank.OnExtensionRemoved()
	go func() {
		for range removed {
			r.OnExtensionReloaded()
		}
	}()

	r.logger.Debug().Msg("torrent repo: Initialized anime provider extension bank")
}
// OnExtensionReloaded rebuilds the per-provider caches under the lock.
// Invoked by the extension bank listeners installed in InitExtensionBank.
func (r *Repository) OnExtensionReloaded() {
	r.mu.Lock()
	defer r.mu.Unlock()
	r.reloadExtensions()
}
// reloadExtensions discards all search caches and recreates one cache pair
// per registered provider extension.
// This is called each time a new extension is added or removed.
// Callers must hold r.mu.
func (r *Repository) reloadExtensions() {
	// Clear the search caches
	r.animeProviderSearchCaches = result.NewResultMap[string, *result.Cache[string, *SearchData]]()
	r.animeProviderSmartSearchCaches = result.NewResultMap[string, *result.Cache[string, *SearchData]]()

	// Repopulation happens asynchronously; searches against a provider whose
	// cache entry has not been created yet simply miss the cache.
	go func() {
		// Create new caches for each provider
		extension.RangeExtensions(r.extensionBank, func(provider string, value extension.AnimeTorrentProviderExtension) bool {
			r.animeProviderSearchCaches.Set(provider, result.NewCache[string, *SearchData]())
			r.animeProviderSmartSearchCaches.Set(provider, result.NewCache[string, *SearchData]())
			return true
		})
	}()

	// Check if the default provider is in the list of providers
	//if r.settings.DefaultAnimeProvider != "" && r.settings.DefaultAnimeProvider != "none" {
	//	if _, ok := r.extensionBank.Get(r.settings.DefaultAnimeProvider); !ok {
	//		//r.logger.Error().Str("defaultProvider", r.settings.DefaultAnimeProvider).Msg("torrent repo: Default torrent provider not found in extensions")
	//		// Set the default provider to empty
	//		r.settings.DefaultAnimeProvider = ""
	//	}
	//}

	//r.logger.Trace().Str("defaultProvider", r.settings.DefaultAnimeProvider).Msg("torrent repo: Reloaded extensions")
}
// SetSettings stores the repository settings and rebuilds the provider
// caches. Should be called after the repository is created and whenever
// settings are refreshed. A nil argument or a "none" default provider
// resets the default provider to empty.
func (r *Repository) SetSettings(s *RepositorySettings) {
	r.mu.Lock()
	defer r.mu.Unlock()

	r.logger.Trace().Msg("torrent repo: Setting settings")

	if s == nil {
		r.settings = RepositorySettings{}
	} else {
		r.settings = *s
	}

	// "none" is the UI sentinel for "no default provider".
	if r.settings.DefaultAnimeProvider == "none" {
		r.settings.DefaultAnimeProvider = ""
	}

	// Reload extensions after settings change
	r.reloadExtensions()
}
// GetDefaultAnimeProviderExtension returns the extension configured as the
// default provider, or false when no default is set or it is not registered.
func (r *Repository) GetDefaultAnimeProviderExtension() (extension.AnimeTorrentProviderExtension, bool) {
	r.mu.Lock()
	defer r.mu.Unlock()
	if r.settings.DefaultAnimeProvider == "" {
		return nil, false
	}
	return extension.GetExtension[extension.AnimeTorrentProviderExtension](r.extensionBank, r.settings.DefaultAnimeProvider)
}
// GetAnimeProviderExtension looks up a provider extension by its ID.
func (r *Repository) GetAnimeProviderExtension(id string) (extension.AnimeTorrentProviderExtension, bool) {
	return extension.GetExtension[extension.AnimeTorrentProviderExtension](r.extensionBank, id)
}

View File

@@ -0,0 +1,67 @@
package torrent
import (
"seanime/internal/api/metadata"
"seanime/internal/extension"
"seanime/internal/torrents/animetosho"
"seanime/internal/torrents/nyaa"
"seanime/internal/torrents/seadex"
"seanime/internal/util"
"testing"
)
// getTestRepo builds a Repository wired with the four built-in Go
// providers (nyaa, nyaa-sukebei, animetosho, seadex) and AnimeTosho as
// the default, for use by the tests in this package.
func getTestRepo(t *testing.T) *Repository {
	logger := util.NewLogger()

	metadataProvider := metadata.GetMockProvider(t)

	extensionBank := extension.NewUnifiedBank()

	extensionBank.Set("nyaa", extension.NewAnimeTorrentProviderExtension(&extension.Extension{
		ID:       "nyaa",
		Name:     "Nyaa",
		Version:  "1.0.0",
		Language: extension.LanguageGo,
		Type:     extension.TypeAnimeTorrentProvider,
		Author:   "Seanime",
	}, nyaa.NewProvider(logger, nyaa.CategoryAnimeEng)))

	extensionBank.Set("nyaa-sukebei", extension.NewAnimeTorrentProviderExtension(&extension.Extension{
		ID:       "nyaa-sukebei",
		Name:     "Nyaa Sukebei",
		Version:  "1.0.0",
		Language: extension.LanguageGo,
		Type:     extension.TypeAnimeTorrentProvider,
		Author:   "Seanime",
	}, nyaa.NewSukebeiProvider(logger)))

	extensionBank.Set("animetosho", extension.NewAnimeTorrentProviderExtension(&extension.Extension{
		ID:       "animetosho",
		Name:     "AnimeTosho",
		Version:  "1.0.0",
		Language: extension.LanguageGo,
		Type:     extension.TypeAnimeTorrentProvider,
		Author:   "Seanime",
	}, animetosho.NewProvider(logger)))

	extensionBank.Set("seadex", extension.NewAnimeTorrentProviderExtension(&extension.Extension{
		ID:       "seadex",
		Name:     "SeaDex",
		Version:  "1.0.0",
		Language: extension.LanguageGo,
		Type:     extension.TypeAnimeTorrentProvider,
		Author:   "Seanime",
	}, seadex.NewProvider(logger)))

	repo := NewRepository(&NewRepositoryOptions{
		Logger:           logger,
		MetadataProvider: metadataProvider,
	})

	repo.InitExtensionBank(extensionBank)

	repo.SetSettings(&RepositorySettings{
		DefaultAnimeProvider: ProviderAnimeTosho,
	})

	return repo
}

View File

@@ -0,0 +1,463 @@
package torrent
import (
"cmp"
"context"
"fmt"
"seanime/internal/api/anilist"
"seanime/internal/api/metadata"
"seanime/internal/debrid/debrid"
"seanime/internal/extension"
hibiketorrent "seanime/internal/extension/hibike/torrent"
"seanime/internal/library/anime"
"seanime/internal/util"
"seanime/internal/util/comparison"
"seanime/internal/util/result"
"slices"
"strconv"
"sync"
"github.com/5rahim/habari"
"github.com/samber/lo"
"github.com/samber/mo"
)
const (
	// AnimeSearchTypeSmart is the episode-aware, metadata-assisted search.
	AnimeSearchTypeSmart AnimeSearchType = "smart"
	// AnimeSearchTypeSimple is a plain text-query search.
	AnimeSearchTypeSimple AnimeSearchType = "simple"
)

var (
	// metadataCache memoizes parsed torrent-name metadata, keyed by the raw
	// torrent name, across all searches in the process.
	metadataCache = result.NewResultMap[string, *TorrentMetadata]()
)
type (
	// AnimeSearchType selects the search strategy ("smart" or "simple").
	AnimeSearchType string

	// AnimeSearchOptions parameterizes Repository.SearchAnime.
	AnimeSearchOptions struct {
		// Provider extension ID
		Provider string
		Type     AnimeSearchType
		Media    *anilist.BaseAnime
		// Search options
		Query string
		// Filter options
		Batch         bool
		EpisodeNumber int
		BestReleases  bool
		Resolution    string
	}

	// Preview contains the torrent and episode information
	Preview struct {
		Episode *anime.Episode              `json:"episode"` // nil if batch
		Torrent *hibiketorrent.AnimeTorrent `json:"torrent"`
	}

	// TorrentMetadata pairs the parsed torrent-name metadata with its
	// Levenshtein distance to the closest media title (1000 = no match).
	TorrentMetadata struct {
		Distance int              `json:"distance"`
		Metadata *habari.Metadata `json:"metadata"`
	}

	// SearchData is the struct returned by NewSmartSearch
	SearchData struct {
		Torrents                  []*hibiketorrent.AnimeTorrent                    `json:"torrents"`                  // Torrents found
		Previews                  []*Preview                                       `json:"previews"`                  // TorrentPreview for each torrent
		TorrentMetadata           map[string]*TorrentMetadata                      `json:"torrentMetadata"`           // Torrent metadata
		DebridInstantAvailability map[string]debrid.TorrentItemInstantAvailability `json:"debridInstantAvailability"` // Debrid instant availability
		AnimeMetadata             *metadata.AnimeMetadata                          `json:"animeMetadata"`             // Animap media
	}
)
// SearchAnime queries a torrent provider extension for the given media and
// returns the torrents, per-torrent parsed metadata, and (for smart
// searches) episode previews. Results are cached per provider and query
// key; a cache hit returns immediately.
func (r *Repository) SearchAnime(ctx context.Context, opts AnimeSearchOptions) (ret *SearchData, err error) {
	defer util.HandlePanicInModuleWithError("torrents/torrent/SearchAnime", &err)

	r.logger.Debug().Str("provider", opts.Provider).Str("type", string(opts.Type)).Str("query", opts.Query).Msg("torrent repo: Searching for anime torrents")

	// Find the provider by ID, falling back to the default provider.
	providerExtension, ok := extension.GetExtension[extension.AnimeTorrentProviderExtension](r.extensionBank, opts.Provider)
	if !ok {
		// Get the default provider
		providerExtension, ok = r.GetDefaultAnimeProviderExtension()
		if !ok {
			return nil, fmt.Errorf("torrent provider not found")
		}
	}

	if opts.Type == AnimeSearchTypeSmart && !providerExtension.GetProvider().GetSettings().CanSmartSearch {
		return nil, fmt.Errorf("provider does not support smart search")
	}

	var torrents []*hibiketorrent.AnimeTorrent

	// Fetch Animap media (optional; smart search works without it but loses
	// AniDB mappings and the absolute-season offset).
	animeMetadata := mo.None[*metadata.AnimeMetadata]()
	animeMetadataF, err := r.metadataProvider.GetAnimeMetadata(metadata.AnilistPlatform, opts.Media.GetID())
	if err == nil {
		animeMetadata = mo.Some(animeMetadataF)
	}

	queryMedia := hibiketorrent.Media{
		ID:                   opts.Media.GetID(),
		IDMal:                opts.Media.GetIDMal(),
		Status:               string(*opts.Media.GetStatus()),
		Format:               string(*opts.Media.GetFormat()),
		EnglishTitle:         opts.Media.GetTitle().GetEnglish(),
		RomajiTitle:          opts.Media.GetRomajiTitleSafe(),
		EpisodeCount:         opts.Media.GetTotalEpisodeCount(),
		AbsoluteSeasonOffset: 0,
		Synonyms:             opts.Media.GetSynonymsContainingSeason(),
		IsAdult:              *opts.Media.GetIsAdult(),
		StartDate: &hibiketorrent.FuzzyDate{
			Year:  *opts.Media.GetStartDate().GetYear(),
			Month: opts.Media.GetStartDate().GetMonth(),
			Day:   opts.Media.GetStartDate().GetDay(),
		},
	}

	//// Force simple search if Animap media is absent
	//if opts.Type == AnimeSearchTypeSmart && animeMetadata.IsAbsent() {
	//	opts.Type = AnimeSearchTypeSimple
	//}

	var queryKey string

	switch opts.Type {
	case AnimeSearchTypeSmart:
		anidbAID := 0
		anidbEID := 0
		// Get the AniDB Anime ID and Episode ID
		if animeMetadata.IsPresent() {
			// Override absolute offset value of queryMedia
			queryMedia.AbsoluteSeasonOffset = animeMetadata.MustGet().GetOffset()
			if animeMetadata.MustGet().GetMappings() != nil {
				anidbAID = animeMetadata.MustGet().GetMappings().AnidbId
				// Find Animap Episode based on inputted episode number
				episodeMetadata, found := animeMetadata.MustGet().FindEpisode(strconv.Itoa(opts.EpisodeNumber))
				if found {
					anidbEID = episodeMetadata.AnidbEid
				}
			}
		}

		queryKey = fmt.Sprintf("%d-%s-%d-%d-%d-%s-%t-%t", opts.Media.GetID(), opts.Query, opts.EpisodeNumber, anidbAID, anidbEID, opts.Resolution, opts.BestReleases, opts.Batch)
		if cache, found := r.animeProviderSmartSearchCaches.Get(opts.Provider); found {
			// Check the cache
			data, found := cache.Get(queryKey)
			if found {
				r.logger.Debug().Str("provider", opts.Provider).Str("type", string(opts.Type)).Msg("torrent repo: Cache HIT")
				return data, nil
			}
		}

		// Check for context cancellation before making the request
		select {
		case <-ctx.Done():
			return nil, ctx.Err()
		default:
		}

		torrents, err = providerExtension.GetProvider().SmartSearch(hibiketorrent.AnimeSmartSearchOptions{
			Media:         queryMedia,
			Query:         opts.Query,
			Batch:         opts.Batch,
			EpisodeNumber: opts.EpisodeNumber,
			Resolution:    opts.Resolution,
			AnidbAID:      anidbAID,
			AnidbEID:      anidbEID,
			BestReleases:  opts.BestReleases,
		})
		// Safe on error: UniqBy over a nil slice is a no-op; err is checked below.
		torrents = lo.UniqBy(torrents, func(t *hibiketorrent.AnimeTorrent) string {
			return t.InfoHash
		})

	case AnimeSearchTypeSimple:
		queryKey = fmt.Sprintf("%d-%s", opts.Media.GetID(), opts.Query)
		if cache, found := r.animeProviderSearchCaches.Get(opts.Provider); found {
			// Check the cache
			data, found := cache.Get(queryKey)
			if found {
				r.logger.Debug().Str("provider", opts.Provider).Str("type", string(opts.Type)).Msg("torrent repo: Cache HIT")
				return data, nil
			}
		}

		// Check for context cancellation before making the request
		select {
		case <-ctx.Done():
			return nil, ctx.Err()
		default:
		}

		torrents, err = providerExtension.GetProvider().Search(hibiketorrent.AnimeSearchOptions{
			Media: queryMedia,
			Query: opts.Query,
		})
	}

	if err != nil {
		return nil, err
	}

	//
	// Torrent metadata: parse every torrent name (memoized in metadataCache)
	// and record the title distance to the media.
	//
	torrentMetadata := make(map[string]*TorrentMetadata)
	mu := sync.Mutex{}
	wg := sync.WaitGroup{}
	wg.Add(len(torrents))
	for _, t := range torrents {
		go func(t *hibiketorrent.AnimeTorrent) {
			defer wg.Done()
			tm, found := metadataCache.Get(t.Name)
			if !found {
				m := habari.Parse(t.Name)
				var distance *comparison.LevenshteinResult
				distance, ok := comparison.FindBestMatchWithLevenshtein(&m.Title, opts.Media.GetAllTitles())
				if !ok {
					// No comparable title: use a sentinel distance.
					distance = &comparison.LevenshteinResult{
						Distance: 1000,
					}
				}
				tm = &TorrentMetadata{
					Distance: distance.Distance,
					Metadata: m,
				}
				metadataCache.Set(t.Name, tm)
			}
			mu.Lock()
			torrentMetadata[t.InfoHash] = tm
			mu.Unlock()
		}(t)
	}
	wg.Wait()

	//
	// Previews (smart search only): one episode preview per torrent.
	//
	previews := make([]*Preview, 0)
	if opts.Type == AnimeSearchTypeSmart {
		previewsMu := sync.Mutex{}
		wg := sync.WaitGroup{}
		wg.Add(len(torrents))
		for _, t := range torrents {
			go func(t *hibiketorrent.AnimeTorrent) {
				defer wg.Done()
				// Check for context cancellation in each goroutine
				select {
				case <-ctx.Done():
					return
				default:
				}
				preview := r.createAnimeTorrentPreview(createAnimeTorrentPreviewOptions{
					torrent:       t,
					media:         opts.Media,
					animeMetadata: animeMetadata,
					searchOpts:    &opts,
				})
				if preview != nil {
					// BUGFIX: this append runs on multiple goroutines and was
					// previously unsynchronized — a data race that could drop
					// previews or corrupt the slice.
					previewsMu.Lock()
					previews = append(previews, preview)
					previewsMu.Unlock()
				}
			}(t)
		}
		wg.Wait()

		// Check if context was cancelled during preview creation
		select {
		case <-ctx.Done():
			return nil, ctx.Err()
		default:
		}
	}

	// sort both by seeders
	slices.SortFunc(torrents, func(i, j *hibiketorrent.AnimeTorrent) int {
		return cmp.Compare(j.Seeders, i.Seeders)
	})
	previews = lo.Filter(previews, func(p *Preview, _ int) bool {
		return p != nil && p.Torrent != nil
	})
	slices.SortFunc(previews, func(i, j *Preview) int {
		return cmp.Compare(j.Torrent.Seeders, i.Torrent.Seeders)
	})

	ret = &SearchData{
		Torrents:        torrents,
		Previews:        previews,
		TorrentMetadata: torrentMetadata,
	}
	if animeMetadata.IsPresent() {
		ret.AnimeMetadata = animeMetadata.MustGet()
	}

	// Store the data in the cache
	switch opts.Type {
	case AnimeSearchTypeSmart:
		if cache, found := r.animeProviderSmartSearchCaches.Get(opts.Provider); found {
			cache.Set(queryKey, ret)
		}
	case AnimeSearchTypeSimple:
		if cache, found := r.animeProviderSearchCaches.Get(opts.Provider); found {
			cache.Set(queryKey, ret)
		}
	}

	return
}
// createAnimeTorrentPreviewOptions carries the inputs for
// createAnimeTorrentPreview.
type createAnimeTorrentPreviewOptions struct {
	torrent       *hibiketorrent.AnimeTorrent
	media         *anilist.BaseAnime
	animeMetadata mo.Option[*metadata.AnimeMetadata] // may be absent
	searchOpts    *AnimeSearchOptions
}
// createAnimeTorrentPreview builds the episode preview for a torrent.
// Batches get a Preview with a nil Episode; single-episode torrents are
// matched against the searched episode number (with absolute-offset
// handling when Animap metadata is present). The torrent passed in is
// mutated in place to fill missing fields (release group, resolution,
// size, episode number).
func (r *Repository) createAnimeTorrentPreview(opts createAnimeTorrentPreviewOptions) *Preview {
	// Fetch the parsed name metadata; SearchAnime normally populates the
	// cache before calling this.
	// BUGFIX: on a cache miss the original parsed the name but then read
	// `metadata.Metadata` from the nil cache result — a nil pointer
	// dereference. The fallback entry is now used directly.
	tm, found := metadataCache.Get(opts.torrent.Name)
	if !found { // Should always be found
		tm = &TorrentMetadata{
			Distance: 1000,
			Metadata: habari.Parse(opts.torrent.Name),
		}
		metadataCache.Set(opts.torrent.Name, tm)
	}
	parsedData := tm.Metadata

	isBatch := opts.torrent.IsBestRelease ||
		opts.torrent.IsBatch ||
		comparison.ValueContainsBatchKeywords(opts.torrent.Name) || // Contains batch keywords
		(!opts.media.IsMovieOrSingleEpisode() && len(parsedData.EpisodeNumber) > 1) // Multiple episodes parsed & not a movie

	// Backfill fields the provider did not supply from the parsed name.
	if opts.torrent.ReleaseGroup == "" {
		opts.torrent.ReleaseGroup = parsedData.ReleaseGroup
	}
	if opts.torrent.Resolution == "" {
		opts.torrent.Resolution = parsedData.VideoResolution
	}
	if opts.torrent.FormattedSize == "" {
		opts.torrent.FormattedSize = util.Bytes(uint64(opts.torrent.Size))
	}

	if isBatch {
		return &Preview{
			Episode: nil, // Will be displayed as batch
			Torrent: opts.torrent,
		}
	}

	// If past this point we haven't detected a batch but the episode number returned from the provider is -1
	// we will parse it from the torrent name
	if opts.torrent.EpisodeNumber == -1 && len(parsedData.EpisodeNumber) == 1 {
		opts.torrent.EpisodeNumber = util.StringToIntMust(parsedData.EpisodeNumber[0])
	}

	// If the torrent is confirmed, use the episode number from the search options
	// because it could be absolute
	if opts.torrent.Confirmed {
		opts.torrent.EpisodeNumber = opts.searchOpts.EpisodeNumber
	}

	// If there was no single episode number parsed but the media is movie, set the episode number to 1
	if opts.torrent.EpisodeNumber == -1 && opts.media.IsMovieOrSingleEpisode() {
		opts.torrent.EpisodeNumber = 1
	}

	if opts.animeMetadata.IsPresent() {
		// Normalize an absolute episode number back to a relative one.
		if opts.torrent.EpisodeNumber >= 0 && opts.torrent.EpisodeNumber > opts.media.GetCurrentEpisodeCount() {
			opts.torrent.EpisodeNumber = opts.torrent.EpisodeNumber - opts.animeMetadata.MustGet().GetOffset()
		}

		animeMetadata := opts.animeMetadata.MustGet()
		_, foundEp := animeMetadata.FindEpisode(strconv.Itoa(opts.searchOpts.EpisodeNumber))
		if foundEp {
			var episode *anime.Episode
			// Remove the episode if the parsed episode number is not the same as the search option
			if isProbablySameEpisode(parsedData.EpisodeNumber, opts.searchOpts.EpisodeNumber, opts.animeMetadata.MustGet().GetOffset()) {
				ep := opts.searchOpts.EpisodeNumber
				episode = anime.NewEpisode(&anime.NewEpisodeOptions{
					LocalFile:            nil,
					OptionalAniDBEpisode: strconv.Itoa(ep),
					AnimeMetadata:        animeMetadata,
					Media:                opts.media,
					ProgressOffset:       0,
					IsDownloaded:         false,
					MetadataProvider:     r.metadataProvider,
				})
				episode.IsInvalid = false
				if episode.DisplayTitle == "" {
					episode.DisplayTitle = parsedData.Title
				}
			}
			return &Preview{
				Episode: episode,
				Torrent: opts.torrent,
			}
		}

		var episode *anime.Episode
		// Remove the episode if the parsed episode number is not the same as the search option
		if isProbablySameEpisode(parsedData.EpisodeNumber, opts.searchOpts.EpisodeNumber, opts.animeMetadata.MustGet().GetOffset()) {
			displayTitle := ""
			if len(parsedData.EpisodeNumber) == 1 && parsedData.EpisodeNumber[0] != strconv.Itoa(opts.searchOpts.EpisodeNumber) {
				displayTitle = fmt.Sprintf("Episode %s", parsedData.EpisodeNumber[0])
			}
			// If the episode number could not be found in the Animap media, create a new episode
			episode = &anime.Episode{
				Type:                  anime.LocalFileTypeMain,
				DisplayTitle:          displayTitle,
				EpisodeTitle:          "",
				EpisodeNumber:         opts.searchOpts.EpisodeNumber,
				ProgressNumber:        opts.searchOpts.EpisodeNumber,
				AniDBEpisode:          "",
				AbsoluteEpisodeNumber: 0,
				LocalFile:             nil,
				IsDownloaded:          false,
				EpisodeMetadata:       anime.NewEpisodeMetadata(opts.animeMetadata.MustGet(), nil, opts.media, r.metadataProvider),
				FileMetadata:          nil,
				IsInvalid:             false,
				MetadataIssue:         "",
				BaseAnime:             opts.media,
			}
		}
		return &Preview{
			Episode: episode,
			Torrent: opts.torrent,
		}
	}

	return &Preview{
		Episode: nil,
		Torrent: opts.torrent,
	}
}
// isProbablySameEpisode reports whether the single parsed episode number
// matches the searched episode, either directly or after applying the
// absolute season offset. Zero or multiple parsed numbers never match.
func isProbablySameEpisode(parsedEpisode []string, searchEpisode int, absoluteOffset int) bool {
	if len(parsedEpisode) != 1 {
		return false
	}
	n := util.StringToIntMust(parsedEpisode[0])
	return n == searchEpisode || n == searchEpisode+absoluteOffset
}

View File

@@ -0,0 +1,111 @@
package torrent
import (
"context"
"seanime/internal/api/anilist"
"seanime/internal/platforms/anilist_platform"
"seanime/internal/test_utils"
"seanime/internal/util"
"testing"
)
// TestSmartSearch runs the smart-search flow against several known media
// entries — a single-episode lookup with an absolute-numbering offset and
// three batch lookups, with and without a resolution filter — and logs the
// previews and torrents returned. It performs live provider lookups via the
// test provider environment.
func TestSmartSearch(t *testing.T) {
	test_utils.InitTestProvider(t)

	anilistClient := anilist.TestGetMockAnilistClient()
	logger := util.NewLogger()
	anilistPlatform := anilist_platform.NewAnilistPlatform(anilistClient, logger)
	repo := getTestRepo(t)

	tests := []struct {
		smartSearch    bool
		query          string
		episodeNumber  int
		batch          bool
		mediaId        int
		absoluteOffset int
		resolution     string
		provider       string
	}{
		{
			smartSearch:    true,
			query:          "",
			episodeNumber:  5,
			batch:          false,
			mediaId:        162670, // Dr. Stone S3
			absoluteOffset: 48,
			resolution:     "1080",
			provider:       "animetosho",
		},
		{
			smartSearch:    true,
			query:          "",
			episodeNumber:  1,
			batch:          true,
			mediaId:        77, // Mahou Shoujo Lyrical Nanoha A's
			absoluteOffset: 0,
			resolution:     "1080",
			provider:       "animetosho",
		},
		{
			smartSearch:    true,
			query:          "",
			episodeNumber:  1,
			batch:          true,
			mediaId:        109731, // Hibike Season 3
			absoluteOffset: 0,
			resolution:     "1080",
			provider:       "animetosho",
		},
		{
			smartSearch:    true,
			query:          "",
			episodeNumber:  1,
			batch:          true,
			mediaId:        1915, // Magical Girl Lyrical Nanoha StrikerS
			absoluteOffset: 0,
			resolution:     "",
			provider:       "animetosho",
		},
	}

	for _, tt := range tests {
		t.Run(tt.query, func(t *testing.T) {
			media, err := anilistPlatform.GetAnime(t.Context(), tt.mediaId)
			if err != nil {
				t.Fatalf("could not fetch media id %d", tt.mediaId)
			}

			data, err := repo.SearchAnime(context.Background(), AnimeSearchOptions{
				Provider:      tt.provider,
				Type:          AnimeSearchTypeSmart,
				Media:         media,
				Query:         tt.query, // was hard-coded ""; use the case's query
				Batch:         tt.batch,
				EpisodeNumber: tt.episodeNumber,
				BestReleases:  false,
				Resolution:    tt.resolution,
			})
			if err != nil {
				// Must be Fatalf, not Errorf: data is nil on error and the
				// loops below would dereference it and panic.
				t.Fatalf("SearchAnime() failed: %v", err)
			}

			t.Log("----------------------- Previews --------------------------")
			for _, preview := range data.Previews {
				t.Logf("> %s", preview.Torrent.Name)
				if preview.Episode != nil {
					t.Logf("\t\t %s", preview.Episode.DisplayTitle)
				} else {
					t.Logf("\t\t Batch")
				}
			}
			t.Log("----------------------- Torrents --------------------------")
			for _, torrent := range data.Torrents {
				t.Logf("> %s", torrent.Name)
			}
		})
	}
}

View File

@@ -0,0 +1,7 @@
package torrent
// Identifiers for the torrent search providers supported by this package.
const (
	ProviderNyaa       = "nyaa"       // Nyaa provider
	ProviderAnimeTosho = "animetosho" // AnimeTosho provider
	ProviderNone       = "none"       // no provider selected
)

View File

@@ -0,0 +1,20 @@
package torrent
import (
"bytes"
"github.com/anacrolix/torrent/metainfo"
)
// StrDataToMagnetLink parses the raw contents of a .torrent file (bencoded
// metainfo, passed as a string) and returns the corresponding magnet URI.
// It returns an error if the data is not valid metainfo or if a magnet
// link cannot be derived from it.
func StrDataToMagnetLink(data string) (string, error) {
	reader := bytes.NewReader([]byte(data))

	meta, loadErr := metainfo.Load(reader)
	if loadErr != nil {
		return "", loadErr
	}

	magnet, magnetErr := meta.MagnetV2()
	if magnetErr != nil {
		return "", magnetErr
	}

	return magnet.String(), nil
}

View File

@@ -0,0 +1,43 @@
package torrent
import (
	"io"
	"net/http"
	"testing"
	"time"
)
// TestFileToMagnetLink downloads real .torrent files and verifies that
// StrDataToMagnetLink can convert their contents to magnet links.
// It performs live HTTP requests.
func TestFileToMagnetLink(t *testing.T) {
	tests := []struct {
		name string
		url  string
	}{
		{
			name: "1",
			url:  "https://animetosho.org/storage/torrent/da9aad67b6f8bb82757bb3ef95235b42624c34f7/%5BSubsPlease%5D%20Make%20Heroine%20ga%20Oosugiru%21%20-%2011%20%281080p%29%20%5B58B3496A%5D.torrent",
		},
	}

	// Reuse one client across cases and bound each request so a hung
	// server cannot stall the test run indefinitely.
	client := http.Client{Timeout: 30 * time.Second}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			resp, err := client.Get(test.url)
			if err != nil {
				t.Fatalf("Error: %v", err)
			}
			defer resp.Body.Close()

			// A non-200 response body would not be torrent metainfo; fail
			// here with a clear message rather than with a confusing
			// bencode parse error downstream.
			if resp.StatusCode != http.StatusOK {
				t.Fatalf("unexpected status %q fetching %s", resp.Status, test.url)
			}

			data, err := io.ReadAll(resp.Body)
			if err != nil {
				t.Fatalf("Error: %v", err)
			}

			magnet, err := StrDataToMagnetLink(string(data))
			if err != nil {
				t.Fatalf("Error: %v", err)
			}
			t.Log(magnet)
		})
	}
}