node build fixed
This commit is contained in:
497
seanime-2.9.10/internal/manga/chapter_container.go
Normal file
497
seanime-2.9.10/internal/manga/chapter_container.go
Normal file
@@ -0,0 +1,497 @@
|
||||
package manga
|
||||
|
||||
import (
|
||||
"cmp"
|
||||
"errors"
|
||||
"fmt"
|
||||
"math"
|
||||
"os"
|
||||
"seanime/internal/api/anilist"
|
||||
"seanime/internal/extension"
|
||||
hibikemanga "seanime/internal/extension/hibike/manga"
|
||||
"seanime/internal/hook"
|
||||
manga_providers "seanime/internal/manga/providers"
|
||||
"seanime/internal/util"
|
||||
"seanime/internal/util/comparison"
|
||||
"seanime/internal/util/result"
|
||||
"slices"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
"github.com/samber/lo"
|
||||
)
|
||||
|
||||
type (
	// ChapterContainer is used to display the list of chapters from a provider in the client.
	// It is cached in a unique file cache bucket with a key of the format: {provider}${mediaId}
	ChapterContainer struct {
		MediaId  int                           `json:"mediaId"`  // AniList media ID
		Provider string                        `json:"provider"` // Manga provider extension ID
		Chapters []*hibikemanga.ChapterDetails `json:"chapters"` // Chapters as returned by the provider (Provider field overwritten on fetch)
	}
)
|
||||
|
||||
// getMangaChapterContainerCacheKey builds the file-cache key identifying a
// chapter container, in the form "{provider}${mediaId}".
func getMangaChapterContainerCacheKey(provider string, mediaId int) string {
	return provider + "$" + strconv.Itoa(mediaId)
}
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
// GetMangaChapterContainerOptions holds the inputs for GetMangaChapterContainer.
type GetMangaChapterContainerOptions struct {
	Provider string    // Manga provider extension ID
	MediaId  int       // AniList media ID
	Titles   []*string // Candidate titles used to search the provider when no manual mapping exists
	Year     int       // Release year, forwarded to the provider search
}
|
||||
|
||||
// GetMangaChapterContainer returns the ChapterContainer for a manga entry based on the provider.
// If it isn't cached, it will search for the manga, create a ChapterContainer and cache it.
//
// Resolution order: hook override -> file cache -> manual DB mapping -> provider search.
// Returns ErrNoTitlesProvided, ErrNoResults or ErrNoChapters on the corresponding failure.
func (r *Repository) GetMangaChapterContainer(opts *GetMangaChapterContainerOptions) (ret *ChapterContainer, err error) {
	defer util.HandlePanicInModuleWithError("manga/GetMangaChapterContainer", &err)

	provider := opts.Provider
	mediaId := opts.MediaId
	titles := opts.Titles

	providerExtension, ok := extension.GetExtension[extension.MangaProviderExtension](r.providerExtensionBank, provider)
	if !ok {
		r.logger.Error().Str("provider", provider).Msg("manga: Provider not found")
		return nil, errors.New("manga: Provider not found")
	}

	// DEVNOTE: Local chapters can be cached
	localProvider, isLocalProvider := providerExtension.GetProvider().(*manga_providers.Local)

	// Set the source directory for local provider
	if isLocalProvider && r.settings.Manga.LocalSourceDirectory != "" {
		localProvider.SetSourceDirectory(r.settings.Manga.LocalSourceDirectory)
	}

	r.logger.Trace().
		Str("provider", provider).
		Int("mediaId", mediaId).
		Msgf("manga: Getting chapters")

	chapterContainerKey := getMangaChapterContainerCacheKey(provider, mediaId)

	// +---------------------+
	// |     Hook event      |
	// +---------------------+

	// Trigger hook event; a listener may prevent the default flow and supply
	// its own chapter container.
	reqEvent := &MangaChapterContainerRequestedEvent{
		Provider: provider,
		MediaId:  mediaId,
		Titles:   titles,
		Year:     opts.Year,
		ChapterContainer: &ChapterContainer{
			MediaId:  mediaId,
			Provider: provider,
			Chapters: []*hibikemanga.ChapterDetails{},
		},
	}
	err = hook.GlobalHookManager.OnMangaChapterContainerRequested().Trigger(reqEvent)
	if err != nil {
		r.logger.Error().Err(err).Msg("manga: Exception occurred while triggering hook event")
		return nil, fmt.Errorf("manga: Error in hook, %w", err)
	}

	// Default prevented, return the chapter container
	if reqEvent.DefaultPrevented {
		if reqEvent.ChapterContainer == nil {
			return nil, fmt.Errorf("manga: No chapter container returned by hook event")
		}
		return reqEvent.ChapterContainer, nil
	}

	// +---------------------+
	// |        Cache        |
	// +---------------------+

	var container *ChapterContainer
	containerBucket := r.getFcProviderBucket(provider, mediaId, bucketTypeChapter)

	// Check if the container is in the cache
	if found, _ := r.fileCacher.Get(containerBucket, chapterContainerKey, &container); found {
		r.logger.Info().Str("bucket", containerBucket.Name()).Msg("manga: Chapter Container Cache HIT")

		// Trigger hook event (hook errors are logged but non-fatal on the cache path)
		ev := &MangaChapterContainerEvent{
			ChapterContainer: container,
		}
		err = hook.GlobalHookManager.OnMangaChapterContainer().Trigger(ev)
		if err != nil {
			r.logger.Error().Err(err).Msg("manga: Exception occurred while triggering hook event")
		}
		container = ev.ChapterContainer

		return container, nil
	}

	// Cache miss means chapters will be refetched, so the in-memory
	// latest-chapter-number map is stale. Delete the map cache.
	mangaLatestChapterNumberMap.Delete(ChapterCountMapCacheKey)

	var mangaId string

	// +---------------------+
	// |      Database       |
	// +---------------------+

	// Search for the mapping in the database
	mapping, found := r.db.GetMangaMapping(provider, mediaId)
	if found {
		r.logger.Debug().Str("mangaId", mapping.MangaID).Msg("manga: Using manual mapping")
		mangaId = mapping.MangaID
	}

	if mangaId == "" {
		// +---------------------+
		// |       Search        |
		// +---------------------+

		r.logger.Trace().Msg("manga: Searching for manga")

		if titles == nil {
			return nil, ErrNoTitlesProvided
		}

		// Non-latin titles are dropped before querying the provider.
		titles = lo.Filter(titles, func(title *string, _ int) bool {
			return util.IsMostlyLatinString(*title)
		})

		var searchRes []*hibikemanga.SearchResult

		// NOTE: this `err` shadows the named return inside this branch; only the
		// last per-title search error is kept for the ErrNoResults wrap below.
		var err error
		for _, title := range titles {
			var _searchRes []*hibikemanga.SearchResult

			_searchRes, err = providerExtension.GetProvider().Search(hibikemanga.SearchOptions{
				Query: *title,
				Year:  opts.Year,
			})
			if err == nil {

				HydrateSearchResultSearchRating(_searchRes, title)

				searchRes = append(searchRes, _searchRes...)
			} else {
				r.logger.Warn().Err(err).Msg("manga: Search failed")
			}
		}

		if len(searchRes) == 0 {
			r.logger.Error().Msg("manga: No search results found")
			if err != nil {
				return nil, fmt.Errorf("%w, %w", ErrNoResults, err)
			} else {
				return nil, ErrNoResults
			}
		}

		// Overwrite the provider just in case
		for _, res := range searchRes {
			res.Provider = provider
		}

		// Pick the result with the highest search rating.
		bestRes := GetBestSearchResult(searchRes)

		mangaId = bestRes.ID
	}

	// +---------------------+
	// |    Get chapters     |
	// +---------------------+

	chapterList, err := providerExtension.GetProvider().FindChapters(mangaId)
	if err != nil {
		r.logger.Error().Err(err).Msg("manga: Failed to get chapters")
		return nil, ErrNoChapters
	}

	// Overwrite the provider just in case
	for _, chapter := range chapterList {
		chapter.Provider = provider
	}

	container = &ChapterContainer{
		MediaId:  mediaId,
		Provider: provider,
		Chapters: chapterList,
	}

	// Trigger hook event (listeners may replace the container; errors are non-fatal)
	ev := &MangaChapterContainerEvent{
		ChapterContainer: container,
	}
	err = hook.GlobalHookManager.OnMangaChapterContainer().Trigger(ev)
	if err != nil {
		r.logger.Error().Err(err).Msg("manga: Exception occurred while triggering hook event")
	}
	container = ev.ChapterContainer

	// Cache the container only if it has chapters
	if len(container.Chapters) > 0 {
		err = r.fileCacher.Set(containerBucket, chapterContainerKey, container)
		if err != nil {
			r.logger.Warn().Err(err).Msg("manga: Failed to populate cache")
		}
	}

	r.logger.Info().Str("bucket", containerBucket.Name()).Msg("manga: Retrieved chapters")
	return container, nil
}
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
// RefreshChapterContainers deletes all cached chapter containers and refetches them based on the selected provider map.
|
||||
func (r *Repository) RefreshChapterContainers(mangaCollection *anilist.MangaCollection, selectedProviderMap map[int]string) (err error) {
|
||||
defer util.HandlePanicInModuleWithError("manga/RefreshChapterContainers", &err)
|
||||
|
||||
// Read the cache directory
|
||||
entries, err := os.ReadDir(r.cacheDir)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
removedMediaIds := make(map[int]struct{})
|
||||
mu := sync.Mutex{}
|
||||
|
||||
wg := sync.WaitGroup{}
|
||||
wg.Add(len(entries))
|
||||
for _, entry := range entries {
|
||||
go func(entry os.DirEntry) {
|
||||
defer wg.Done()
|
||||
|
||||
if entry.IsDir() {
|
||||
return
|
||||
}
|
||||
|
||||
provider, bucketType, mediaId, ok := ParseChapterContainerFileName(entry.Name())
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
// If the bucket type is not chapter, skip
|
||||
if bucketType != bucketTypeChapter {
|
||||
return
|
||||
}
|
||||
|
||||
r.logger.Trace().Str("provider", provider).Int("mediaId", mediaId).Msg("manga: Refetching chapter container")
|
||||
|
||||
mu.Lock()
|
||||
// Remove the container from the cache if it hasn't been removed yet
|
||||
if _, ok := removedMediaIds[mediaId]; !ok {
|
||||
_ = r.EmptyMangaCache(mediaId)
|
||||
removedMediaIds[mediaId] = struct{}{}
|
||||
}
|
||||
mu.Unlock()
|
||||
|
||||
// If a selectedProviderMap is provided, check if the provider is in the map
|
||||
if selectedProviderMap != nil {
|
||||
// If the manga is not in the map, continue
|
||||
if _, ok := selectedProviderMap[mediaId]; !ok {
|
||||
return
|
||||
}
|
||||
|
||||
// If the provider is not the one selected, continue
|
||||
if selectedProviderMap[mediaId] != provider {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// Get the manga from the collection
|
||||
mangaEntry, found := mangaCollection.GetListEntryFromMangaId(mediaId)
|
||||
if !found {
|
||||
return
|
||||
}
|
||||
|
||||
// If the manga is not currently reading or repeating, continue
|
||||
if *mangaEntry.GetStatus() != anilist.MediaListStatusCurrent && *mangaEntry.GetStatus() != anilist.MediaListStatusRepeating {
|
||||
return
|
||||
}
|
||||
|
||||
// Refetch the container
|
||||
_, err = r.GetMangaChapterContainer(&GetMangaChapterContainerOptions{
|
||||
Provider: provider,
|
||||
MediaId: mediaId,
|
||||
Titles: mangaEntry.GetMedia().GetAllTitles(),
|
||||
Year: mangaEntry.GetMedia().GetStartYearSafe(),
|
||||
})
|
||||
if err != nil {
|
||||
r.logger.Error().Err(err).Msg("manga: Failed to refetch chapter container")
|
||||
return
|
||||
}
|
||||
|
||||
r.logger.Trace().Str("provider", provider).Int("mediaId", mediaId).Msg("manga: Refetched chapter container")
|
||||
}(entry)
|
||||
}
|
||||
wg.Wait()
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
// ChapterCountMapCacheKey is the single key under which the in-memory
// latest-chapter-number map is stored in mangaLatestChapterNumberMap.
const ChapterCountMapCacheKey = 1

// mangaLatestChapterNumberMap memoizes the result of GetMangaLatestChapterNumbersMap.
// It is invalidated (Delete) by GetMangaChapterContainer on a cache miss.
var mangaLatestChapterNumberMap = result.NewResultMap[int, map[int][]MangaLatestChapterNumberItem]()

// MangaLatestChapterNumberItem describes the latest chapter number fetched from
// one provider for one scanlator/language combination.
type MangaLatestChapterNumberItem struct {
	Provider  string `json:"provider"`
	Scanlator string `json:"scanlator"`
	Language  string `json:"language"`
	Number    int    `json:"number"` // Integer part of the latest chapter number (e.g. "12.5" -> 12)
}
|
||||
|
||||
// GetMangaLatestChapterNumbersMap retrieves the latest chapter number for all manga entries.
// It scans the cache directory for chapter containers and counts the number of chapters fetched from the provider for each manga.
//
// Unlike [GetMangaLatestChapterNumberMap], it will segregate the chapter numbers by scanlator and language.
func (r *Repository) GetMangaLatestChapterNumbersMap() (ret map[int][]MangaLatestChapterNumberItem, err error) {
	defer util.HandlePanicInModuleThen("manga/GetMangaLatestChapterNumbersMap", func() {})
	ret = make(map[int][]MangaLatestChapterNumberItem)

	// Serve the memoized map when available (invalidated elsewhere when chapters are refetched).
	if m, ok := mangaLatestChapterNumberMap.Get(ChapterCountMapCacheKey); ok {
		ret = m
		return
	}

	// Go through all chapter container caches
	entries, err := os.ReadDir(r.cacheDir)
	if err != nil {
		return nil, err
	}

	for _, entry := range entries {
		if entry.IsDir() {
			continue
		}

		// Get the provider and mediaId from the file cache name
		provider, mediaId, ok := parseChapterFileName(entry.Name())
		if !ok {
			continue
		}

		containerBucket := r.getFcProviderBucket(provider, mediaId, bucketTypeChapter)

		// Get the container from the file cache
		var container *ChapterContainer
		chapterContainerKey := getMangaChapterContainerCacheKey(provider, mediaId)
		if found, _ := r.fileCacher.Get(containerBucket, chapterContainerKey, &container); !found {
			continue
		}

		// Create groups (scanlator first, then language within each scanlator)
		groupByScanlator := lo.GroupBy(container.Chapters, func(c *hibikemanga.ChapterDetails) string {
			return c.Scanlator
		})

		for scanlator, chapters := range groupByScanlator {
			groupByLanguage := lo.GroupBy(chapters, func(c *hibikemanga.ChapterDetails) string {
				return c.Language
			})

			for language, chapters := range groupByLanguage {
				// The chapter with the highest Index is considered the latest.
				lastChapter := slices.MaxFunc(chapters, func(a *hibikemanga.ChapterDetails, b *hibikemanga.ChapterDetails) int {
					return cmp.Compare(a.Index, b.Index)
				})

				// The reported number is the integer part of the chapter string
				// (e.g. "12.5" -> 12); parse failures yield 0.
				chapterNumFloat, _ := strconv.ParseFloat(lastChapter.Chapter, 32)
				chapterCount := int(math.Floor(chapterNumFloat))

				if _, ok := ret[mediaId]; !ok {
					ret[mediaId] = []MangaLatestChapterNumberItem{}
				}

				ret[mediaId] = append(ret[mediaId], MangaLatestChapterNumberItem{
					Provider:  provider,
					Scanlator: scanlator,
					Language:  language,
					Number:    chapterCount,
				})
			}
		}
	}

	// Trigger hook event (listeners may replace the map; errors are non-fatal)
	ev := &MangaLatestChapterNumbersMapEvent{
		LatestChapterNumbersMap: ret,
	}
	err = hook.GlobalHookManager.OnMangaLatestChapterNumbersMap().Trigger(ev)
	if err != nil {
		r.logger.Error().Err(err).Msg("manga: Exception occurred while triggering hook event")
	}
	ret = ev.LatestChapterNumbersMap

	mangaLatestChapterNumberMap.Set(ChapterCountMapCacheKey, ret)
	return
}
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
// parseChapterFileName extracts the provider and media ID from a cache file
// name of the form "manga_{provider}_{bucketType}_{mediaId}.cache".
// ok is false when the name does not match that shape.
func parseChapterFileName(dirName string) (provider string, mId int, ok bool) {
	if !strings.HasPrefix(dirName, "manga_") {
		return "", 0, false
	}
	segments := strings.Split(strings.TrimSuffix(dirName, ".cache"), "_")
	if len(segments) != 4 {
		return "", 0, false
	}
	id, convErr := strconv.Atoi(segments[3])
	if convErr != nil {
		return "", 0, false
	}
	return segments[1], id, true
}
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
func GetBestSearchResult(searchRes []*hibikemanga.SearchResult) *hibikemanga.SearchResult {
|
||||
bestRes := searchRes[0]
|
||||
for _, res := range searchRes {
|
||||
if res.SearchRating > bestRes.SearchRating {
|
||||
bestRes = res
|
||||
}
|
||||
}
|
||||
return bestRes
|
||||
}
|
||||
|
||||
// HydrateSearchResultSearchRating rates the search results based on the provided title
|
||||
// It checks if all search results have a rating of 0 and if so, it calculates ratings
|
||||
// using the Sorensen-Dice
|
||||
func HydrateSearchResultSearchRating(_searchRes []*hibikemanga.SearchResult, title *string) {
|
||||
// Rate the search results if all ratings are 0
|
||||
if noRatings := lo.EveryBy(_searchRes, func(res *hibikemanga.SearchResult) bool {
|
||||
return res.SearchRating == 0
|
||||
}); noRatings {
|
||||
wg := sync.WaitGroup{}
|
||||
wg.Add(len(_searchRes))
|
||||
for _, res := range _searchRes {
|
||||
go func(res *hibikemanga.SearchResult) {
|
||||
defer wg.Done()
|
||||
|
||||
compTitles := []*string{&res.Title}
|
||||
if res.Synonyms == nil || len(res.Synonyms) == 0 {
|
||||
return
|
||||
}
|
||||
for _, syn := range res.Synonyms {
|
||||
compTitles = append(compTitles, &syn)
|
||||
}
|
||||
|
||||
compRes, ok := comparison.FindBestMatchWithSorensenDice(title, compTitles)
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
|
||||
res.SearchRating = compRes.Rating
|
||||
return
|
||||
}(res)
|
||||
}
|
||||
wg.Wait()
|
||||
}
|
||||
}
|
||||
15
seanime-2.9.10/internal/manga/chapter_container_helpers.go
Normal file
15
seanime-2.9.10/internal/manga/chapter_container_helpers.go
Normal file
@@ -0,0 +1,15 @@
|
||||
package manga
|
||||
|
||||
import (
|
||||
hibikemanga "seanime/internal/extension/hibike/manga"
|
||||
)
|
||||
|
||||
// GetChapter returns a chapter from the container
|
||||
func (cc *ChapterContainer) GetChapter(id string) (ret *hibikemanga.ChapterDetails, found bool) {
|
||||
for _, c := range cc.Chapters {
|
||||
if c.ID == id {
|
||||
return c, true
|
||||
}
|
||||
}
|
||||
return nil, false
|
||||
}
|
||||
120
seanime-2.9.10/internal/manga/chapter_container_mapping.go
Normal file
120
seanime-2.9.10/internal/manga/chapter_container_mapping.go
Normal file
@@ -0,0 +1,120 @@
|
||||
package manga
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"seanime/internal/extension"
|
||||
"seanime/internal/util"
|
||||
"seanime/internal/util/result"
|
||||
"strings"
|
||||
|
||||
hibikemanga "seanime/internal/extension/hibike/manga"
|
||||
)
|
||||
|
||||
// searchResultCache memoizes provider search results, keyed by provider + normalized query.
var searchResultCache = result.NewCache[string, []*hibikemanga.SearchResult]()

// ManualSearch queries the given provider for manga matching `query`.
// The query is lowercased and trimmed before searching, results are memoized in
// searchResultCache, and an empty query returns an empty slice.
func (r *Repository) ManualSearch(provider string, query string) (ret []*hibikemanga.SearchResult, err error) {
	defer util.HandlePanicInModuleWithError("manga/ManualSearch", &err)

	if query == "" {
		return make([]*hibikemanga.SearchResult, 0), nil
	}

	// Get the search results
	providerExtension, ok := extension.GetExtension[extension.MangaProviderExtension](r.providerExtensionBank, provider)
	if !ok {
		r.logger.Error().Str("provider", provider).Msg("manga: Provider not found")
		return nil, errors.New("manga: Provider not found")
	}

	normalizedQuery := strings.ToLower(strings.TrimSpace(query))

	// Serve the memoized results when available
	searchRes, found := searchResultCache.Get(provider + normalizedQuery)
	if found {
		return searchRes, nil
	}

	searchRes, err = providerExtension.GetProvider().Search(hibikemanga.SearchOptions{
		Query: normalizedQuery,
	})
	if err != nil {
		r.logger.Error().Err(err).Str("query", normalizedQuery).Msg("manga: Search failed")
		return nil, err
	}

	// Overwrite the provider just in case
	for _, res := range searchRes {
		res.Provider = provider
	}

	searchResultCache.Set(provider+normalizedQuery, searchRes)

	return searchRes, nil
}
|
||||
|
||||
// ManualMapping is used to manually map a manga to a provider.
// After calling this, the client should re-fetch the chapter container.
//
// It drops the cached chapter container for the (provider, mediaId) pair and
// persists the mapping in the database.
func (r *Repository) ManualMapping(provider string, mediaId int, mangaId string) (err error) {
	defer util.HandlePanicInModuleWithError("manga/ManualMapping", &err)

	r.logger.Trace().Msgf("manga: Removing cached bucket for %s, media ID: %d", provider, mediaId)

	// Delete the cached chapter container if any (removal errors are intentionally ignored)
	bucket := r.getFcProviderBucket(provider, mediaId, bucketTypeChapter)
	_ = r.fileCacher.Remove(bucket.Name())

	r.logger.Trace().
		Str("provider", provider).
		Int("mediaId", mediaId).
		Str("mangaId", mangaId).
		Msg("manga: Manual mapping")

	// Insert the mapping into the database
	err = r.db.InsertMangaMapping(provider, mediaId, mangaId)
	if err != nil {
		r.logger.Error().Err(err).Msg("manga: Failed to insert mapping")
		return err
	}

	r.logger.Debug().Msg("manga: Manual mapping successful")

	return nil
}
|
||||
|
||||
// MappingResponse is the client-facing result of GetMapping.
type MappingResponse struct {
	// MangaID is the provider-side manga ID; nil when no manual mapping exists.
	MangaID *string `json:"mangaId"`
}
|
||||
|
||||
func (r *Repository) GetMapping(provider string, mediaId int) (ret MappingResponse) {
|
||||
defer util.HandlePanicInModuleThen("manga/GetMapping", func() {
|
||||
ret = MappingResponse{}
|
||||
})
|
||||
|
||||
mapping, found := r.db.GetMangaMapping(provider, mediaId)
|
||||
if !found {
|
||||
return MappingResponse{}
|
||||
}
|
||||
|
||||
return MappingResponse{
|
||||
MangaID: &mapping.MangaID,
|
||||
}
|
||||
}
|
||||
|
||||
// RemoveMapping deletes the manual mapping for the given provider and media ID
// from the database, then drops the cached chapter container so the next fetch
// falls back to a provider search.
func (r *Repository) RemoveMapping(provider string, mediaId int) (err error) {
	defer util.HandlePanicInModuleWithError("manga/RemoveMapping", &err)

	// Delete the mapping from the database
	err = r.db.DeleteMangaMapping(provider, mediaId)
	if err != nil {
		r.logger.Error().Err(err).Msg("manga: Failed to delete mapping")
		return err
	}

	r.logger.Debug().Msg("manga: Mapping removed")

	r.logger.Trace().Msgf("manga: Removing cached bucket for %s, media ID: %d", provider, mediaId)
	// Delete the cached chapter container if any (removal errors are intentionally ignored)
	bucket := r.getFcProviderBucket(provider, mediaId, bucketTypeChapter)
	_ = r.fileCacher.Remove(bucket.Name())

	return nil
}
|
||||
240
seanime-2.9.10/internal/manga/chapter_page_container.go
Normal file
240
seanime-2.9.10/internal/manga/chapter_page_container.go
Normal file
@@ -0,0 +1,240 @@
|
||||
package manga
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"seanime/internal/extension"
|
||||
manga_providers "seanime/internal/manga/providers"
|
||||
"seanime/internal/util"
|
||||
"sync"
|
||||
|
||||
hibikemanga "seanime/internal/extension/hibike/manga"
|
||||
)
|
||||
|
||||
type (
	// PageContainer is used to display the list of pages from a chapter in the client.
	// It is cached in the file cache bucket with a key of the format: {provider}${mediaId}${chapterId}
	PageContainer struct {
		MediaId   int    `json:"mediaId"`   // AniList media ID
		Provider  string `json:"provider"`  // Manga provider extension ID
		ChapterId string `json:"chapterId"` // Provider-side chapter ID
		Pages     []*hibikemanga.ChapterPage `json:"pages"`
		// PageDimensions is indexed by page number; populated only when the
		// client requested 'Double Page' mode (see getPageDimensions).
		PageDimensions map[int]*PageDimension `json:"pageDimensions"`
		IsDownloaded   bool                   `json:"isDownloaded"` // TODO remove
	}

	// PageDimension is used to store the dimensions of a page.
	// It is used by the client for 'Double Page' mode.
	PageDimension struct {
		Width  int `json:"width"`
		Height int `json:"height"`
	}
)
|
||||
|
||||
// GetMangaPageContainer returns the PageContainer for a manga chapter based on the provider.
|
||||
func (r *Repository) GetMangaPageContainer(
|
||||
provider string,
|
||||
mediaId int,
|
||||
chapterId string,
|
||||
doublePage bool,
|
||||
isOffline *bool,
|
||||
) (ret *PageContainer, err error) {
|
||||
defer util.HandlePanicInModuleWithError("manga/GetMangaPageContainer", &err)
|
||||
|
||||
// +---------------------+
|
||||
// | Downloads |
|
||||
// +---------------------+
|
||||
|
||||
providerExtension, ok := extension.GetExtension[extension.MangaProviderExtension](r.providerExtensionBank, provider)
|
||||
if !ok {
|
||||
r.logger.Error().Str("provider", provider).Msg("manga: Provider not found")
|
||||
return nil, errors.New("manga: Provider not found")
|
||||
}
|
||||
|
||||
_, isLocalProvider := providerExtension.GetProvider().(*manga_providers.Local)
|
||||
|
||||
if *isOffline && !isLocalProvider {
|
||||
ret, err = r.getDownloadedMangaPageContainer(provider, mediaId, chapterId)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
if !isLocalProvider {
|
||||
ret, _ = r.getDownloadedMangaPageContainer(provider, mediaId, chapterId)
|
||||
if ret != nil {
|
||||
return ret, nil
|
||||
}
|
||||
}
|
||||
|
||||
// +---------------------+
|
||||
// | Get Pages |
|
||||
// +---------------------+
|
||||
|
||||
// PageContainer key
|
||||
pageContainerKey := fmt.Sprintf("%s$%d$%s", provider, mediaId, chapterId)
|
||||
|
||||
r.logger.Trace().
|
||||
Str("provider", provider).
|
||||
Int("mediaId", mediaId).
|
||||
Str("key", pageContainerKey).
|
||||
Str("chapterId", chapterId).
|
||||
Msgf("manga: Getting pages")
|
||||
|
||||
// +---------------------+
|
||||
// | Cache |
|
||||
// +---------------------+
|
||||
|
||||
var container *PageContainer
|
||||
|
||||
// PageContainer bucket
|
||||
// e.g., manga_comick_pages_123
|
||||
// -> { "comick$123$10010": PageContainer }, { "comick$123$10011": PageContainer }
|
||||
pageBucket := r.getFcProviderBucket(provider, mediaId, bucketTypePage)
|
||||
|
||||
// Check if the container is in the cache
|
||||
if found, _ := r.fileCacher.Get(pageBucket, pageContainerKey, &container); found && !isLocalProvider {
|
||||
|
||||
// Hydrate page dimensions
|
||||
pageDimensions, _ := r.getPageDimensions(doublePage, provider, mediaId, chapterId, container.Pages)
|
||||
container.PageDimensions = pageDimensions
|
||||
|
||||
r.logger.Debug().Str("key", pageContainerKey).Msg("manga: Page Container Cache HIT")
|
||||
return container, nil
|
||||
}
|
||||
|
||||
// +---------------------+
|
||||
// | Fetch pages |
|
||||
// +---------------------+
|
||||
|
||||
// Search for the chapter in the cache
|
||||
containerBucket := r.getFcProviderBucket(provider, mediaId, bucketTypeChapter)
|
||||
|
||||
chapterContainerKey := getMangaChapterContainerCacheKey(provider, mediaId)
|
||||
|
||||
var chapterContainer *ChapterContainer
|
||||
if found, _ := r.fileCacher.Get(containerBucket, chapterContainerKey, &chapterContainer); !found {
|
||||
r.logger.Error().Msg("manga: Chapter Container not found")
|
||||
return nil, ErrNoChapters
|
||||
}
|
||||
|
||||
// Get the chapter from the container
|
||||
var chapter *hibikemanga.ChapterDetails
|
||||
for _, c := range chapterContainer.Chapters {
|
||||
if c.ID == chapterId {
|
||||
chapter = c
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if chapter == nil {
|
||||
r.logger.Error().Msg("manga: Chapter not found")
|
||||
return nil, ErrChapterNotFound
|
||||
}
|
||||
|
||||
// Get the chapter pages
|
||||
var pages []*hibikemanga.ChapterPage
|
||||
|
||||
pages, err = providerExtension.GetProvider().FindChapterPages(chapter.ID)
|
||||
if err != nil {
|
||||
r.logger.Error().Err(err).Msg("manga: Could not get chapter pages")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if pages == nil || len(pages) == 0 {
|
||||
r.logger.Error().Msg("manga: No pages found")
|
||||
return nil, fmt.Errorf("manga: No pages found")
|
||||
}
|
||||
|
||||
// Overwrite provider just in case
|
||||
for _, page := range pages {
|
||||
page.Provider = provider
|
||||
}
|
||||
|
||||
pageDimensions, _ := r.getPageDimensions(doublePage, provider, mediaId, chapterId, pages)
|
||||
|
||||
container = &PageContainer{
|
||||
MediaId: mediaId,
|
||||
Provider: provider,
|
||||
ChapterId: chapterId,
|
||||
Pages: pages,
|
||||
PageDimensions: pageDimensions,
|
||||
IsDownloaded: false,
|
||||
}
|
||||
|
||||
// Set cache only if not local provider
|
||||
if !isLocalProvider {
|
||||
err = r.fileCacher.Set(pageBucket, pageContainerKey, container)
|
||||
if err != nil {
|
||||
r.logger.Warn().Err(err).Msg("manga: Failed to populate cache")
|
||||
}
|
||||
}
|
||||
|
||||
r.logger.Debug().Str("key", pageContainerKey).Msg("manga: Retrieved pages")
|
||||
|
||||
return container, nil
|
||||
}
|
||||
|
||||
func (r *Repository) getPageDimensions(enabled bool, provider string, mediaId int, chapterId string, pages []*hibikemanga.ChapterPage) (ret map[int]*PageDimension, err error) {
|
||||
defer util.HandlePanicInModuleWithError("manga/getPageDimensions", &err)
|
||||
|
||||
if !enabled {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
// e.g. comick$123$10010
|
||||
key := fmt.Sprintf("%s$%d$%s", provider, mediaId, chapterId)
|
||||
|
||||
// Page dimensions bucket
|
||||
// e.g., manga_comick_page-dimensions_123
|
||||
// -> { "comick$123$10010": PageDimensions }, { "comick$123$10011": PageDimensions }
|
||||
dimensionBucket := r.getFcProviderBucket(provider, mediaId, bucketTypePageDimensions)
|
||||
|
||||
if found, _ := r.fileCacher.Get(dimensionBucket, key, &ret); found {
|
||||
r.logger.Debug().Str("key", key).Msg("manga: Page Dimensions Cache HIT")
|
||||
return
|
||||
}
|
||||
|
||||
r.logger.Trace().Str("key", key).Msg("manga: Getting page dimensions")
|
||||
|
||||
// Get the page dimensions
|
||||
pageDimensions := make(map[int]*PageDimension)
|
||||
mu := sync.Mutex{}
|
||||
wg := sync.WaitGroup{}
|
||||
for _, page := range pages {
|
||||
wg.Add(1)
|
||||
go func(page *hibikemanga.ChapterPage) {
|
||||
defer wg.Done()
|
||||
var buf []byte
|
||||
if page.Buf != nil {
|
||||
buf = page.Buf
|
||||
} else {
|
||||
buf, err = manga_providers.GetImageByProxy(page.URL, page.Headers)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
width, height, err := getImageNaturalSizeB(buf)
|
||||
if err != nil {
|
||||
//r.logger.Warn().Err(err).Int("index", page.Index).Msg("manga: failed to get image size")
|
||||
return
|
||||
}
|
||||
|
||||
mu.Lock()
|
||||
// DEVNOTE: Index by page index
|
||||
pageDimensions[page.Index] = &PageDimension{
|
||||
Width: width,
|
||||
Height: height,
|
||||
}
|
||||
mu.Unlock()
|
||||
}(page)
|
||||
}
|
||||
wg.Wait()
|
||||
|
||||
_ = r.fileCacher.Set(dimensionBucket, key, pageDimensions)
|
||||
|
||||
r.logger.Info().Str("bucket", dimensionBucket.Name()).Msg("manga: Retrieved page dimensions")
|
||||
|
||||
return pageDimensions, nil
|
||||
}
|
||||
147
seanime-2.9.10/internal/manga/collection.go
Normal file
147
seanime-2.9.10/internal/manga/collection.go
Normal file
@@ -0,0 +1,147 @@
|
||||
package manga
|
||||
|
||||
import (
|
||||
"cmp"
|
||||
"fmt"
|
||||
"seanime/internal/api/anilist"
|
||||
"seanime/internal/hook"
|
||||
"seanime/internal/platforms/platform"
|
||||
"slices"
|
||||
|
||||
"github.com/samber/lo"
|
||||
"github.com/sourcegraph/conc/pool"
|
||||
)
|
||||
|
||||
type (
	// CollectionStatusType names a collection status category.
	// NOTE(review): declared but not referenced in the visible code — confirm usage elsewhere.
	CollectionStatusType string

	// Collection is the client-facing manga library collection, split into per-status lists.
	Collection struct {
		Lists []*CollectionList `json:"lists"`
	}

	// CollectionList groups the entries that share one AniList list status.
	CollectionList struct {
		Type    anilist.MediaListStatus `json:"type"`
		Status  anilist.MediaListStatus `json:"status"`
		Entries []*CollectionEntry      `json:"entries"`
	}

	// CollectionEntry pairs a manga with the user's AniList list data for it.
	CollectionEntry struct {
		Media         *anilist.BaseManga `json:"media"`
		MediaId       int                `json:"mediaId"`
		EntryListData *EntryListData     `json:"listData"` // AniList list data
	}
)

type (
	// NewCollectionOptions holds the inputs for NewCollection.
	NewCollectionOptions struct {
		MangaCollection *anilist.MangaCollection
		Platform        platform.Platform
	}
)
|
||||
|
||||
func NewCollection(opts *NewCollectionOptions) (collection *Collection, err error) {
|
||||
coll := &Collection{}
|
||||
if opts.MangaCollection == nil {
|
||||
return nil, nil
|
||||
}
|
||||
if opts.Platform == nil {
|
||||
return nil, fmt.Errorf("platform is nil")
|
||||
}
|
||||
|
||||
optsEvent := new(MangaLibraryCollectionRequestedEvent)
|
||||
optsEvent.MangaCollection = opts.MangaCollection
|
||||
err = hook.GlobalHookManager.OnMangaLibraryCollectionRequested().Trigger(optsEvent)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
opts.MangaCollection = optsEvent.MangaCollection
|
||||
|
||||
aniLists := opts.MangaCollection.GetMediaListCollection().GetLists()
|
||||
|
||||
aniLists = lo.Filter(aniLists, func(list *anilist.MangaList, _ int) bool {
|
||||
return list.Status != nil
|
||||
})
|
||||
|
||||
p := pool.NewWithResults[*CollectionList]()
|
||||
for _, list := range aniLists {
|
||||
p.Go(func() *CollectionList {
|
||||
|
||||
if list.Status == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
entries := list.GetEntries()
|
||||
|
||||
p2 := pool.NewWithResults[*CollectionEntry]()
|
||||
for _, entry := range entries {
|
||||
p2.Go(func() *CollectionEntry {
|
||||
|
||||
return &CollectionEntry{
|
||||
Media: entry.GetMedia(),
|
||||
MediaId: entry.GetMedia().GetID(),
|
||||
EntryListData: &EntryListData{
|
||||
Progress: *entry.Progress,
|
||||
Score: *entry.Score,
|
||||
Status: entry.Status,
|
||||
Repeat: entry.GetRepeatSafe(),
|
||||
StartedAt: anilist.FuzzyDateToString(entry.StartedAt),
|
||||
CompletedAt: anilist.FuzzyDateToString(entry.CompletedAt),
|
||||
},
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
collectionEntries := p2.Wait()
|
||||
|
||||
slices.SortFunc(collectionEntries, func(i, j *CollectionEntry) int {
|
||||
return cmp.Compare(i.Media.GetTitleSafe(), j.Media.GetTitleSafe())
|
||||
})
|
||||
|
||||
return &CollectionList{
|
||||
Type: getCollectionEntryFromListStatus(*list.Status),
|
||||
Status: *list.Status,
|
||||
Entries: collectionEntries,
|
||||
}
|
||||
|
||||
})
|
||||
}
|
||||
lists := p.Wait()
|
||||
|
||||
lists = lo.Filter(lists, func(l *CollectionList, _ int) bool {
|
||||
return l != nil
|
||||
})
|
||||
|
||||
// Merge repeating to current (no need to show repeating as a separate list)
|
||||
repeat, ok := lo.Find(lists, func(item *CollectionList) bool {
|
||||
return item.Status == anilist.MediaListStatusRepeating
|
||||
})
|
||||
if ok {
|
||||
current, ok := lo.Find(lists, func(item *CollectionList) bool {
|
||||
return item.Status == anilist.MediaListStatusCurrent
|
||||
})
|
||||
if len(repeat.Entries) > 0 && ok {
|
||||
current.Entries = append(current.Entries, repeat.Entries...)
|
||||
}
|
||||
// Remove repeating from lists
|
||||
lists = lo.Filter(lists, func(item *CollectionList, index int) bool {
|
||||
return item.Status != anilist.MediaListStatusRepeating
|
||||
})
|
||||
}
|
||||
|
||||
coll.Lists = lists
|
||||
|
||||
event := new(MangaLibraryCollectionEvent)
|
||||
event.LibraryCollection = coll
|
||||
_ = hook.GlobalHookManager.OnMangaLibraryCollection().Trigger(event)
|
||||
coll = event.LibraryCollection
|
||||
|
||||
return coll, nil
|
||||
}
|
||||
|
||||
func getCollectionEntryFromListStatus(st anilist.MediaListStatus) anilist.MediaListStatus {
|
||||
if st == anilist.MediaListStatusRepeating {
|
||||
return anilist.MediaListStatusCurrent
|
||||
}
|
||||
|
||||
return st
|
||||
}
|
||||
47
seanime-2.9.10/internal/manga/collection_test.go
Normal file
47
seanime-2.9.10/internal/manga/collection_test.go
Normal file
@@ -0,0 +1,47 @@
|
||||
package manga
|
||||
|
||||
import (
|
||||
"context"
|
||||
"seanime/internal/api/anilist"
|
||||
"seanime/internal/platforms/anilist_platform"
|
||||
"seanime/internal/test_utils"
|
||||
"seanime/internal/util"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestNewCollection(t *testing.T) {
|
||||
test_utils.SetTwoLevelDeep()
|
||||
test_utils.InitTestProvider(t, test_utils.Anilist())
|
||||
|
||||
anilistClient := anilist.TestGetMockAnilistClient()
|
||||
logger := util.NewLogger()
|
||||
anilistPlatform := anilist_platform.NewAnilistPlatform(anilistClient, logger)
|
||||
|
||||
mangaCollection, err := anilistClient.MangaCollection(context.Background(), &test_utils.ConfigData.Provider.AnilistUsername)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to get manga collection: %v", err)
|
||||
}
|
||||
|
||||
opts := &NewCollectionOptions{
|
||||
MangaCollection: mangaCollection,
|
||||
Platform: anilistPlatform,
|
||||
}
|
||||
|
||||
collection, err := NewCollection(opts)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create collection: %v", err)
|
||||
}
|
||||
|
||||
if len(collection.Lists) == 0 {
|
||||
t.Skip("No lists found")
|
||||
}
|
||||
|
||||
for _, list := range collection.Lists {
|
||||
t.Logf("List: %s", list.Type)
|
||||
for _, entry := range list.Entries {
|
||||
t.Logf("\tEntry: %s", entry.Media.GetPreferredTitle())
|
||||
t.Logf("\t\tProgress: %d", entry.EntryListData.Progress)
|
||||
}
|
||||
t.Log("---------------------------------------")
|
||||
}
|
||||
}
|
||||
463
seanime-2.9.10/internal/manga/download.go
Normal file
463
seanime-2.9.10/internal/manga/download.go
Normal file
@@ -0,0 +1,463 @@
|
||||
package manga
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
"seanime/internal/api/anilist"
|
||||
"seanime/internal/database/db"
|
||||
"seanime/internal/database/models"
|
||||
"seanime/internal/events"
|
||||
"seanime/internal/hook"
|
||||
chapter_downloader "seanime/internal/manga/downloader"
|
||||
manga_providers "seanime/internal/manga/providers"
|
||||
"seanime/internal/util"
|
||||
"seanime/internal/util/filecache"
|
||||
"sync"
|
||||
|
||||
"github.com/rs/zerolog"
|
||||
)
|
||||
|
||||
type (
	// Downloader orchestrates manga chapter downloads: it wraps the lower-level
	// chapter_downloader.Downloader and maintains a map of what is on disk.
	Downloader struct {
		logger            *zerolog.Logger
		wsEventManager    events.WSEventManagerInterface
		database          *db.Database
		downloadDir       string
		chapterDownloader *chapter_downloader.Downloader
		repository        *Repository
		filecacher        *filecache.Cacher

		mediaMap   *MediaMap // Refreshed on start and after each download
		mediaMapMu sync.RWMutex

		chapterDownloadedCh chan chapter_downloader.DownloadID
		readingDownloadDir  bool  // true while hydrateMediaMap scans the download directory
		isOffline           *bool // when true, downloads are rejected and cache persistence is skipped
	}

	// MediaMap is created after reading the download directory.
	// It is used to store all downloaded chapters for each media.
	// The key is the media ID and the value is a map of provider to a list of chapters.
	//
	// e.g., downloadDir/comick_1234_abc_13/
	//       downloadDir/comick_1234_def_13.5/
	// -> { 1234: { "comick": [ { "chapterId": "abc", "chapterNumber": "13" }, { "chapterId": "def", "chapterNumber": "13.5" } ] } }
	MediaMap map[int]ProviderDownloadMap

	// ProviderDownloadMap is used to store all downloaded chapters for a specific media and provider.
	// The key is the provider and the value is a list of chapters.
	ProviderDownloadMap map[string][]ProviderDownloadMapChapterInfo

	// ProviderDownloadMapChapterInfo identifies a single downloaded chapter.
	ProviderDownloadMapChapterInfo struct {
		ChapterID     string `json:"chapterId"`
		ChapterNumber string `json:"chapterNumber"`
	}

	// MediaDownloadData is the client-facing snapshot of a media's downloads:
	// chapters already on disk and chapters still in the queue.
	MediaDownloadData struct {
		Downloaded ProviderDownloadMap `json:"downloaded"`
		Queued     ProviderDownloadMap `json:"queued"`
	}
)

type (
	// NewDownloaderOptions holds the dependencies for NewDownloader.
	NewDownloaderOptions struct {
		Database       *db.Database
		Logger         *zerolog.Logger
		WSEventManager events.WSEventManagerInterface
		DownloadDir    string
		Repository     *Repository
		IsOffline      *bool
	}

	// DownloadChapterOptions identifies the chapter to download and whether it
	// should start immediately instead of waiting in the queue.
	DownloadChapterOptions struct {
		Provider  string
		MediaId   int
		ChapterId string
		StartNow  bool
	}
)
|
||||
|
||||
func NewDownloader(opts *NewDownloaderOptions) *Downloader {
|
||||
_ = os.MkdirAll(opts.DownloadDir, os.ModePerm)
|
||||
filecacher, _ := filecache.NewCacher(opts.DownloadDir)
|
||||
|
||||
d := &Downloader{
|
||||
logger: opts.Logger,
|
||||
wsEventManager: opts.WSEventManager,
|
||||
database: opts.Database,
|
||||
downloadDir: opts.DownloadDir,
|
||||
repository: opts.Repository,
|
||||
mediaMap: new(MediaMap),
|
||||
filecacher: filecacher,
|
||||
isOffline: opts.IsOffline,
|
||||
}
|
||||
|
||||
d.chapterDownloader = chapter_downloader.NewDownloader(&chapter_downloader.NewDownloaderOptions{
|
||||
Logger: opts.Logger,
|
||||
WSEventManager: opts.WSEventManager,
|
||||
Database: opts.Database,
|
||||
DownloadDir: opts.DownloadDir,
|
||||
})
|
||||
|
||||
go d.hydrateMediaMap()
|
||||
|
||||
return d
|
||||
}
|
||||
|
||||
// Start is called once to start the Chapter downloader 's main goroutine.
// The goroutine reacts to each completed chapter download by persisting the
// chapter container and re-scanning the download directory.
// NOTE(review): the goroutine has no shutdown path; it lives for the life of
// the process.
func (d *Downloader) Start() {
	d.chapterDownloader.Start()
	go func() {
		for {
			select {
			// Listen for downloaded chapters
			case downloadId := <-d.chapterDownloader.ChapterDownloaded():
				// In offline mode, skip cache persistence and map refresh.
				if d.isOffline != nil && *d.isOffline {
					continue
				}

				// When a chapter is downloaded, fetch the chapter container from the file cache
				// and store it in the permanent bucket.
				// DEVNOTE: This will be useful to avoid re-fetching the chapter container when the cache expires.
				// This is deleted when a chapter is deleted.
				go func() {
					// Key format: "{provider}${mediaId}".
					chapterContainerKey := getMangaChapterContainerCacheKey(downloadId.Provider, downloadId.MediaId)
					chapterContainer, found := d.repository.getChapterContainerFromFilecache(downloadId.Provider, downloadId.MediaId)
					if found {
						// Store the chapter container in the permanent bucket
						permBucket := getPermanentChapterContainerCacheBucket(downloadId.Provider, downloadId.MediaId)
						_ = d.filecacher.SetPerm(permBucket, chapterContainerKey, chapterContainer)
					}
				}()

				// Refresh the media map when a chapter is downloaded
				d.hydrateMediaMap()
			}
		}
	}()
}
|
||||
|
||||
// The bucket for storing downloaded chapter containers.
// e.g. manga_downloaded_comick_chapters_1234
//
// NOTE(review): entries are written to this bucket in Start under the chapter
// container cache key ("{provider}${mediaId}"), while DeleteChapter(s) delete
// by chapter ID — the keys do not match; confirm which is intended.
func getPermanentChapterContainerCacheBucket(provider string, mId int) filecache.PermanentBucket {
	return filecache.NewPermanentBucket(fmt.Sprintf("manga_downloaded_%s_chapters_%d", provider, mId))
}
|
||||
|
||||
// getChapterContainerFromFilecache returns the chapter container from the temporary file cache.
|
||||
func (r *Repository) getChapterContainerFromFilecache(provider string, mId int) (*ChapterContainer, bool) {
|
||||
// Find chapter container in the file cache
|
||||
chapterBucket := r.getFcProviderBucket(provider, mId, bucketTypeChapter)
|
||||
|
||||
chapterContainerKey := getMangaChapterContainerCacheKey(provider, mId)
|
||||
|
||||
var chapterContainer *ChapterContainer
|
||||
// Get the key-value pair in the bucket
|
||||
if found, _ := r.fileCacher.Get(chapterBucket, chapterContainerKey, &chapterContainer); !found {
|
||||
// If the chapter container is not found, return an error
|
||||
// since it means that it wasn't fetched (for some reason) -- This shouldn't happen
|
||||
return nil, false
|
||||
}
|
||||
|
||||
return chapterContainer, true
|
||||
}
|
||||
|
||||
// getChapterContainerFromPermanentFilecache returns the chapter container from the permanent file cache.
|
||||
func (r *Repository) getChapterContainerFromPermanentFilecache(provider string, mId int) (*ChapterContainer, bool) {
|
||||
permBucket := getPermanentChapterContainerCacheBucket(provider, mId)
|
||||
|
||||
chapterContainerKey := getMangaChapterContainerCacheKey(provider, mId)
|
||||
|
||||
var chapterContainer *ChapterContainer
|
||||
// Get the key-value pair in the bucket
|
||||
if found, _ := r.fileCacher.GetPerm(permBucket, chapterContainerKey, &chapterContainer); !found {
|
||||
// If the chapter container is not found, return an error
|
||||
// since it means that it wasn't fetched (for some reason) -- This shouldn't happen
|
||||
return nil, false
|
||||
}
|
||||
|
||||
return chapterContainer, true
|
||||
}
|
||||
|
||||
// DownloadChapter is called by the client to download a chapter.
|
||||
// It fetches the chapter pages by using Repository.GetMangaPageContainer
|
||||
// and invokes the chapter_downloader.Downloader 'Download' method to add the chapter to the download queue.
|
||||
func (d *Downloader) DownloadChapter(opts DownloadChapterOptions) error {
|
||||
|
||||
if d.isOffline != nil && *d.isOffline {
|
||||
return errors.New("manga downloader: Manga downloader is in offline mode")
|
||||
}
|
||||
|
||||
chapterContainer, found := d.repository.getChapterContainerFromFilecache(opts.Provider, opts.MediaId)
|
||||
if !found {
|
||||
return errors.New("chapters not found")
|
||||
}
|
||||
|
||||
// Find the chapter in the chapter container
|
||||
// e.g. Wind-Breaker$0062
|
||||
chapter, ok := chapterContainer.GetChapter(opts.ChapterId)
|
||||
if !ok {
|
||||
return errors.New("chapter not found")
|
||||
}
|
||||
|
||||
// Fetch the chapter pages
|
||||
pageContainer, err := d.repository.GetMangaPageContainer(opts.Provider, opts.MediaId, opts.ChapterId, false, &[]bool{false}[0])
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Add the chapter to the download queue
|
||||
return d.chapterDownloader.AddToQueue(chapter_downloader.DownloadOptions{
|
||||
DownloadID: chapter_downloader.DownloadID{
|
||||
Provider: opts.Provider,
|
||||
MediaId: opts.MediaId,
|
||||
ChapterId: opts.ChapterId,
|
||||
ChapterNumber: manga_providers.GetNormalizedChapter(chapter.Chapter),
|
||||
},
|
||||
Pages: pageContainer.Pages,
|
||||
})
|
||||
}
|
||||
|
||||
// DeleteChapter is called by the client to delete a downloaded chapter.
|
||||
func (d *Downloader) DeleteChapter(provider string, mediaId int, chapterId string, chapterNumber string) (err error) {
|
||||
err = d.chapterDownloader.DeleteChapter(chapter_downloader.DownloadID{
|
||||
Provider: provider,
|
||||
MediaId: mediaId,
|
||||
ChapterId: chapterId,
|
||||
ChapterNumber: chapterNumber,
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
permBucket := getPermanentChapterContainerCacheBucket(provider, mediaId)
|
||||
_ = d.filecacher.DeletePerm(permBucket, chapterId)
|
||||
|
||||
d.hydrateMediaMap()
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// DeleteChapters is called by the client to delete downloaded chapters.
|
||||
func (d *Downloader) DeleteChapters(ids []chapter_downloader.DownloadID) (err error) {
|
||||
for _, id := range ids {
|
||||
err = d.chapterDownloader.DeleteChapter(chapter_downloader.DownloadID{
|
||||
Provider: id.Provider,
|
||||
MediaId: id.MediaId,
|
||||
ChapterId: id.ChapterId,
|
||||
ChapterNumber: id.ChapterNumber,
|
||||
})
|
||||
|
||||
permBucket := getPermanentChapterContainerCacheBucket(id.Provider, id.MediaId)
|
||||
_ = d.filecacher.DeletePerm(permBucket, id.ChapterId)
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
d.hydrateMediaMap()
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetMediaDownloads returns the downloaded and queued chapters for the given media.
// When cached is false, the download directory is re-scanned first so the
// returned data reflects the current state on disk.
func (d *Downloader) GetMediaDownloads(mediaId int, cached bool) (ret MediaDownloadData, err error) {
	defer util.HandlePanicInModuleWithError("manga/GetMediaDownloads", &err)

	if !cached {
		d.hydrateMediaMap()
	}

	return d.mediaMap.getMediaDownload(mediaId, d.database)
}
|
||||
|
||||
// RunChapterDownloadQueue starts (or resumes) the chapter download queue.
func (d *Downloader) RunChapterDownloadQueue() {
	d.chapterDownloader.Run()
}

// StopChapterDownloadQueue resets any queue items stuck in the "downloading"
// state, then stops the chapter download queue.
func (d *Downloader) StopChapterDownloadQueue() {
	// Best-effort reset; the error is intentionally ignored.
	_ = d.database.ResetDownloadingChapterDownloadQueueItems()
	d.chapterDownloader.Stop()
}
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
type (
	// NewDownloadListOptions holds the inputs for NewDownloadList.
	NewDownloadListOptions struct {
		MangaCollection *anilist.MangaCollection
	}

	// DownloadListItem is one media entry in the download list shown to the client.
	DownloadListItem struct {
		MediaId int `json:"mediaId"`
		// Media will be nil if the manga is no longer in the user's collection.
		// The client should handle this case by displaying the download data without the media data.
		Media        *anilist.BaseManga  `json:"media"`
		DownloadData ProviderDownloadMap `json:"downloadData"`
	}
)
|
||||
|
||||
// NewDownloadList returns a list of DownloadListItem for the client to display.
|
||||
func (d *Downloader) NewDownloadList(opts *NewDownloadListOptions) (ret []*DownloadListItem, err error) {
|
||||
defer util.HandlePanicInModuleWithError("manga/NewDownloadList", &err)
|
||||
|
||||
mm := d.mediaMap
|
||||
|
||||
ret = make([]*DownloadListItem, 0)
|
||||
|
||||
for mId, data := range *mm {
|
||||
listEntry, ok := opts.MangaCollection.GetListEntryFromMangaId(mId)
|
||||
if !ok {
|
||||
ret = append(ret, &DownloadListItem{
|
||||
MediaId: mId,
|
||||
Media: nil,
|
||||
DownloadData: data,
|
||||
})
|
||||
continue
|
||||
}
|
||||
|
||||
media := listEntry.GetMedia()
|
||||
if media == nil {
|
||||
ret = append(ret, &DownloadListItem{
|
||||
MediaId: mId,
|
||||
Media: nil,
|
||||
DownloadData: data,
|
||||
})
|
||||
continue
|
||||
}
|
||||
|
||||
item := &DownloadListItem{
|
||||
MediaId: mId,
|
||||
Media: media,
|
||||
DownloadData: data,
|
||||
}
|
||||
|
||||
ret = append(ret, item)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// Media map
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
func (mm *MediaMap) getMediaDownload(mediaId int, db *db.Database) (MediaDownloadData, error) {
|
||||
|
||||
if mm == nil {
|
||||
return MediaDownloadData{}, errors.New("could not check downloaded chapters")
|
||||
}
|
||||
|
||||
// Get all downloaded chapters for the media
|
||||
downloads, ok := (*mm)[mediaId]
|
||||
if !ok {
|
||||
downloads = make(map[string][]ProviderDownloadMapChapterInfo)
|
||||
}
|
||||
|
||||
// Get all queued chapters for the media
|
||||
queued, err := db.GetMediaQueuedChapters(mediaId)
|
||||
if err != nil {
|
||||
queued = make([]*models.ChapterDownloadQueueItem, 0)
|
||||
}
|
||||
|
||||
qm := make(ProviderDownloadMap)
|
||||
for _, item := range queued {
|
||||
if _, ok := qm[item.Provider]; !ok {
|
||||
qm[item.Provider] = []ProviderDownloadMapChapterInfo{
|
||||
{
|
||||
ChapterID: item.ChapterID,
|
||||
ChapterNumber: item.ChapterNumber,
|
||||
},
|
||||
}
|
||||
} else {
|
||||
qm[item.Provider] = append(qm[item.Provider], ProviderDownloadMapChapterInfo{
|
||||
ChapterID: item.ChapterID,
|
||||
ChapterNumber: item.ChapterNumber,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
data := MediaDownloadData{
|
||||
Downloaded: downloads,
|
||||
Queued: qm,
|
||||
}
|
||||
|
||||
return data, nil
|
||||
|
||||
}
|
||||
|
||||
// hydrateMediaMap hydrates the MediaMap by reading the download directory.
// Each chapter directory name encodes {provider}_{mediaId}_{chapterId}_{chapterNumber};
// parsed entries are grouped by media ID, then provider. When done, the client
// is notified via a websocket event so it can refetch the download data.
func (d *Downloader) hydrateMediaMap() {

	// NOTE(review): this flag is read here without holding mediaMapMu while it
	// is written below under the lock — a data race under `-race`; consider an
	// atomic.Bool or mediaMapMu.TryLock instead.
	if d.readingDownloadDir {
		return
	}

	d.mediaMapMu.Lock()
	defer d.mediaMapMu.Unlock()

	d.readingDownloadDir = true
	defer func() {
		d.readingDownloadDir = false
	}()

	d.logger.Debug().Msg("manga downloader: Reading download directory")

	ret := make(MediaMap)

	files, err := os.ReadDir(d.downloadDir)
	if err != nil {
		// Non-fatal: continue with whatever listing was returned so the map
		// is still reset and the client still gets a refresh event.
		d.logger.Error().Err(err).Msg("manga downloader: Failed to read download directory")
	}

	// Hydrate MediaMap by going through all chapter directories
	mu := sync.Mutex{}
	wg := sync.WaitGroup{}
	for _, file := range files {
		wg.Add(1)
		go func(file os.DirEntry) {
			defer wg.Done()

			if file.IsDir() {
				// e.g. comick_1234_abc_13.5
				id, ok := chapter_downloader.ParseChapterDirName(file.Name())
				if !ok {
					// Not a chapter directory; skip.
					return
				}

				// ret is shared between goroutines; guard all writes.
				mu.Lock()
				newMapInfo := ProviderDownloadMapChapterInfo{
					ChapterID:     id.ChapterId,
					ChapterNumber: id.ChapterNumber,
				}

				if _, ok := ret[id.MediaId]; !ok {
					ret[id.MediaId] = make(map[string][]ProviderDownloadMapChapterInfo)
					ret[id.MediaId][id.Provider] = []ProviderDownloadMapChapterInfo{newMapInfo}
				} else {
					if _, ok := ret[id.MediaId][id.Provider]; !ok {
						ret[id.MediaId][id.Provider] = []ProviderDownloadMapChapterInfo{newMapInfo}
					} else {
						ret[id.MediaId][id.Provider] = append(ret[id.MediaId][id.Provider], newMapInfo)
					}
				}
				mu.Unlock()
			}
		}(file)
	}
	wg.Wait()

	// Trigger hook event
	ev := &MangaDownloadMapEvent{
		MediaMap: &ret,
	}
	_ = hook.GlobalHookManager.OnMangaDownloadMap().Trigger(ev) // ignore the error
	// make sure the media map is not nil
	if ev.MediaMap != nil {
		ret = *ev.MediaMap
	}

	d.mediaMap = &ret

	// When done refreshing, send a message to the client to refetch the download data
	d.wsEventManager.SendEvent(events.RefreshedMangaDownloadData, nil)
}
|
||||
445
seanime-2.9.10/internal/manga/downloader/chapter_downloader.go
Normal file
445
seanime-2.9.10/internal/manga/downloader/chapter_downloader.go
Normal file
@@ -0,0 +1,445 @@
|
||||
package chapter_downloader
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"image"
|
||||
_ "image/gif"
|
||||
_ "image/jpeg"
|
||||
_ "image/png"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"seanime/internal/database/db"
|
||||
"seanime/internal/events"
|
||||
hibikemanga "seanime/internal/extension/hibike/manga"
|
||||
manga_providers "seanime/internal/manga/providers"
|
||||
"seanime/internal/util"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
"github.com/goccy/go-json"
|
||||
"github.com/rs/zerolog"
|
||||
_ "golang.org/x/image/bmp" // Register BMP format
|
||||
_ "golang.org/x/image/tiff" // Register Tiff format
|
||||
)
|
||||
|
||||
// 📁 cache/manga
|
||||
// └── 📁 {provider}_{mediaId}_{chapterId}_{chapterNumber} <- Downloader generates
|
||||
// ├── 📄 registry.json <- Contains Registry
|
||||
// ├── 📄 1.jpg
|
||||
// ├── 📄 2.jpg
|
||||
// └── 📄 ...
|
||||
//
|
||||
|
||||
type (
	// Downloader is used to download chapters from various manga providers.
	Downloader struct {
		logger         *zerolog.Logger
		wsEventManager events.WSEventManagerInterface
		database       *db.Database
		downloadDir    string
		mu             sync.Mutex // guards the exported operations (AddToQueue, DeleteChapter, Run, Stop)
		downloadMu     sync.Mutex // guards concurrent writes to a chapter's Registry during page downloads
		// cancelChannel is used to cancel some or all downloads.
		cancelChannels      map[DownloadID]chan struct{}
		queue               *Queue
		cancelCh            chan struct{}   // Close to cancel the download process
		runCh               chan *QueueInfo // Receives a signal to download the next item
		chapterDownloadedCh chan DownloadID // Sends a signal when a chapter has been downloaded
	}

	//+-------------------------------------------------------------------------------------------------------------------+

	// DownloadID uniquely identifies a chapter download; its fields are encoded
	// into the chapter directory name {provider}_{mediaId}_{chapterId}_{chapterNumber}.
	DownloadID struct {
		Provider      string `json:"provider"`
		MediaId       int    `json:"mediaId"`
		ChapterId     string `json:"chapterId"`
		ChapterNumber string `json:"chapterNumber"`
	}

	//+-------------------------------------------------------------------------------------------------------------------+

	// Registry stored in 📄 registry.json for each chapter download.
	// It maps a page index to information about the downloaded image.
	Registry map[int]PageInfo

	// PageInfo describes one downloaded page image.
	PageInfo struct {
		Index       int    `json:"index"`
		Filename    string `json:"filename"`
		OriginalURL string `json:"original_url"`
		Size        int64  `json:"size"`
		Width       int    `json:"width"`
		Height      int    `json:"height"`
	}
)

type (
	// NewDownloaderOptions holds the dependencies for NewDownloader.
	NewDownloaderOptions struct {
		Logger         *zerolog.Logger
		WSEventManager events.WSEventManagerInterface
		DownloadDir    string
		Database       *db.Database
	}

	// DownloadOptions describes a chapter to add to the download queue.
	DownloadOptions struct {
		DownloadID
		Pages    []*hibikemanga.ChapterPage
		StartNow bool // when true, start immediately instead of waiting in the queue
	}
)
|
||||
|
||||
func NewDownloader(opts *NewDownloaderOptions) *Downloader {
|
||||
runCh := make(chan *QueueInfo, 1)
|
||||
|
||||
d := &Downloader{
|
||||
logger: opts.Logger,
|
||||
wsEventManager: opts.WSEventManager,
|
||||
downloadDir: opts.DownloadDir,
|
||||
cancelChannels: make(map[DownloadID]chan struct{}),
|
||||
runCh: runCh,
|
||||
queue: NewQueue(opts.Database, opts.Logger, opts.WSEventManager, runCh),
|
||||
chapterDownloadedCh: make(chan DownloadID, 100),
|
||||
}
|
||||
|
||||
return d
|
||||
}
|
||||
|
||||
// Start spins up a goroutine that will listen to queue events.
// Items are processed one at a time: `run` is called synchronously for each
// QueueInfo received on runCh.
// NOTE(review): the goroutine has no shutdown path; it lives for the life of
// the process.
func (cd *Downloader) Start() {
	go func() {
		for {
			select {
			// Listen for new queue items
			case queueInfo := <-cd.runCh:
				cd.logger.Debug().Msgf("chapter downloader: Received queue item to download: %s", queueInfo.ChapterId)
				cd.run(queueInfo)
			}
		}
	}()
}
|
||||
|
||||
// ChapterDownloaded returns a receive-only channel that emits the DownloadID
// of each chapter once it has been fully downloaded.
func (cd *Downloader) ChapterDownloaded() <-chan DownloadID {
	return cd.chapterDownloadedCh
}
|
||||
|
||||
// AddToQueue adds a chapter to the download queue.
|
||||
// If the chapter is already downloaded (i.e. a folder already exists), it will delete the previous data and re-download it.
|
||||
func (cd *Downloader) AddToQueue(opts DownloadOptions) error {
|
||||
cd.mu.Lock()
|
||||
defer cd.mu.Unlock()
|
||||
|
||||
downloadId := opts.DownloadID
|
||||
|
||||
// Check if chapter is already downloaded
|
||||
registryPath := cd.getChapterRegistryPath(downloadId)
|
||||
if _, err := os.Stat(registryPath); err == nil {
|
||||
cd.logger.Warn().Msg("chapter downloader: directory already exists, deleting")
|
||||
// Delete folder
|
||||
_ = os.RemoveAll(cd.getChapterDownloadDir(downloadId))
|
||||
}
|
||||
|
||||
// Start download
|
||||
cd.logger.Debug().Msgf("chapter downloader: Adding chapter to download queue: %s", opts.ChapterId)
|
||||
// Add to queue
|
||||
return cd.queue.Add(downloadId, opts.Pages, opts.StartNow)
|
||||
}
|
||||
|
||||
// DeleteChapter deletes a chapter directory from the download directory.
|
||||
func (cd *Downloader) DeleteChapter(id DownloadID) error {
|
||||
cd.mu.Lock()
|
||||
defer cd.mu.Unlock()
|
||||
|
||||
cd.logger.Debug().Msgf("chapter downloader: Deleting chapter %s", id.ChapterId)
|
||||
|
||||
_ = os.RemoveAll(cd.getChapterDownloadDir(id))
|
||||
cd.logger.Debug().Msgf("chapter downloader: Removed chapter %s", id.ChapterId)
|
||||
return nil
|
||||
}
|
||||
|
||||
// Run starts the downloader if it's not already running.
// It re-creates the cancellation channel (closed by Stop) and starts the queue.
func (cd *Downloader) Run() {
	cd.mu.Lock()
	defer cd.mu.Unlock()

	cd.logger.Debug().Msg("chapter downloader: Starting queue")

	// Fresh channel so downloads canceled by a previous Stop can resume.
	cd.cancelCh = make(chan struct{})

	cd.queue.Run()
}
|
||||
|
||||
// Stop cancels the download process and stops the queue from running.
|
||||
func (cd *Downloader) Stop() {
|
||||
cd.mu.Lock()
|
||||
defer cd.mu.Unlock()
|
||||
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
cd.logger.Error().Msgf("chapter downloader: cancelCh is already closed")
|
||||
}
|
||||
}()
|
||||
|
||||
cd.cancelCh = make(chan struct{})
|
||||
|
||||
close(cd.cancelCh) // Cancel download process
|
||||
|
||||
cd.queue.Stop()
|
||||
}
|
||||
|
||||
// run downloads the chapter based on the QueueInfo provided.
// This is called successively for each current item being processed.
// It invokes downloadChapterImages to download the chapter pages and, on
// success, publishes the DownloadID on chapterDownloadedCh.
func (cd *Downloader) run(queueInfo *QueueInfo) {

	defer util.HandlePanicInModuleThen("internal/manga/downloader/runNext", func() {
		cd.logger.Error().Msg("chapter downloader: Panic in 'run'")
	})

	// Download chapter images
	if err := cd.downloadChapterImages(queueInfo); err != nil {
		// The queue item was already marked as errored; nothing to signal.
		return
	}

	cd.chapterDownloadedCh <- queueInfo.DownloadID
}
|
||||
|
||||
// downloadChapterImages creates a directory for the chapter and downloads each image to that directory.
// It also creates a Registry file that contains information about each image.
// Pages are downloaded concurrently, bounded by a semaphore whose size scales
// with the number of pages; a closed cancelCh makes remaining pages bail out.
//
// e.g.,
// 📁 {provider}_{mediaId}_{chapterId}_{chapterNumber}
// ├── 📄 registry.json
// ├── 📄 1.jpg
// ├── 📄 2.jpg
// └── 📄 ...
func (cd *Downloader) downloadChapterImages(queueInfo *QueueInfo) (err error) {

	// Create download directory
	// 📁 {provider}_{mediaId}_{chapterId}
	destination := cd.getChapterDownloadDir(queueInfo.DownloadID)
	if err = os.MkdirAll(destination, os.ModePerm); err != nil {
		cd.logger.Error().Err(err).Msgf("chapter downloader: Failed to create download directory for chapter %s", queueInfo.ChapterId)
		return err
	}

	cd.logger.Debug().Msgf("chapter downloader: Downloading chapter %s images to %s", queueInfo.ChapterId, destination)

	registry := make(Registry)

	// calculateBatchSize calculates the batch size based on the number of URLs.
	// Roughly one worker per 10 pages, clamped to the range [1, 5].
	calculateBatchSize := func(numURLs int) int {
		maxBatchSize := 5
		batchSize := numURLs / 10
		if batchSize < 1 {
			return 1
		} else if batchSize > maxBatchSize {
			return maxBatchSize
		}
		return batchSize
	}

	// Download images
	batchSize := calculateBatchSize(len(queueInfo.Pages))

	var wg sync.WaitGroup
	semaphore := make(chan struct{}, batchSize) // Semaphore to control concurrency
	for _, page := range queueInfo.Pages {
		semaphore <- struct{}{} // Acquire semaphore
		wg.Add(1)
		go func(page *hibikemanga.ChapterPage, registry *Registry) {
			defer func() {
				<-semaphore // Release semaphore
				wg.Done()
			}()
			select {
			case <-cd.cancelCh:
				// Stop was called: skip this page (its absence in the registry
				// later marks the chapter as errored in save).
				//cd.logger.Warn().Msg("chapter downloader: Download goroutine canceled")
				return
			default:
				cd.downloadPage(page, destination, registry)
			}
		}(page, &registry)
	}
	wg.Wait()

	// Write the registry; save also verifies every page was downloaded and
	// flips queueInfo.Status to errored otherwise.
	_ = registry.save(queueInfo, destination, cd.logger)

	cd.queue.HasCompleted(queueInfo)

	if queueInfo.Status != QueueStatusErrored {
		cd.logger.Info().Msgf("chapter downloader: Finished downloading chapter %s", queueInfo.ChapterId)
	}

	if queueInfo.Status == QueueStatusErrored {
		return fmt.Errorf("chapter downloader: Failed to download chapter %s", queueInfo.ChapterId)
	}

	return
}
|
||||
|
||||
// downloadPage downloads a single page from the URL and saves it to the destination directory.
|
||||
// It also updates the Registry with the page information.
|
||||
func (cd *Downloader) downloadPage(page *hibikemanga.ChapterPage, destination string, registry *Registry) {
|
||||
|
||||
defer util.HandlePanicInModuleThen("manga/downloader/downloadImage", func() {
|
||||
})
|
||||
|
||||
// Download image from URL
|
||||
|
||||
imgID := fmt.Sprintf("%02d", page.Index+1)
|
||||
|
||||
buf, err := manga_providers.GetImageByProxy(page.URL, page.Headers)
|
||||
if err != nil {
|
||||
cd.logger.Error().Err(err).Msgf("chapter downloader: Failed to get image from URL %s", page.URL)
|
||||
return
|
||||
}
|
||||
|
||||
// Get the image format
|
||||
config, format, err := image.DecodeConfig(bytes.NewReader(buf))
|
||||
if err != nil {
|
||||
cd.logger.Error().Err(err).Msgf("chapter downloader: Failed to decode image format from URL %s", page.URL)
|
||||
return
|
||||
}
|
||||
|
||||
filename := imgID + "." + format
|
||||
|
||||
// Create the file
|
||||
filePath := filepath.Join(destination, filename)
|
||||
file, err := os.Create(filePath)
|
||||
if err != nil {
|
||||
cd.logger.Error().Err(err).Msgf("chapter downloader: Failed to create file for image %s", imgID)
|
||||
return
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
// Copy the image data to the file
|
||||
_, err = io.Copy(file, bytes.NewReader(buf))
|
||||
if err != nil {
|
||||
cd.logger.Error().Err(err).Msgf("image downloader: Failed to write image data to file for image from %s", page.URL)
|
||||
return
|
||||
}
|
||||
|
||||
// Update registry
|
||||
cd.downloadMu.Lock()
|
||||
(*registry)[page.Index] = PageInfo{
|
||||
Index: page.Index,
|
||||
Width: config.Width,
|
||||
Height: config.Height,
|
||||
Filename: filename,
|
||||
OriginalURL: page.URL,
|
||||
Size: int64(len(buf)),
|
||||
}
|
||||
cd.downloadMu.Unlock()
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
////////////////////////
|
||||
|
||||
// save saves the Registry content to a file in the chapter directory.
|
||||
func (r *Registry) save(queueInfo *QueueInfo, destination string, logger *zerolog.Logger) (err error) {
|
||||
|
||||
defer util.HandlePanicInModuleThen("manga/downloader/save", func() {
|
||||
err = fmt.Errorf("chapter downloader: Failed to save registry content")
|
||||
})
|
||||
|
||||
// Verify all images have been downloaded
|
||||
allDownloaded := true
|
||||
for _, page := range queueInfo.Pages {
|
||||
if _, ok := (*r)[page.Index]; !ok {
|
||||
allDownloaded = false
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if !allDownloaded {
|
||||
// Clean up downloaded images
|
||||
logger.Error().Msg("chapter downloader: Not all images have been downloaded, aborting")
|
||||
queueInfo.Status = QueueStatusErrored
|
||||
// Delete directory
|
||||
go os.RemoveAll(destination)
|
||||
return fmt.Errorf("chapter downloader: Not all images have been downloaded, operation aborted")
|
||||
}
|
||||
|
||||
// Create registry file
|
||||
var data []byte
|
||||
data, err = json.Marshal(*r)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
registryFilePath := filepath.Join(destination, "registry.json")
|
||||
err = os.WriteFile(registryFilePath, data, 0644)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func (cd *Downloader) getChapterDownloadDir(downloadId DownloadID) string {
|
||||
return filepath.Join(cd.downloadDir, FormatChapterDirName(downloadId.Provider, downloadId.MediaId, downloadId.ChapterId, downloadId.ChapterNumber))
|
||||
}
|
||||
|
||||
func FormatChapterDirName(provider string, mediaId int, chapterId string, chapterNumber string) string {
|
||||
return fmt.Sprintf("%s_%d_%s_%s", provider, mediaId, EscapeChapterID(chapterId), chapterNumber)
|
||||
}
|
||||
|
||||
// ParseChapterDirName parses a chapter directory name and returns the DownloadID.
|
||||
// e.g. comick_1234_chapter$UNDERSCORE$id_13.5 -> {Provider: "comick", MediaId: 1234, ChapterId: "chapter_id", ChapterNumber: "13.5"}
|
||||
func ParseChapterDirName(dirName string) (id DownloadID, ok bool) {
|
||||
parts := strings.Split(dirName, "_")
|
||||
if len(parts) != 4 {
|
||||
return id, false
|
||||
}
|
||||
|
||||
id.Provider = parts[0]
|
||||
var err error
|
||||
id.MediaId, err = strconv.Atoi(parts[1])
|
||||
if err != nil {
|
||||
return id, false
|
||||
}
|
||||
id.ChapterId = UnescapeChapterID(parts[2])
|
||||
id.ChapterNumber = parts[3]
|
||||
|
||||
ok = true
|
||||
return
|
||||
}
|
||||
|
||||
// chapterIDEscapes maps characters that are unsafe in directory names (or
// that would break ParseChapterDirName's underscore-splitting) to sentinel
// tokens. Applied in order; no token contains another entry's character, so
// sequential replacement is unambiguous.
var chapterIDEscapes = [][2]string{
	{"/", "$SLASH$"},
	{"\\", "$BSLASH$"},
	{":", "$COLON$"},
	{"*", "$ASTERISK$"},
	{"?", "$QUESTION$"},
	{"\"", "$QUOTE$"},
	{"<", "$LT$"},
	{">", "$GT$"},
	{"|", "$PIPE$"},
	{".", "$DOT$"},
	{" ", "$SPACE$"},
	{"_", "$UNDERSCORE$"},
}

// EscapeChapterID replaces filesystem-unsafe characters in a chapter ID with
// sentinel tokens so the ID can be embedded in a directory name.
func EscapeChapterID(id string) string {
	for _, e := range chapterIDEscapes {
		id = strings.ReplaceAll(id, e[0], e[1])
	}
	return id
}

// UnescapeChapterID reverses EscapeChapterID, restoring the original chapter ID.
func UnescapeChapterID(id string) string {
	for _, e := range chapterIDEscapes {
		id = strings.ReplaceAll(id, e[1], e[0])
	}
	return id
}
|
||||
|
||||
func (cd *Downloader) getChapterRegistryPath(downloadId DownloadID) string {
|
||||
return filepath.Join(cd.getChapterDownloadDir(downloadId), "registry.json")
|
||||
}
|
||||
@@ -0,0 +1,112 @@
|
||||
package chapter_downloader
|
||||
|
||||
import (
|
||||
"github.com/stretchr/testify/assert"
|
||||
"seanime/internal/database/db"
|
||||
"seanime/internal/events"
|
||||
hibikemanga "seanime/internal/extension/hibike/manga"
|
||||
"seanime/internal/manga/providers"
|
||||
"seanime/internal/test_utils"
|
||||
"seanime/internal/util"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func TestQueue(t *testing.T) {
|
||||
test_utils.InitTestProvider(t)
|
||||
|
||||
tempDir := t.TempDir()
|
||||
|
||||
logger := util.NewLogger()
|
||||
database, err := db.NewDatabase(tempDir, test_utils.ConfigData.Database.Name, logger)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create database: %v", err)
|
||||
}
|
||||
|
||||
downloadDir := t.TempDir()
|
||||
|
||||
downloader := NewDownloader(&NewDownloaderOptions{
|
||||
Logger: logger,
|
||||
WSEventManager: events.NewMockWSEventManager(logger),
|
||||
Database: database,
|
||||
DownloadDir: downloadDir,
|
||||
})
|
||||
|
||||
downloader.Start()
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
providerName string
|
||||
provider hibikemanga.Provider
|
||||
mangaId string
|
||||
mediaId int
|
||||
chapterIndex uint
|
||||
}{
|
||||
{
|
||||
providerName: manga_providers.ComickProvider,
|
||||
provider: manga_providers.NewComicK(util.NewLogger()),
|
||||
name: "Jujutsu Kaisen",
|
||||
mangaId: "TA22I5O7",
|
||||
chapterIndex: 258,
|
||||
mediaId: 101517,
|
||||
},
|
||||
{
|
||||
providerName: manga_providers.ComickProvider,
|
||||
provider: manga_providers.NewComicK(util.NewLogger()),
|
||||
name: "Jujutsu Kaisen",
|
||||
mangaId: "TA22I5O7",
|
||||
chapterIndex: 259,
|
||||
mediaId: 101517,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
|
||||
// SETUP
|
||||
chapters, err := tt.provider.FindChapters(tt.mangaId)
|
||||
if assert.NoError(t, err, "comick.FindChapters() error") {
|
||||
|
||||
assert.NotEmpty(t, chapters, "chapters is empty")
|
||||
|
||||
var chapterInfo *hibikemanga.ChapterDetails
|
||||
for _, chapter := range chapters {
|
||||
if chapter.Index == tt.chapterIndex {
|
||||
chapterInfo = chapter
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if assert.NotNil(t, chapterInfo, "chapter not found") {
|
||||
pages, err := tt.provider.FindChapterPages(chapterInfo.ID)
|
||||
if assert.NoError(t, err, "provider.FindChapterPages() error") {
|
||||
assert.NotEmpty(t, pages, "pages is empty")
|
||||
|
||||
//
|
||||
// TEST
|
||||
//
|
||||
err := downloader.AddToQueue(DownloadOptions{
|
||||
DownloadID: DownloadID{
|
||||
Provider: string(tt.providerName),
|
||||
MediaId: tt.mediaId,
|
||||
ChapterId: chapterInfo.ID,
|
||||
ChapterNumber: chapterInfo.Chapter,
|
||||
},
|
||||
Pages: pages,
|
||||
StartNow: true,
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to download chapter: %v", err)
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
time.Sleep(10 * time.Second)
|
||||
}
|
||||
223
seanime-2.9.10/internal/manga/downloader/queue.go
Normal file
223
seanime-2.9.10/internal/manga/downloader/queue.go
Normal file
@@ -0,0 +1,223 @@
|
||||
package chapter_downloader
|
||||
|
||||
import (
|
||||
"github.com/goccy/go-json"
|
||||
"github.com/rs/zerolog"
|
||||
"seanime/internal/database/db"
|
||||
"seanime/internal/database/models"
|
||||
"seanime/internal/events"
|
||||
hibikemanga "seanime/internal/extension/hibike/manga"
|
||||
"seanime/internal/util"
|
||||
"sync"
|
||||
"time"
|
||||
)
|
||||
|
||||
const (
	// QueueStatusNotStarted marks an item that is queued but not yet picked up.
	QueueStatusNotStarted QueueStatus = "not_started"
	// QueueStatusDownloading marks the item currently being processed.
	QueueStatusDownloading QueueStatus = "downloading"
	// QueueStatusErrored marks an item whose download failed.
	QueueStatusErrored QueueStatus = "errored"
)

type (
	// Queue is used to manage the download queue.
	// It feeds the downloader with the next item in the queue.
	Queue struct {
		logger *zerolog.Logger
		mu     sync.Mutex
		db     *db.Database
		// current is the item being downloaded; nil when the queue is idle.
		current *QueueInfo
		runCh   chan *QueueInfo // Channel to tell downloader to run the next item
		// active reports whether the queue is allowed to dispatch items
		// (toggled by Run/Stop).
		active         bool
		wsEventManager events.WSEventManagerInterface
	}

	// QueueStatus is the lifecycle state of a queue item, persisted in the database.
	QueueStatus string

	// QueueInfo stores details about the download progress of a chapter.
	QueueInfo struct {
		DownloadID
		// Pages to download, as returned by the provider.
		Pages []*hibikemanga.ChapterPage
		// DownloadedUrls is initialized empty when the item starts;
		// presumably appended to by the downloader — confirm at call sites.
		DownloadedUrls []string
		Status         QueueStatus
	}
)
|
||||
|
||||
func NewQueue(db *db.Database, logger *zerolog.Logger, wsEventManager events.WSEventManagerInterface, runCh chan *QueueInfo) *Queue {
|
||||
return &Queue{
|
||||
logger: logger,
|
||||
db: db,
|
||||
runCh: runCh,
|
||||
wsEventManager: wsEventManager,
|
||||
}
|
||||
}
|
||||
|
||||
// Add adds a chapter to the download queue.
|
||||
// It tells the queue to download the next item if possible.
|
||||
func (q *Queue) Add(id DownloadID, pages []*hibikemanga.ChapterPage, runNext bool) error {
|
||||
q.mu.Lock()
|
||||
defer q.mu.Unlock()
|
||||
|
||||
marshalled, err := json.Marshal(pages)
|
||||
if err != nil {
|
||||
q.logger.Error().Err(err).Msgf("Failed to marshal pages for id %v", id)
|
||||
return err
|
||||
}
|
||||
|
||||
err = q.db.InsertChapterDownloadQueueItem(&models.ChapterDownloadQueueItem{
|
||||
BaseModel: models.BaseModel{},
|
||||
Provider: id.Provider,
|
||||
MediaID: id.MediaId,
|
||||
ChapterNumber: id.ChapterNumber,
|
||||
ChapterID: id.ChapterId,
|
||||
PageData: marshalled,
|
||||
Status: string(QueueStatusNotStarted),
|
||||
})
|
||||
if err != nil {
|
||||
q.logger.Error().Err(err).Msgf("Failed to insert chapter download queue item for id %v", id)
|
||||
return err
|
||||
}
|
||||
|
||||
q.logger.Info().Msgf("chapter downloader: Added chapter to download queue: %s", id.ChapterId)
|
||||
|
||||
q.wsEventManager.SendEvent(events.ChapterDownloadQueueUpdated, nil)
|
||||
|
||||
if runNext && q.active {
|
||||
// Tells queue to run next if possible
|
||||
go q.runNext()
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (q *Queue) HasCompleted(queueInfo *QueueInfo) {
|
||||
q.mu.Lock()
|
||||
defer q.mu.Unlock()
|
||||
|
||||
if queueInfo.Status == QueueStatusErrored {
|
||||
q.logger.Warn().Msgf("chapter downloader: Errored %s", queueInfo.DownloadID.ChapterId)
|
||||
// Update the status of the current item in the database.
|
||||
_ = q.db.UpdateChapterDownloadQueueItemStatus(q.current.DownloadID.Provider, q.current.DownloadID.MediaId, q.current.DownloadID.ChapterId, string(QueueStatusErrored))
|
||||
} else {
|
||||
q.logger.Debug().Msgf("chapter downloader: Dequeueing %s", queueInfo.DownloadID.ChapterId)
|
||||
// Dequeue the item from the database.
|
||||
_, err := q.db.DequeueChapterDownloadQueueItem()
|
||||
if err != nil {
|
||||
q.logger.Error().Err(err).Msgf("Failed to dequeue chapter download queue item for id %v", queueInfo.DownloadID)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
q.wsEventManager.SendEvent(events.ChapterDownloadQueueUpdated, nil)
|
||||
q.wsEventManager.SendEvent(events.RefreshedMangaDownloadData, nil)
|
||||
|
||||
// Reset current item
|
||||
q.current = nil
|
||||
|
||||
if q.active {
|
||||
// Tells queue to run next if possible
|
||||
q.runNext()
|
||||
}
|
||||
}
|
||||
|
||||
// Run activates the queue and invokes runNext
|
||||
func (q *Queue) Run() {
|
||||
q.mu.Lock()
|
||||
defer q.mu.Unlock()
|
||||
|
||||
if !q.active {
|
||||
q.logger.Debug().Msg("chapter downloader: Starting queue")
|
||||
}
|
||||
|
||||
q.active = true
|
||||
|
||||
// Tells queue to run next if possible
|
||||
q.runNext()
|
||||
}
|
||||
|
||||
// Stop deactivates the queue
|
||||
func (q *Queue) Stop() {
|
||||
q.mu.Lock()
|
||||
defer q.mu.Unlock()
|
||||
|
||||
if q.active {
|
||||
q.logger.Debug().Msg("chapter downloader: Stopping queue")
|
||||
}
|
||||
|
||||
q.active = false
|
||||
}
|
||||
|
||||
// runNext runs the next item in the queue.
// - Checks if there is a current item, if so, it returns.
// - If nothing is running, it gets the next item (QueueInfo) from the database, sets it as current and sends it to the downloader.
//
// NOTE(review): runNext reads and writes q.current without acquiring q.mu
// itself. Run, Stop and HasCompleted call it while holding the lock, but
// Queue.Add launches it via `go q.runNext()` after the lock is released —
// confirm this access pattern is safe.
func (q *Queue) runNext() {

	q.logger.Debug().Msg("chapter downloader: Processing next item in queue")

	// Catch panic in runNext, so it doesn't bubble up and stop goroutines.
	defer util.HandlePanicInModuleThen("internal/manga/downloader/runNext", func() {
		q.logger.Error().Msg("chapter downloader: Panic in 'runNext'")
	})

	// A non-nil current item means a download is already in flight.
	if q.current != nil {
		q.logger.Debug().Msg("chapter downloader: Current item is not nil")
		return
	}

	q.logger.Debug().Msg("chapter downloader: Checking next item in queue")

	// Get next item from the database; the lookup error is intentionally
	// discarded and treated as "queue empty".
	next, _ := q.db.GetNextChapterDownloadQueueItem()
	if next == nil {
		q.logger.Debug().Msg("chapter downloader: No next item in queue")
		return
	}

	id := DownloadID{
		Provider:      next.Provider,
		MediaId:       next.MediaID,
		ChapterId:     next.ChapterID,
		ChapterNumber: next.ChapterNumber,
	}

	q.logger.Debug().Msgf("chapter downloader: Preparing next item in queue: %s", id.ChapterId)

	q.wsEventManager.SendEvent(events.ChapterDownloadQueueUpdated, nil)
	// Update status
	_ = q.db.UpdateChapterDownloadQueueItemStatus(id.Provider, id.MediaId, id.ChapterId, string(QueueStatusDownloading))

	// Set the current item.
	q.current = &QueueInfo{
		DownloadID:     id,
		DownloadedUrls: make([]string, 0),
		Status:         QueueStatusDownloading,
	}

	// Unmarshal the page data; on failure the item is reset to "not started"
	// so it can be retried rather than left stuck in "downloading".
	err := json.Unmarshal(next.PageData, &q.current.Pages)
	if err != nil {
		q.logger.Error().Err(err).Msgf("Failed to unmarshal pages for id %v", id)
		_ = q.db.UpdateChapterDownloadQueueItemStatus(id.Provider, id.MediaId, id.ChapterId, string(QueueStatusNotStarted))
		return
	}

	// TODO: This is a temporary fix to prevent the downloader from running too fast.
	time.Sleep(5 * time.Second)

	q.logger.Info().Msgf("chapter downloader: Running next item in queue: %s", id.ChapterId)

	// Tell Downloader to run. This send blocks until the downloader receives.
	q.runCh <- q.current
}
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
func (q *Queue) GetCurrent() (qi *QueueInfo, ok bool) {
|
||||
q.mu.Lock()
|
||||
defer q.mu.Unlock()
|
||||
|
||||
if q.current == nil {
|
||||
return nil, false
|
||||
}
|
||||
|
||||
return q.current, true
|
||||
}
|
||||
323
seanime-2.9.10/internal/manga/downloads.go
Normal file
323
seanime-2.9.10/internal/manga/downloads.go
Normal file
@@ -0,0 +1,323 @@
|
||||
package manga
|
||||
|
||||
import (
|
||||
"cmp"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"seanime/internal/api/anilist"
|
||||
"seanime/internal/extension"
|
||||
hibikemanga "seanime/internal/extension/hibike/manga"
|
||||
"seanime/internal/hook"
|
||||
chapter_downloader "seanime/internal/manga/downloader"
|
||||
manga_providers "seanime/internal/manga/providers"
|
||||
"slices"
|
||||
|
||||
"github.com/goccy/go-json"
|
||||
)
|
||||
|
||||
// GetDownloadedMangaChapterContainers retrieves downloaded chapter containers for a specific manga ID.
|
||||
// It filters the complete set of downloaded chapters to return only those matching the provided manga ID.
|
||||
func (r *Repository) GetDownloadedMangaChapterContainers(mId int, mangaCollection *anilist.MangaCollection) (ret []*ChapterContainer, err error) {
|
||||
|
||||
containers, err := r.GetDownloadedChapterContainers(mangaCollection)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for _, container := range containers {
|
||||
if container.MediaId == mId {
|
||||
ret = append(ret, container)
|
||||
}
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
// GetDownloadedChapterContainers retrieves all downloaded manga chapter containers.
// It scans the download directory for chapter folders, matches them with manga collection entries,
// and collects chapter details from file cache or provider API when necessary.
//
// Ideally, the provider API should never be called assuming the chapter details are cached.
func (r *Repository) GetDownloadedChapterContainers(mangaCollection *anilist.MangaCollection) (ret []*ChapterContainer, err error) {
	ret = make([]*ChapterContainer, 0)

	// Trigger hook event so extensions can replace or mutate the result.
	reqEvent := &MangaDownloadedChapterContainersRequestedEvent{
		MangaCollection:   mangaCollection,
		ChapterContainers: ret,
	}
	err = hook.GlobalHookManager.OnMangaDownloadedChapterContainersRequested().Trigger(reqEvent)
	if err != nil {
		r.logger.Error().Err(err).Msg("manga: Exception occurred while triggering hook event")
		return nil, fmt.Errorf("manga: Error in hook, %w", err)
	}
	mangaCollection = reqEvent.MangaCollection

	// Default prevented: return the chapter containers supplied by the hook.
	if reqEvent.DefaultPrevented {
		ret = reqEvent.ChapterContainers
		if ret == nil {
			return nil, fmt.Errorf("manga: No chapter containers returned by hook event")
		}
		return ret, nil
	}

	// Read download directory
	files, err := os.ReadDir(r.downloadDir)
	if err != nil {
		r.logger.Error().Err(err).Msg("manga: Failed to read download directory")
		return nil, err
	}

	// Get all chapter directories (only names that parse as a DownloadID).
	// e.g. manga_comick_123_10010_13
	chapterDirs := make([]string, 0)
	for _, file := range files {
		if file.IsDir() {
			_, ok := chapter_downloader.ParseChapterDirName(file.Name())
			if !ok {
				continue
			}
			chapterDirs = append(chapterDirs, file.Name())
		}
	}

	if len(chapterDirs) == 0 {
		return nil, nil
	}

	// Now that we have all the chapter directories, we can get the chapter containers

	keys := make([]*chapter_downloader.DownloadID, 0)
	for _, dir := range chapterDirs {
		downloadId, ok := chapter_downloader.ParseChapterDirName(dir)
		if !ok {
			continue
		}
		keys = append(keys, &downloadId)
	}

	// Deduplicate (provider, mediaId) pairs so each manga/provider combination
	// is processed only once.
	providerAndMediaIdPairs := make(map[struct {
		provider string
		mediaId  int
	}]bool)

	for _, key := range keys {
		providerAndMediaIdPairs[struct {
			provider string
			mediaId  int
		}{
			provider: key.Provider,
			mediaId:  key.MediaId,
		}] = true
	}

	// Get the chapter containers
	for pair := range providerAndMediaIdPairs {
		provider := pair.provider
		mediaId := pair.mediaId

		// Get the manga from the collection
		mangaEntry, ok := mangaCollection.GetListEntryFromMangaId(mediaId)
		if !ok {
			r.logger.Warn().Int("mediaId", mediaId).Msg("manga: [GetDownloadedChapterContainers] Manga not found in collection")
			continue
		}

		// Get the list of chapters for the manga.
		// Check the permanent file cache first, then the temporary one.
		container, found := r.getChapterContainerFromPermanentFilecache(provider, mediaId)
		if !found {
			// Check the temporary file cache
			container, found = r.getChapterContainerFromFilecache(provider, mediaId)
			if !found {
				// Get the chapters from the provider
				// This stays here for backwards compatibility, but ideally the method should not require an internet connection
				// so this will fail if the chapters were not cached & with no internet
				opts := GetMangaChapterContainerOptions{
					Provider: provider,
					MediaId:  mediaId,
					Titles:   mangaEntry.GetMedia().GetAllTitles(),
					Year:     mangaEntry.GetMedia().GetStartYearSafe(),
				}
				container, err = r.GetMangaChapterContainer(&opts)
				if err != nil {
					r.logger.Error().Err(err).Int("mediaId", mediaId).Msg("manga: [GetDownloadedChapterContainers] Failed to retrieve cached list of manga chapters")
					continue
				}
				// Cache the chapter container in the permanent bucket
				// (fire-and-forget; re-reads the temporary cache populated by
				// the GetMangaChapterContainer call above).
				go func() {
					chapterContainerKey := getMangaChapterContainerCacheKey(provider, mediaId)
					chapterContainer, found := r.getChapterContainerFromFilecache(provider, mediaId)
					if found {
						// Store the chapter container in the permanent bucket
						permBucket := getPermanentChapterContainerCacheBucket(provider, mediaId)
						_ = r.fileCacher.SetPerm(permBucket, chapterContainerKey, chapterContainer)
					}
				}()
			}
		} else {
			r.logger.Trace().Int("mediaId", mediaId).Msg("manga: Found chapter container in permanent bucket")
		}

		downloadedContainer := &ChapterContainer{
			MediaId:  container.MediaId,
			Provider: container.Provider,
			Chapters: make([]*hibikemanga.ChapterDetails, 0),
		}

		// Now that we have the container, we'll filter out the chapters that are not downloaded
		// Go through each chapter and check if it's downloaded
		for _, chapter := range container.Chapters {
			// For each chapter, check if the chapter directory exists
			for _, dir := range chapterDirs {
				if dir == chapter_downloader.FormatChapterDirName(provider, mediaId, chapter.ID, chapter.Chapter) {
					downloadedContainer.Chapters = append(downloadedContainer.Chapters, chapter)
					break
				}
			}
		}

		if len(downloadedContainer.Chapters) == 0 {
			continue
		}

		ret = append(ret, downloadedContainer)
	}

	// Add chapter containers from local provider
	localProviderB, ok := extension.GetExtension[extension.MangaProviderExtension](r.providerExtensionBank, manga_providers.LocalProvider)
	if ok {
		_, ok := localProviderB.GetProvider().(*manga_providers.Local)
		if ok {
			for _, list := range mangaCollection.MediaListCollection.GetLists() {
				for _, entry := range list.GetEntries() {
					media := entry.GetMedia()
					opts := GetMangaChapterContainerOptions{
						Provider: manga_providers.LocalProvider,
						MediaId:  media.GetID(),
						Titles:   media.GetAllTitles(),
						Year:     media.GetStartYearSafe(),
					}
					container, err := r.GetMangaChapterContainer(&opts)
					if err != nil {
						continue
					}
					ret = append(ret, container)
				}
			}
		}
	}

	// Trigger the "returning" hook event so extensions can post-process the result.
	ev := &MangaDownloadedChapterContainersEvent{
		ChapterContainers: ret,
	}
	err = hook.GlobalHookManager.OnMangaDownloadedChapterContainers().Trigger(ev)
	if err != nil {
		r.logger.Error().Err(err).Msg("manga: Exception occurred while triggering hook event")
		return nil, fmt.Errorf("manga: Error in hook, %w", err)
	}
	ret = ev.ChapterContainers

	return ret, nil
}
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
// getDownloadedMangaPageContainer retrieves page information for a downloaded manga chapter.
// It reads the chapter directory and parses the registry file to build a PageContainer
// with details about each downloaded page including dimensions and file paths.
// Returns ErrChapterNotDownloaded when no matching chapter directory exists.
func (r *Repository) getDownloadedMangaPageContainer(
	provider string,
	mediaId int,
	chapterId string,
) (*PageContainer, error) {

	// Check if the chapter is downloaded
	found := false

	// Read download directory
	files, err := os.ReadDir(r.downloadDir)
	if err != nil {
		r.logger.Error().Err(err).Msg("manga: Failed to read download directory")
		return nil, err
	}

	// Find the directory whose parsed DownloadID matches the requested chapter.
	chapterDir := "" // e.g. manga_comick_123_10010_13
	for _, file := range files {
		if file.IsDir() {

			downloadId, ok := chapter_downloader.ParseChapterDirName(file.Name())
			if !ok {
				continue
			}

			if downloadId.Provider == provider &&
				downloadId.MediaId == mediaId &&
				downloadId.ChapterId == chapterId {
				found = true
				chapterDir = file.Name()
				break
			}
		}
	}

	if !found {
		return nil, ErrChapterNotDownloaded
	}

	r.logger.Debug().Msg("manga: Found downloaded chapter directory")

	// Open registry file
	registryFile, err := os.Open(filepath.Join(r.downloadDir, chapterDir, "registry.json"))
	if err != nil {
		r.logger.Error().Err(err).Msg("manga: Failed to open registry file")
		return nil, err
	}
	defer registryFile.Close()

	r.logger.Debug().Str("chapterId", chapterId).Msg("manga: Reading registry file")

	// Read registry file
	var pageRegistry *chapter_downloader.Registry
	err = json.NewDecoder(registryFile).Decode(&pageRegistry)
	if err != nil {
		r.logger.Error().Err(err).Msg("manga: Failed to decode registry file")
		return nil, err
	}

	pageList := make([]*hibikemanga.ChapterPage, 0)
	pageDimensions := make(map[int]*PageDimension)

	// Get the downloaded pages. The page URL is the path of the local file,
	// relative to the download directory.
	for pageIndex, pageInfo := range *pageRegistry {
		pageList = append(pageList, &hibikemanga.ChapterPage{
			Index:    pageIndex,
			URL:      filepath.Join(chapterDir, pageInfo.Filename),
			Provider: provider,
		})
		pageDimensions[pageIndex] = &PageDimension{
			Width:  pageInfo.Width,
			Height: pageInfo.Height,
		}
	}

	// Map iteration order is random; sort the pages by index for stable output.
	slices.SortStableFunc(pageList, func(i, j *hibikemanga.ChapterPage) int {
		return cmp.Compare(i.Index, j.Index)
	})

	container := &PageContainer{
		MediaId:        mediaId,
		Provider:       provider,
		ChapterId:      chapterId,
		Pages:          pageList,
		PageDimensions: pageDimensions,
		IsDownloaded:   true,
	}

	r.logger.Debug().Str("chapterId", chapterId).Msg("manga: Found downloaded chapter")

	return container, nil
}
|
||||
85
seanime-2.9.10/internal/manga/hook_events.go
Normal file
85
seanime-2.9.10/internal/manga/hook_events.go
Normal file
@@ -0,0 +1,85 @@
|
||||
package manga
|
||||
|
||||
import (
|
||||
"seanime/internal/api/anilist"
|
||||
"seanime/internal/hook_resolver"
|
||||
)
|
||||
|
||||
// MangaEntryRequestedEvent is triggered when a manga entry is requested.
// Prevent default to skip the default behavior and return the modified entry.
// If the modified entry is nil, an error will be returned.
type MangaEntryRequestedEvent struct {
	hook_resolver.Event
	MediaId         int                      `json:"mediaId"`
	MangaCollection *anilist.MangaCollection `json:"mangaCollection"`
	// Empty entry object, will be used if the hook prevents the default behavior
	Entry *Entry `json:"entry"`
}

// MangaEntryEvent is triggered when the manga entry is being returned.
type MangaEntryEvent struct {
	hook_resolver.Event
	Entry *Entry `json:"entry"`
}

// MangaLibraryCollectionRequestedEvent is triggered when the manga library collection is being requested.
type MangaLibraryCollectionRequestedEvent struct {
	hook_resolver.Event
	MangaCollection *anilist.MangaCollection `json:"mangaCollection"`
}

// MangaLibraryCollectionEvent is triggered when the manga library collection is being returned.
type MangaLibraryCollectionEvent struct {
	hook_resolver.Event
	LibraryCollection *Collection `json:"libraryCollection"`
}

// MangaDownloadedChapterContainersRequestedEvent is triggered when the manga downloaded chapter containers are being requested.
// Prevent default to skip the default behavior and return the modified chapter containers.
// If the modified chapter containers are nil, an error will be returned.
type MangaDownloadedChapterContainersRequestedEvent struct {
	hook_resolver.Event
	MangaCollection *anilist.MangaCollection `json:"mangaCollection"`
	// Empty chapter containers object, will be used if the hook prevents the default behavior
	ChapterContainers []*ChapterContainer `json:"chapterContainers"`
}

// MangaDownloadedChapterContainersEvent is triggered when the manga downloaded chapter containers are being returned.
type MangaDownloadedChapterContainersEvent struct {
	hook_resolver.Event
	ChapterContainers []*ChapterContainer `json:"chapterContainers"`
}

// MangaLatestChapterNumbersMapEvent is triggered when the manga latest chapter numbers map is being returned.
// Keys are AniList media IDs.
type MangaLatestChapterNumbersMapEvent struct {
	hook_resolver.Event
	LatestChapterNumbersMap map[int][]MangaLatestChapterNumberItem `json:"latestChapterNumbersMap"`
}

// MangaDownloadMapEvent is triggered when the manga download map has been updated.
// This map is used to tell the client which chapters have been downloaded.
type MangaDownloadMapEvent struct {
	hook_resolver.Event
	MediaMap *MediaMap `json:"mediaMap"`
}

// MangaChapterContainerRequestedEvent is triggered when the manga chapter container is being requested.
// This event happens before the chapter container is fetched from the cache or provider.
// Prevent default to skip the default behavior and return the modified chapter container.
// If the modified chapter container is nil, an error will be returned.
type MangaChapterContainerRequestedEvent struct {
	hook_resolver.Event
	Provider string    `json:"provider"`
	MediaId  int       `json:"mediaId"`
	Titles   []*string `json:"titles"`
	Year     int       `json:"year"`
	// Empty chapter container object, will be used if the hook prevents the default behavior
	ChapterContainer *ChapterContainer `json:"chapterContainer"`
}

// MangaChapterContainerEvent is triggered when the manga chapter container is being returned.
// This event happens after the chapter container is fetched from the cache or provider.
type MangaChapterContainerEvent struct {
	hook_resolver.Event
	ChapterContainer *ChapterContainer `json:"chapterContainer"`
}
|
||||
18
seanime-2.9.10/internal/manga/image_size_test.go
Normal file
18
seanime-2.9.10/internal/manga/image_size_test.go
Normal file
@@ -0,0 +1,18 @@
|
||||
package manga
|
||||
|
||||
import (
|
||||
"github.com/davecgh/go-spew/spew"
|
||||
_ "image/jpeg" // Register JPEG format
|
||||
_ "image/png" // Register PNG format
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestGetImageNaturalSize(t *testing.T) {
|
||||
// Test the function
|
||||
width, height, err := getImageNaturalSize("https://scans-hot.leanbox.us/manga/One-Piece/1090-001.png")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
spew.Dump(width, height)
|
||||
}
|
||||
@@ -0,0 +1,60 @@
|
||||
package manga
|
||||
|
||||
import (
|
||||
"context"
|
||||
"path/filepath"
|
||||
"seanime/internal/api/anilist"
|
||||
"seanime/internal/events"
|
||||
"seanime/internal/test_utils"
|
||||
"seanime/internal/util"
|
||||
"seanime/internal/util/filecache"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestGetDownloadedChapterContainers(t *testing.T) {
|
||||
t.Skip("include database")
|
||||
test_utils.SetTwoLevelDeep()
|
||||
test_utils.InitTestProvider(t, test_utils.Anilist())
|
||||
|
||||
anilistClient := anilist.TestGetMockAnilistClient()
|
||||
|
||||
mangaCollection, err := anilistClient.MangaCollection(context.Background(), &test_utils.ConfigData.Provider.AnilistUsername)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
logger := util.NewLogger()
|
||||
cacheDir := filepath.Join(test_utils.ConfigData.Path.DataDir, "cache")
|
||||
fileCacher, err := filecache.NewCacher(cacheDir)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
repository := NewRepository(&NewRepositoryOptions{
|
||||
Logger: logger,
|
||||
FileCacher: fileCacher,
|
||||
CacheDir: cacheDir,
|
||||
ServerURI: "",
|
||||
WsEventManager: events.NewMockWSEventManager(logger),
|
||||
DownloadDir: filepath.Join(test_utils.ConfigData.Path.DataDir, "manga"),
|
||||
Database: nil, // FIX
|
||||
})
|
||||
|
||||
// Test
|
||||
containers, err := repository.GetDownloadedChapterContainers(mangaCollection)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
for _, container := range containers {
|
||||
t.Logf("MediaId: %d", container.MediaId)
|
||||
t.Logf("Provider: %s", container.Provider)
|
||||
t.Logf("Chapters: ")
|
||||
for _, chapter := range container.Chapters {
|
||||
t.Logf(" %s", chapter.Title)
|
||||
}
|
||||
t.Log("-----------------------------------")
|
||||
t.Log("")
|
||||
}
|
||||
|
||||
}
|
||||
114
seanime-2.9.10/internal/manga/manga_entry.go
Normal file
114
seanime-2.9.10/internal/manga/manga_entry.go
Normal file
@@ -0,0 +1,114 @@
|
||||
package manga
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"seanime/internal/api/anilist"
|
||||
"seanime/internal/hook"
|
||||
"seanime/internal/platforms/anilist_platform"
|
||||
"seanime/internal/platforms/platform"
|
||||
"seanime/internal/util/filecache"
|
||||
|
||||
"github.com/rs/zerolog"
|
||||
)
|
||||
|
||||
type (
|
||||
// Entry is fetched when the user goes to the manga entry page.
|
||||
Entry struct {
|
||||
MediaId int `json:"mediaId"`
|
||||
Media *anilist.BaseManga `json:"media"`
|
||||
EntryListData *EntryListData `json:"listData,omitempty"`
|
||||
}
|
||||
|
||||
EntryListData struct {
|
||||
Progress int `json:"progress,omitempty"`
|
||||
Score float64 `json:"score,omitempty"`
|
||||
Status *anilist.MediaListStatus `json:"status,omitempty"`
|
||||
Repeat int `json:"repeat,omitempty"`
|
||||
StartedAt string `json:"startedAt,omitempty"`
|
||||
CompletedAt string `json:"completedAt,omitempty"`
|
||||
}
|
||||
)
|
||||
|
||||
type (
|
||||
// NewEntryOptions is the options for creating a new manga entry.
|
||||
NewEntryOptions struct {
|
||||
MediaId int
|
||||
Logger *zerolog.Logger
|
||||
FileCacher *filecache.Cacher
|
||||
MangaCollection *anilist.MangaCollection
|
||||
Platform platform.Platform
|
||||
}
|
||||
)
|
||||
|
||||
// NewEntry creates a new manga entry.
|
||||
func NewEntry(ctx context.Context, opts *NewEntryOptions) (entry *Entry, err error) {
|
||||
entry = &Entry{
|
||||
MediaId: opts.MediaId,
|
||||
}
|
||||
|
||||
reqEvent := new(MangaEntryRequestedEvent)
|
||||
reqEvent.MediaId = opts.MediaId
|
||||
reqEvent.MangaCollection = opts.MangaCollection
|
||||
reqEvent.Entry = entry
|
||||
|
||||
err = hook.GlobalHookManager.OnMangaEntryRequested().Trigger(reqEvent)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
opts.MediaId = reqEvent.MediaId // Override the media ID
|
||||
opts.MangaCollection = reqEvent.MangaCollection // Override the manga collection
|
||||
entry = reqEvent.Entry // Override the entry
|
||||
|
||||
if reqEvent.DefaultPrevented {
|
||||
mangaEvent := new(MangaEntryEvent)
|
||||
mangaEvent.Entry = reqEvent.Entry
|
||||
err = hook.GlobalHookManager.OnMangaEntry().Trigger(mangaEvent)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if mangaEvent.Entry == nil {
|
||||
return nil, errors.New("no entry was returned")
|
||||
}
|
||||
return mangaEvent.Entry, nil
|
||||
}
|
||||
|
||||
anilistEntry, found := opts.MangaCollection.GetListEntryFromMangaId(opts.MediaId)
|
||||
|
||||
// If the entry is not found, we fetch the manga from the Anilist API.
|
||||
if !found {
|
||||
media, err := opts.Platform.GetManga(ctx, opts.MediaId)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
entry.Media = media
|
||||
|
||||
} else {
|
||||
// If the entry is found, we use the entry from the collection.
|
||||
mangaEvent := new(anilist_platform.GetMangaEvent)
|
||||
mangaEvent.Manga = anilistEntry.GetMedia()
|
||||
err := hook.GlobalHookManager.OnGetManga().Trigger(mangaEvent)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
entry.Media = mangaEvent.Manga
|
||||
entry.EntryListData = &EntryListData{
|
||||
Progress: *anilistEntry.Progress,
|
||||
Score: *anilistEntry.Score,
|
||||
Status: anilistEntry.Status,
|
||||
Repeat: anilistEntry.GetRepeatSafe(),
|
||||
StartedAt: anilist.FuzzyDateToString(anilistEntry.StartedAt),
|
||||
CompletedAt: anilist.FuzzyDateToString(anilistEntry.CompletedAt),
|
||||
}
|
||||
}
|
||||
|
||||
mangaEvent := new(MangaEntryEvent)
|
||||
mangaEvent.Entry = entry
|
||||
err = hook.GlobalHookManager.OnMangaEntry().Trigger(mangaEvent)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return mangaEvent.Entry, nil
|
||||
}
|
||||
32
seanime-2.9.10/internal/manga/mock.go
Normal file
32
seanime-2.9.10/internal/manga/mock.go
Normal file
@@ -0,0 +1,32 @@
|
||||
package manga
|
||||
|
||||
import (
|
||||
"path/filepath"
|
||||
"seanime/internal/database/db"
|
||||
"seanime/internal/events"
|
||||
"seanime/internal/test_utils"
|
||||
"seanime/internal/util"
|
||||
"seanime/internal/util/filecache"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func GetMockRepository(t *testing.T, db *db.Database) *Repository {
|
||||
logger := util.NewLogger()
|
||||
cacheDir := filepath.Join(test_utils.ConfigData.Path.DataDir, "cache")
|
||||
fileCacher, err := filecache.NewCacher(cacheDir)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
repository := NewRepository(&NewRepositoryOptions{
|
||||
Logger: logger,
|
||||
FileCacher: fileCacher,
|
||||
CacheDir: cacheDir,
|
||||
ServerURI: "",
|
||||
WsEventManager: events.NewMockWSEventManager(logger),
|
||||
DownloadDir: filepath.Join(test_utils.ConfigData.Path.DataDir, "manga"),
|
||||
Database: db,
|
||||
})
|
||||
|
||||
return repository
|
||||
}
|
||||
66
seanime-2.9.10/internal/manga/providers/_local_pdf_test.go
Normal file
66
seanime-2.9.10/internal/manga/providers/_local_pdf_test.go
Normal file
@@ -0,0 +1,66 @@
|
||||
package manga_providers
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"image/jpeg"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestConvertPDFToImages(t *testing.T) {
|
||||
start := time.Now()
|
||||
|
||||
doc, err := fitz.New("")
|
||||
require.NoError(t, err)
|
||||
defer doc.Close()
|
||||
|
||||
images := make(map[int][]byte, doc.NumPage())
|
||||
|
||||
// Load images into memory
|
||||
for n := 0; n < doc.NumPage(); n++ {
|
||||
img, err := doc.Image(n)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
var buf bytes.Buffer
|
||||
err = jpeg.Encode(&buf, img, &jpeg.Options{Quality: jpeg.DefaultQuality})
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
images[n] = buf.Bytes()
|
||||
}
|
||||
|
||||
end := time.Now()
|
||||
|
||||
t.Logf("Converted %d pages in %f seconds", len(images), end.Sub(start).Seconds())
|
||||
|
||||
for n, imgData := range images {
|
||||
t.Logf("Page %d: %d bytes", n, len(imgData))
|
||||
}
|
||||
|
||||
//tmpDir, err := os.MkdirTemp(os.TempDir(), "manga_test_")
|
||||
//require.NoError(t, err)
|
||||
//if len(images) > 0 {
|
||||
// // Write the first image to a file for verification
|
||||
// firstImagePath := tmpDir + "/page_0.jpg"
|
||||
// err = os.WriteFile(firstImagePath, images[0], 0644)
|
||||
// require.NoError(t, err)
|
||||
// t.Logf("First image written to: %s", firstImagePath)
|
||||
//}
|
||||
//
|
||||
//time.Sleep(1 * time.Minute)
|
||||
//
|
||||
//t.Cleanup(func() {
|
||||
// // Clean up the temporary directory
|
||||
// err := os.RemoveAll(tmpDir)
|
||||
// if err != nil {
|
||||
// t.Logf("Failed to remove temp directory: %v", err)
|
||||
// } else {
|
||||
// t.Logf("Temporary directory removed: %s", tmpDir)
|
||||
// }
|
||||
//})
|
||||
}
|
||||
82
seanime-2.9.10/internal/manga/providers/_template.go
Normal file
82
seanime-2.9.10/internal/manga/providers/_template.go
Normal file
@@ -0,0 +1,82 @@
|
||||
package manga_providers
|
||||
|
||||
import (
|
||||
"github.com/rs/zerolog"
|
||||
"net/http"
|
||||
"seanime/internal/util"
|
||||
"time"
|
||||
)
|
||||
|
||||
type (
|
||||
Template struct {
|
||||
Url string
|
||||
Client *http.Client
|
||||
UserAgent string
|
||||
logger *zerolog.Logger
|
||||
}
|
||||
)
|
||||
|
||||
func NewTemplate(logger *zerolog.Logger) *Template {
|
||||
c := &http.Client{
|
||||
Timeout: 60 * time.Second,
|
||||
}
|
||||
c.Transport = util.AddCloudFlareByPass(c.Transport)
|
||||
return &Template{
|
||||
Url: "https://XXXXXX.com",
|
||||
Client: c,
|
||||
UserAgent: "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3",
|
||||
logger: logger,
|
||||
}
|
||||
}
|
||||
|
||||
func (mp *Template) Search(opts SearchOptions) ([]*SearchResult, error) {
|
||||
results := make([]*SearchResult, 0)
|
||||
|
||||
mp.logger.Debug().Str("query", opts.Query).Msg("XXXXXX: Searching manga")
|
||||
|
||||
// code
|
||||
|
||||
if len(results) == 0 {
|
||||
mp.logger.Error().Str("query", opts.Query).Msg("XXXXXX: No results found")
|
||||
return nil, ErrNoResults
|
||||
}
|
||||
|
||||
mp.logger.Info().Int("count", len(results)).Msg("XXXXXX: Found results")
|
||||
|
||||
return results, nil
|
||||
}
|
||||
|
||||
func (mp *Template) FindChapters(id string) ([]*ChapterDetails, error) {
|
||||
ret := make([]*ChapterDetails, 0)
|
||||
|
||||
mp.logger.Debug().Str("mangaId", id).Msg("XXXXXX: Finding chapters")
|
||||
|
||||
// code
|
||||
|
||||
if len(ret) == 0 {
|
||||
mp.logger.Error().Str("mangaId", id).Msg("XXXXXX: No chapters found")
|
||||
return nil, ErrNoChapters
|
||||
}
|
||||
|
||||
mp.logger.Info().Int("count", len(ret)).Msg("XXXXXX: Found chapters")
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (mp *Template) FindChapterPages(id string) ([]*ChapterPage, error) {
|
||||
ret := make([]*ChapterPage, 0)
|
||||
|
||||
mp.logger.Debug().Str("chapterId", id).Msg("XXXXXX: Finding chapter pages")
|
||||
|
||||
// code
|
||||
|
||||
if len(ret) == 0 {
|
||||
mp.logger.Error().Str("chapterId", id).Msg("XXXXXX: No pages found")
|
||||
return nil, ErrNoPages
|
||||
}
|
||||
|
||||
mp.logger.Info().Int("count", len(ret)).Msg("XXXXXX: Found pages")
|
||||
|
||||
return ret, nil
|
||||
|
||||
}
|
||||
127
seanime-2.9.10/internal/manga/providers/_template_test.go
Normal file
127
seanime-2.9.10/internal/manga/providers/_template_test.go
Normal file
@@ -0,0 +1,127 @@
|
||||
package manga_providers
|
||||
|
||||
import (
|
||||
"github.com/stretchr/testify/assert"
|
||||
"seanime/internal/util"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestXXXXXX_Search(t *testing.T) {
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
query string
|
||||
}{
|
||||
{
|
||||
name: "Boku no Kokoro no Yabai Yatsu",
|
||||
query: "Boku no Kokoro no Yabai Yatsu",
|
||||
},
|
||||
}
|
||||
|
||||
provider := NewXXXXXX(util.NewLogger())
|
||||
|
||||
for _, tt := range tests {
|
||||
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
|
||||
searchRes, err := provider.Search(SearchOptions{
|
||||
Query: tt.query,
|
||||
})
|
||||
if assert.NoError(t, err, "provider.Search() error") {
|
||||
assert.NotEmpty(t, searchRes, "search result is empty")
|
||||
|
||||
for _, res := range searchRes {
|
||||
t.Logf("Title: %s", res.Title)
|
||||
t.Logf("\tID: %s", res.ID)
|
||||
t.Logf("\tYear: %d", res.Year)
|
||||
t.Logf("\tImage: %s", res.Image)
|
||||
t.Logf("\tProvider: %s", res.Provider)
|
||||
t.Logf("\tSearchRating: %f", res.SearchRating)
|
||||
t.Logf("\tSynonyms: %v", res.Synonyms)
|
||||
t.Log("--------------------------------------------------")
|
||||
}
|
||||
}
|
||||
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func TestXXXXXX_FindChapters(t *testing.T) {
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
id string
|
||||
atLeast int
|
||||
}{
|
||||
{
|
||||
name: "The Dangers in My Heart",
|
||||
id: "",
|
||||
atLeast: 141,
|
||||
},
|
||||
}
|
||||
|
||||
provider := NewXXXXXX(util.NewLogger())
|
||||
|
||||
for _, tt := range tests {
|
||||
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
|
||||
chapters, err := provider.FindChapters(tt.id)
|
||||
if assert.NoError(t, err, "provider.FindChapters() error") {
|
||||
|
||||
assert.NotEmpty(t, chapters, "chapters is empty")
|
||||
|
||||
assert.GreaterOrEqual(t, len(chapters), tt.atLeast, "chapters length is less than expected")
|
||||
|
||||
for _, chapter := range chapters {
|
||||
t.Logf("Title: %s", chapter.Title)
|
||||
t.Logf("\tSlug: %s", chapter.ID)
|
||||
t.Logf("\tURL: %s", chapter.URL)
|
||||
t.Logf("\tIndex: %d", chapter.Index)
|
||||
t.Logf("\tUpdatedAt: %s", chapter.UpdatedAt)
|
||||
t.Log("--------------------------------------------------")
|
||||
}
|
||||
}
|
||||
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func TestXXXXXX_FindChapterPages(t *testing.T) {
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
chapterId string
|
||||
}{
|
||||
{
|
||||
name: "The Dangers in My Heart",
|
||||
chapterId: "", // Chapter 1
|
||||
},
|
||||
}
|
||||
|
||||
provider := NewXXXXXX(util.NewLogger())
|
||||
|
||||
for _, tt := range tests {
|
||||
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
|
||||
pages, err := provider.FindChapterPages(tt.chapterId)
|
||||
if assert.NoError(t, err, "provider.FindChapterPages() error") {
|
||||
assert.NotEmpty(t, pages, "pages is empty")
|
||||
|
||||
for _, page := range pages {
|
||||
t.Logf("Index: %d", page.Index)
|
||||
t.Logf("\tURL: %s", page.URL)
|
||||
t.Log("--------------------------------------------------")
|
||||
}
|
||||
}
|
||||
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
376
seanime-2.9.10/internal/manga/providers/comick.go
Normal file
376
seanime-2.9.10/internal/manga/providers/comick.go
Normal file
@@ -0,0 +1,376 @@
|
||||
package manga_providers
|
||||
|
||||
import (
|
||||
"cmp"
|
||||
"fmt"
|
||||
"net/url"
|
||||
hibikemanga "seanime/internal/extension/hibike/manga"
|
||||
"seanime/internal/util"
|
||||
"seanime/internal/util/comparison"
|
||||
"slices"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/imroc/req/v3"
|
||||
"github.com/rs/zerolog"
|
||||
)
|
||||
|
||||
type (
|
||||
ComicK struct {
|
||||
Url string
|
||||
Client *req.Client
|
||||
logger *zerolog.Logger
|
||||
}
|
||||
|
||||
ComicKResultItem struct {
|
||||
ID int `json:"id"`
|
||||
HID string `json:"hid"`
|
||||
Slug string `json:"slug"`
|
||||
Title string `json:"title"`
|
||||
Country string `json:"country"`
|
||||
Rating string `json:"rating"`
|
||||
BayesianRating string `json:"bayesian_rating"`
|
||||
RatingCount int `json:"rating_count"`
|
||||
FollowCount int `json:"follow_count"`
|
||||
Description string `json:"desc"`
|
||||
Status int `json:"status"`
|
||||
LastChapter float64 `json:"last_chapter"`
|
||||
TranslationCompleted bool `json:"translation_completed"`
|
||||
ViewCount int `json:"view_count"`
|
||||
ContentRating string `json:"content_rating"`
|
||||
Demographic int `json:"demographic"`
|
||||
UploadedAt string `json:"uploaded_at"`
|
||||
Genres []int `json:"genres"`
|
||||
CreatedAt string `json:"created_at"`
|
||||
UserFollowCount int `json:"user_follow_count"`
|
||||
Year int `json:"year"`
|
||||
MuComics struct {
|
||||
Year int `json:"year"`
|
||||
} `json:"mu_comics"`
|
||||
MdTitles []struct {
|
||||
Title string `json:"title"`
|
||||
} `json:"md_titles"`
|
||||
MdCovers []struct {
|
||||
W int `json:"w"`
|
||||
H int `json:"h"`
|
||||
B2Key string `json:"b2key"`
|
||||
} `json:"md_covers"`
|
||||
Highlight string `json:"highlight"`
|
||||
}
|
||||
)
|
||||
|
||||
func NewComicK(logger *zerolog.Logger) *ComicK {
|
||||
client := req.C().
|
||||
SetUserAgent("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36").
|
||||
SetTimeout(60 * time.Second).
|
||||
EnableInsecureSkipVerify().
|
||||
ImpersonateSafari()
|
||||
|
||||
return &ComicK{
|
||||
Url: "https://api.comick.fun",
|
||||
Client: client,
|
||||
logger: logger,
|
||||
}
|
||||
}
|
||||
|
||||
// DEVNOTE: Each chapter ID is a unique string provided by ComicK
|
||||
|
||||
func (c *ComicK) GetSettings() hibikemanga.Settings {
|
||||
return hibikemanga.Settings{
|
||||
SupportsMultiScanlator: false,
|
||||
SupportsMultiLanguage: false,
|
||||
}
|
||||
}
|
||||
|
||||
func (c *ComicK) Search(opts hibikemanga.SearchOptions) ([]*hibikemanga.SearchResult, error) {
|
||||
searchUrl := fmt.Sprintf("%s/v1.0/search?q=%s&limit=25&page=1", c.Url, url.QueryEscape(opts.Query))
|
||||
if opts.Year != 0 {
|
||||
searchUrl += fmt.Sprintf("&from=%d&to=%d", opts.Year, opts.Year)
|
||||
}
|
||||
|
||||
c.logger.Debug().Str("searchUrl", searchUrl).Msg("comick: Searching manga")
|
||||
|
||||
var data []*ComicKResultItem
|
||||
resp, err := c.Client.R().
|
||||
SetSuccessResult(&data).
|
||||
Get(searchUrl)
|
||||
|
||||
if err != nil {
|
||||
c.logger.Error().Err(err).Msg("comick: Failed to send request")
|
||||
return nil, fmt.Errorf("failed to send request: %w", err)
|
||||
}
|
||||
|
||||
if !resp.IsSuccessState() {
|
||||
c.logger.Error().Str("status", resp.Status).Msg("comick: Request failed")
|
||||
return nil, fmt.Errorf("failed to reach API: status %s", resp.Status)
|
||||
}
|
||||
|
||||
results := make([]*hibikemanga.SearchResult, 0)
|
||||
for _, result := range data {
|
||||
|
||||
// Skip fan-colored manga
|
||||
if strings.Contains(result.Slug, "fan-colored") {
|
||||
continue
|
||||
}
|
||||
|
||||
var coverURL string
|
||||
if len(result.MdCovers) > 0 && result.MdCovers[0].B2Key != "" {
|
||||
coverURL = "https://meo.comick.pictures/" + result.MdCovers[0].B2Key
|
||||
}
|
||||
|
||||
altTitles := make([]string, len(result.MdTitles))
|
||||
for j, title := range result.MdTitles {
|
||||
altTitles[j] = title.Title
|
||||
}
|
||||
|
||||
// DEVNOTE: We don't compare to alt titles because ComicK's synonyms aren't good
|
||||
compRes, _ := comparison.FindBestMatchWithSorensenDice(&opts.Query, []*string{&result.Title})
|
||||
|
||||
results = append(results, &hibikemanga.SearchResult{
|
||||
ID: result.HID,
|
||||
Title: cmp.Or(result.Title, result.Slug),
|
||||
Synonyms: altTitles,
|
||||
Image: coverURL,
|
||||
Year: result.Year,
|
||||
SearchRating: compRes.Rating,
|
||||
Provider: ComickProvider,
|
||||
})
|
||||
}
|
||||
|
||||
if len(results) == 0 {
|
||||
c.logger.Warn().Msg("comick: No results found")
|
||||
return nil, ErrNoChapters
|
||||
}
|
||||
|
||||
c.logger.Info().Int("count", len(results)).Msg("comick: Found results")
|
||||
|
||||
return results, nil
|
||||
}
|
||||
|
||||
func (c *ComicK) FindChapters(id string) ([]*hibikemanga.ChapterDetails, error) {
|
||||
ret := make([]*hibikemanga.ChapterDetails, 0)
|
||||
|
||||
c.logger.Debug().Str("mangaId", id).Msg("comick: Fetching chapters")
|
||||
|
||||
uri := fmt.Sprintf("%s/comic/%s/chapters?lang=en&page=0&limit=1000000&chap-order=1", c.Url, id)
|
||||
|
||||
var data struct {
|
||||
Chapters []*ComicChapter `json:"chapters"`
|
||||
}
|
||||
|
||||
resp, err := c.Client.R().
|
||||
SetSuccessResult(&data).
|
||||
Get(uri)
|
||||
|
||||
if err != nil {
|
||||
c.logger.Error().Err(err).Msg("comick: Failed to send request")
|
||||
return nil, fmt.Errorf("failed to send request: %w", err)
|
||||
}
|
||||
|
||||
if !resp.IsSuccessState() {
|
||||
c.logger.Error().Str("status", resp.Status).Msg("comick: Request failed")
|
||||
return nil, fmt.Errorf("failed to decode response: status %s", resp.Status)
|
||||
}
|
||||
|
||||
chapters := make([]*hibikemanga.ChapterDetails, 0)
|
||||
chaptersMap := make(map[string]*hibikemanga.ChapterDetails)
|
||||
count := 0
|
||||
for _, chapter := range data.Chapters {
|
||||
if chapter.Chap == "" || chapter.Lang != "en" {
|
||||
continue
|
||||
}
|
||||
title := "Chapter " + chapter.Chap + " "
|
||||
|
||||
if title == "" {
|
||||
if chapter.Title == "" {
|
||||
title = "Oneshot"
|
||||
} else {
|
||||
title = chapter.Title
|
||||
}
|
||||
}
|
||||
title = strings.TrimSpace(title)
|
||||
|
||||
prev, ok := chaptersMap[chapter.Chap]
|
||||
rating := chapter.UpCount - chapter.DownCount
|
||||
|
||||
if !ok || rating > prev.Rating {
|
||||
if !ok {
|
||||
count++
|
||||
}
|
||||
chaptersMap[chapter.Chap] = &hibikemanga.ChapterDetails{
|
||||
Provider: ComickProvider,
|
||||
ID: chapter.HID,
|
||||
Title: title,
|
||||
Index: uint(count),
|
||||
URL: fmt.Sprintf("%s/chapter/%s", c.Url, chapter.HID),
|
||||
Chapter: chapter.Chap,
|
||||
Rating: rating,
|
||||
UpdatedAt: chapter.UpdatedAt,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for _, chapter := range chaptersMap {
|
||||
chapters = append(chapters, chapter)
|
||||
}
|
||||
|
||||
// Sort chapters by index
|
||||
slices.SortStableFunc(chapters, func(i, j *hibikemanga.ChapterDetails) int {
|
||||
return cmp.Compare(i.Index, j.Index)
|
||||
})
|
||||
|
||||
ret = append(ret, chapters...)
|
||||
|
||||
if len(ret) == 0 {
|
||||
c.logger.Warn().Msg("comick: No chapters found")
|
||||
return nil, ErrNoChapters
|
||||
}
|
||||
|
||||
c.logger.Info().Int("count", len(ret)).Msg("comick: Found chapters")
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (c *ComicK) FindChapterPages(id string) ([]*hibikemanga.ChapterPage, error) {
|
||||
ret := make([]*hibikemanga.ChapterPage, 0)
|
||||
|
||||
c.logger.Debug().Str("chapterId", id).Msg("comick: Finding chapter pages")
|
||||
|
||||
uri := fmt.Sprintf("%s/chapter/%s", c.Url, id)
|
||||
|
||||
var data struct {
|
||||
Chapter *ComicChapter `json:"chapter"`
|
||||
}
|
||||
|
||||
resp, err := c.Client.R().
|
||||
SetHeader("User-Agent", util.GetRandomUserAgent()).
|
||||
SetSuccessResult(&data).
|
||||
Get(uri)
|
||||
|
||||
if err != nil {
|
||||
c.logger.Error().Err(err).Msg("comick: Failed to send request")
|
||||
return nil, fmt.Errorf("failed to send request: %w", err)
|
||||
}
|
||||
|
||||
if !resp.IsSuccessState() {
|
||||
c.logger.Error().Str("status", resp.Status).Msg("comick: Request failed")
|
||||
return nil, fmt.Errorf("failed to decode response: status %s", resp.Status)
|
||||
}
|
||||
|
||||
if data.Chapter == nil {
|
||||
c.logger.Error().Msg("comick: Chapter not found")
|
||||
return nil, fmt.Errorf("chapter not found")
|
||||
}
|
||||
|
||||
for index, image := range data.Chapter.MdImages {
|
||||
ret = append(ret, &hibikemanga.ChapterPage{
|
||||
Provider: ComickProvider,
|
||||
URL: fmt.Sprintf("https://meo.comick.pictures/%s", image.B2Key),
|
||||
Index: index,
|
||||
Headers: make(map[string]string),
|
||||
})
|
||||
}
|
||||
|
||||
if len(ret) == 0 {
|
||||
c.logger.Warn().Msg("comick: No pages found")
|
||||
return nil, ErrNoPages
|
||||
}
|
||||
|
||||
c.logger.Info().Int("count", len(ret)).Msg("comick: Found pages")
|
||||
|
||||
return ret, nil
|
||||
|
||||
}
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
// Comic mirrors the ComicK API's full comic object (the /comic/{slug}
// response shape).
type Comic struct {
	ID      int    `json:"id"`
	HID     string `json:"hid"`
	Title   string `json:"title"`
	Country string `json:"country"`
	Status  int    `json:"status"`
	// External tracker/store links (AniList, Anime-Planet, MangaUpdates, ...).
	Links struct {
		AL  string `json:"al"`
		AP  string `json:"ap"`
		BW  string `json:"bw"`
		KT  string `json:"kt"`
		MU  string `json:"mu"`
		AMZ string `json:"amz"`
		CDJ string `json:"cdj"`
		EBJ string `json:"ebj"`
		MAL string `json:"mal"`
		RAW string `json:"raw"`
	} `json:"links"`
	LastChapter          interface{} `json:"last_chapter"`
	ChapterCount         int         `json:"chapter_count"`
	Demographic          int         `json:"demographic"`
	Hentai               bool        `json:"hentai"`
	UserFollowCount      int         `json:"user_follow_count"`
	FollowRank           int         `json:"follow_rank"`
	CommentCount         int         `json:"comment_count"`
	FollowCount          int         `json:"follow_count"`
	Description          string      `json:"desc"`
	Parsed               string      `json:"parsed"`
	Slug                 string      `json:"slug"`
	Mismatch             interface{} `json:"mismatch"`
	Year                 int         `json:"year"`
	BayesianRating       interface{} `json:"bayesian_rating"`
	RatingCount          int         `json:"rating_count"`
	ContentRating        string      `json:"content_rating"`
	TranslationCompleted bool        `json:"translation_completed"`
	RelateFrom           []interface{} `json:"relate_from"`
	Mies                 interface{}   `json:"mies"`
	MdTitles             []struct {
		Title string `json:"title"`
	} `json:"md_titles"`
	MdComicMdGenres []struct {
		MdGenres struct {
			Name  string      `json:"name"`
			Type  interface{} `json:"type"`
			Slug  string      `json:"slug"`
			Group string      `json:"group"`
		} `json:"md_genres"`
	} `json:"md_comic_md_genres"`
	MuComics struct {
		LicensedInEnglish interface{} `json:"licensed_in_english"`
		MuComicCategories []struct {
			MuCategories struct {
				Title string `json:"title"`
				Slug  string `json:"slug"`
			} `json:"mu_categories"`
			PositiveVote int `json:"positive_vote"`
			NegativeVote int `json:"negative_vote"`
		} `json:"mu_comic_categories"`
	} `json:"mu_comics"`
	MdCovers []struct {
		Vol   interface{} `json:"vol"`
		W     int         `json:"w"`
		H     int         `json:"h"`
		B2Key string      `json:"b2key"`
	} `json:"md_covers"`
	Iso6391    string `json:"iso639_1"`
	LangName   string `json:"lang_name"`
	LangNative string `json:"lang_native"`
}
|
||||
|
||||
// ComicChapter mirrors one chapter object of the ComicK API, as returned by
// both the chapter listing and single-chapter endpoints.
type ComicChapter struct {
	ID        int      `json:"id"`
	Chap      string   `json:"chap"`
	Title     string   `json:"title"`
	Vol       string   `json:"vol,omitempty"`
	Lang      string   `json:"lang"`
	CreatedAt string   `json:"created_at"`
	UpdatedAt string   `json:"updated_at"`
	UpCount   int      `json:"up_count"`
	DownCount int      `json:"down_count"`
	GroupName []string `json:"group_name"`
	HID       string   `json:"hid"`
	MdImages  []struct {
		Name  string `json:"name"`
		W     int    `json:"w"`
		H     int    `json:"h"`
		S     int    `json:"s"`
		B2Key string `json:"b2key"`
	} `json:"md_images"`
}
|
||||
249
seanime-2.9.10/internal/manga/providers/comick_multi.go
Normal file
249
seanime-2.9.10/internal/manga/providers/comick_multi.go
Normal file
@@ -0,0 +1,249 @@
|
||||
package manga_providers
|
||||
|
||||
import (
|
||||
"cmp"
|
||||
"fmt"
|
||||
"net/url"
|
||||
hibikemanga "seanime/internal/extension/hibike/manga"
|
||||
"seanime/internal/util"
|
||||
"seanime/internal/util/comparison"
|
||||
"slices"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/imroc/req/v3"
|
||||
"github.com/rs/zerolog"
|
||||
)
|
||||
|
||||
type (
|
||||
ComicKMulti struct {
|
||||
Url string
|
||||
Client *req.Client
|
||||
logger *zerolog.Logger
|
||||
}
|
||||
)
|
||||
|
||||
func NewComicKMulti(logger *zerolog.Logger) *ComicKMulti {
|
||||
client := req.C().
|
||||
SetUserAgent("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36").
|
||||
SetTimeout(60 * time.Second).
|
||||
EnableInsecureSkipVerify().
|
||||
ImpersonateSafari()
|
||||
|
||||
return &ComicKMulti{
|
||||
Url: "https://api.comick.fun",
|
||||
Client: client,
|
||||
logger: logger,
|
||||
}
|
||||
}
|
||||
|
||||
// DEVNOTE: Each chapter ID is a unique string provided by ComicK
|
||||
|
||||
func (c *ComicKMulti) GetSettings() hibikemanga.Settings {
|
||||
return hibikemanga.Settings{
|
||||
SupportsMultiScanlator: true,
|
||||
SupportsMultiLanguage: true,
|
||||
}
|
||||
}
|
||||
|
||||
func (c *ComicKMulti) Search(opts hibikemanga.SearchOptions) ([]*hibikemanga.SearchResult, error) {
|
||||
|
||||
c.logger.Debug().Str("query", opts.Query).Msg("comick: Searching manga")
|
||||
|
||||
searchUrl := fmt.Sprintf("%s/v1.0/search?q=%s&limit=25&page=1", c.Url, url.QueryEscape(opts.Query))
|
||||
if opts.Year != 0 {
|
||||
searchUrl += fmt.Sprintf("&from=%d&to=%d", opts.Year, opts.Year)
|
||||
}
|
||||
|
||||
var data []*ComicKResultItem
|
||||
resp, err := c.Client.R().
|
||||
SetSuccessResult(&data).
|
||||
Get(searchUrl)
|
||||
|
||||
if err != nil {
|
||||
c.logger.Error().Err(err).Msg("comick: Failed to send request")
|
||||
return nil, fmt.Errorf("failed to send request: %w", err)
|
||||
}
|
||||
|
||||
if !resp.IsSuccessState() {
|
||||
c.logger.Error().Str("status", resp.Status).Msg("comick: Request failed")
|
||||
return nil, fmt.Errorf("failed to reach API: status %s", resp.Status)
|
||||
}
|
||||
|
||||
results := make([]*hibikemanga.SearchResult, 0)
|
||||
for _, result := range data {
|
||||
|
||||
// Skip fan-colored manga
|
||||
if strings.Contains(result.Slug, "fan-colored") {
|
||||
continue
|
||||
}
|
||||
|
||||
var coverURL string
|
||||
if len(result.MdCovers) > 0 && result.MdCovers[0].B2Key != "" {
|
||||
coverURL = "https://meo.comick.pictures/" + result.MdCovers[0].B2Key
|
||||
}
|
||||
|
||||
altTitles := make([]string, len(result.MdTitles))
|
||||
for j, title := range result.MdTitles {
|
||||
altTitles[j] = title.Title
|
||||
}
|
||||
|
||||
// DEVNOTE: We don't compare to alt titles because ComicK's synonyms aren't good
|
||||
compRes, _ := comparison.FindBestMatchWithSorensenDice(&opts.Query, []*string{&result.Title})
|
||||
|
||||
results = append(results, &hibikemanga.SearchResult{
|
||||
ID: result.HID,
|
||||
Title: cmp.Or(result.Title, result.Slug),
|
||||
Synonyms: altTitles,
|
||||
Image: coverURL,
|
||||
Year: result.Year,
|
||||
SearchRating: compRes.Rating,
|
||||
Provider: ComickProvider,
|
||||
})
|
||||
}
|
||||
|
||||
if len(results) == 0 {
|
||||
c.logger.Warn().Msg("comick: No results found")
|
||||
return nil, ErrNoChapters
|
||||
}
|
||||
|
||||
c.logger.Info().Int("count", len(results)).Msg("comick: Found results")
|
||||
|
||||
return results, nil
|
||||
}
|
||||
|
||||
func (c *ComicKMulti) FindChapters(id string) ([]*hibikemanga.ChapterDetails, error) {
|
||||
ret := make([]*hibikemanga.ChapterDetails, 0)
|
||||
|
||||
// c.logger.Debug().Str("mangaId", id).Msg("comick: Fetching chapters")
|
||||
|
||||
uri := fmt.Sprintf("%s/comic/%s/chapters?page=0&limit=1000000&chap-order=1", c.Url, id)
|
||||
c.logger.Debug().Str("mangaId", id).Str("uri", uri).Msg("comick: Fetching chapters")
|
||||
|
||||
var data struct {
|
||||
Chapters []*ComicChapter `json:"chapters"`
|
||||
}
|
||||
|
||||
resp, err := c.Client.R().
|
||||
SetSuccessResult(&data).
|
||||
Get(uri)
|
||||
|
||||
if err != nil {
|
||||
c.logger.Error().Err(err).Msg("comick: Failed to send request")
|
||||
return nil, fmt.Errorf("failed to send request: %w", err)
|
||||
}
|
||||
|
||||
if !resp.IsSuccessState() {
|
||||
c.logger.Error().Str("status", resp.Status).Msg("comick: Request failed")
|
||||
return nil, fmt.Errorf("failed to decode response: status %s", resp.Status)
|
||||
}
|
||||
|
||||
chapters := make([]*hibikemanga.ChapterDetails, 0)
|
||||
chaptersCountMap := make(map[string]int)
|
||||
for _, chapter := range data.Chapters {
|
||||
if chapter.Chap == "" {
|
||||
continue
|
||||
}
|
||||
title := "Chapter " + chapter.Chap + " "
|
||||
|
||||
if title == "" {
|
||||
if chapter.Title == "" {
|
||||
title = "Oneshot"
|
||||
} else {
|
||||
title = chapter.Title
|
||||
}
|
||||
}
|
||||
title = strings.TrimSpace(title)
|
||||
|
||||
groupName := ""
|
||||
if len(chapter.GroupName) > 0 {
|
||||
groupName = chapter.GroupName[0]
|
||||
}
|
||||
|
||||
count, ok := chaptersCountMap[groupName]
|
||||
if !ok {
|
||||
chaptersCountMap[groupName] = 0
|
||||
count = 0
|
||||
}
|
||||
chapters = append(chapters, &hibikemanga.ChapterDetails{
|
||||
Provider: ComickProvider,
|
||||
ID: chapter.HID,
|
||||
Title: title,
|
||||
Language: chapter.Lang,
|
||||
Index: uint(count),
|
||||
URL: fmt.Sprintf("%s/chapter/%s", c.Url, chapter.HID),
|
||||
Chapter: chapter.Chap,
|
||||
Scanlator: groupName,
|
||||
Rating: 0,
|
||||
UpdatedAt: chapter.UpdatedAt,
|
||||
})
|
||||
chaptersCountMap[groupName]++
|
||||
}
|
||||
|
||||
// Sort chapters by index
|
||||
slices.SortStableFunc(chapters, func(i, j *hibikemanga.ChapterDetails) int {
|
||||
return cmp.Compare(i.Index, j.Index)
|
||||
})
|
||||
|
||||
ret = append(ret, chapters...)
|
||||
|
||||
if len(ret) == 0 {
|
||||
c.logger.Warn().Msg("comick: No chapters found")
|
||||
return nil, ErrNoChapters
|
||||
}
|
||||
|
||||
c.logger.Info().Int("count", len(ret)).Msg("comick: Found chapters")
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (c *ComicKMulti) FindChapterPages(id string) ([]*hibikemanga.ChapterPage, error) {
|
||||
ret := make([]*hibikemanga.ChapterPage, 0)
|
||||
|
||||
c.logger.Debug().Str("chapterId", id).Msg("comick: Finding chapter pages")
|
||||
|
||||
uri := fmt.Sprintf("%s/chapter/%s", c.Url, id)
|
||||
|
||||
var data struct {
|
||||
Chapter *ComicChapter `json:"chapter"`
|
||||
}
|
||||
|
||||
resp, err := c.Client.R().
|
||||
SetHeader("User-Agent", util.GetRandomUserAgent()).
|
||||
SetSuccessResult(&data).
|
||||
Get(uri)
|
||||
|
||||
if err != nil {
|
||||
c.logger.Error().Err(err).Msg("comick: Failed to send request")
|
||||
return nil, fmt.Errorf("failed to send request: %w", err)
|
||||
}
|
||||
|
||||
if !resp.IsSuccessState() {
|
||||
c.logger.Error().Str("status", resp.Status).Msg("comick: Request failed")
|
||||
return nil, fmt.Errorf("failed to decode response: status %s", resp.Status)
|
||||
}
|
||||
|
||||
if data.Chapter == nil {
|
||||
c.logger.Error().Msg("comick: Chapter not found")
|
||||
return nil, fmt.Errorf("chapter not found")
|
||||
}
|
||||
|
||||
for index, image := range data.Chapter.MdImages {
|
||||
ret = append(ret, &hibikemanga.ChapterPage{
|
||||
Provider: ComickProvider,
|
||||
URL: fmt.Sprintf("https://meo.comick.pictures/%s", image.B2Key),
|
||||
Index: index,
|
||||
Headers: make(map[string]string),
|
||||
})
|
||||
}
|
||||
|
||||
if len(ret) == 0 {
|
||||
c.logger.Warn().Msg("comick: No pages found")
|
||||
return nil, ErrNoPages
|
||||
}
|
||||
|
||||
c.logger.Info().Int("count", len(ret)).Msg("comick: Found pages")
|
||||
|
||||
return ret, nil
|
||||
|
||||
}
|
||||
224
seanime-2.9.10/internal/manga/providers/comick_test.go
Normal file
224
seanime-2.9.10/internal/manga/providers/comick_test.go
Normal file
@@ -0,0 +1,224 @@
|
||||
package manga_providers
|
||||
|
||||
import (
|
||||
"github.com/stretchr/testify/assert"
|
||||
hibikemanga "seanime/internal/extension/hibike/manga"
|
||||
"seanime/internal/util"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestComicK_Search(t *testing.T) {
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
query string
|
||||
}{
|
||||
{
|
||||
name: "One Piece",
|
||||
query: "One Piece",
|
||||
},
|
||||
{
|
||||
name: "Jujutsu Kaisen",
|
||||
query: "Jujutsu Kaisen",
|
||||
},
|
||||
{
|
||||
name: "Komi-san wa, Komyushou desu",
|
||||
query: "Komi-san wa, Komyushou desu",
|
||||
},
|
||||
{
|
||||
name: "Boku no Kokoro no Yabai Yatsu",
|
||||
query: "Boku no Kokoro no Yabai Yatsu",
|
||||
},
|
||||
}
|
||||
|
||||
comick := NewComicK(util.NewLogger())
|
||||
|
||||
for _, tt := range tests {
|
||||
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
|
||||
searchRes, err := comick.Search(hibikemanga.SearchOptions{
|
||||
Query: tt.query,
|
||||
})
|
||||
if assert.NoError(t, err, "comick.Search() error") {
|
||||
assert.NotEmpty(t, searchRes, "search result is empty")
|
||||
|
||||
for _, res := range searchRes {
|
||||
t.Logf("Title: %s", res.Title)
|
||||
t.Logf("\tID: %s", res.ID)
|
||||
t.Logf("\tYear: %d", res.Year)
|
||||
t.Logf("\tImage: %s", res.Image)
|
||||
t.Logf("\tProvider: %s", res.Provider)
|
||||
t.Logf("\tSearchRating: %f", res.SearchRating)
|
||||
t.Logf("\tSynonyms: %v", res.Synonyms)
|
||||
t.Log("--------------------------------------------------")
|
||||
}
|
||||
}
|
||||
|
||||
})
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
func TestComicK_FindChapters(t *testing.T) {
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
id string
|
||||
atLeast int
|
||||
}{
|
||||
{
|
||||
name: "Jujutsu Kaisen",
|
||||
id: "TA22I5O7",
|
||||
atLeast: 250,
|
||||
},
|
||||
{
|
||||
name: "Komi-san wa, Komyushou desu",
|
||||
id: "K_Dn8VW7",
|
||||
atLeast: 250,
|
||||
},
|
||||
{
|
||||
name: "Boku no Kokoro no Yabai Yatsu",
|
||||
id: "pYN47sZm",
|
||||
atLeast: 141,
|
||||
},
|
||||
}
|
||||
|
||||
comick := NewComicK(util.NewLogger())
|
||||
|
||||
for _, tt := range tests {
|
||||
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
|
||||
chapters, err := comick.FindChapters(tt.id)
|
||||
if assert.NoError(t, err, "comick.FindChapters() error") {
|
||||
|
||||
assert.NotEmpty(t, chapters, "chapters is empty")
|
||||
|
||||
assert.GreaterOrEqual(t, len(chapters), tt.atLeast, "chapters length is less than expected")
|
||||
|
||||
for _, chapter := range chapters {
|
||||
t.Logf("Title: %s", chapter.Title)
|
||||
t.Logf("\tSlug: %s", chapter.ID)
|
||||
t.Logf("\tURL: %s", chapter.URL)
|
||||
t.Logf("\tIndex: %d", chapter.Index)
|
||||
t.Logf("\tChapter: %s", chapter.Chapter)
|
||||
t.Logf("\tUpdatedAt: %s", chapter.UpdatedAt)
|
||||
t.Log("--------------------------------------------------")
|
||||
}
|
||||
}
|
||||
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func TestComicKMulti_FindChapters(t *testing.T) {
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
id string
|
||||
atLeast int
|
||||
}{
|
||||
{
|
||||
name: "Jujutsu Kaisen",
|
||||
id: "TA22I5O7",
|
||||
atLeast: 250,
|
||||
},
|
||||
{
|
||||
name: "Komi-san wa, Komyushou desu",
|
||||
id: "K_Dn8VW7",
|
||||
atLeast: 250,
|
||||
},
|
||||
{
|
||||
name: "Boku no Kokoro no Yabai Yatsu",
|
||||
id: "pYN47sZm",
|
||||
atLeast: 141,
|
||||
},
|
||||
}
|
||||
|
||||
comick := NewComicKMulti(util.NewLogger())
|
||||
|
||||
for _, tt := range tests {
|
||||
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
|
||||
chapters, err := comick.FindChapters(tt.id)
|
||||
if assert.NoError(t, err, "comick.FindChapters() error") {
|
||||
|
||||
assert.NotEmpty(t, chapters, "chapters is empty")
|
||||
|
||||
assert.GreaterOrEqual(t, len(chapters), tt.atLeast, "chapters length is less than expected")
|
||||
|
||||
for _, chapter := range chapters {
|
||||
t.Logf("Title: %s", chapter.Title)
|
||||
t.Logf("\tLanguage: %s", chapter.Language)
|
||||
t.Logf("\tScanlator: %s", chapter.Scanlator)
|
||||
t.Logf("\tSlug: %s", chapter.ID)
|
||||
t.Logf("\tURL: %s", chapter.URL)
|
||||
t.Logf("\tIndex: %d", chapter.Index)
|
||||
t.Logf("\tChapter: %s", chapter.Chapter)
|
||||
t.Logf("\tUpdatedAt: %s", chapter.UpdatedAt)
|
||||
t.Log("--------------------------------------------------")
|
||||
}
|
||||
}
|
||||
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func TestComicK_FindChapterPages(t *testing.T) {
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
id string
|
||||
index uint
|
||||
}{
|
||||
{
|
||||
name: "Jujutsu Kaisen",
|
||||
id: "TA22I5O7",
|
||||
index: 258,
|
||||
},
|
||||
}
|
||||
|
||||
comick := NewComicK(util.NewLogger())
|
||||
|
||||
for _, tt := range tests {
|
||||
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
|
||||
chapters, err := comick.FindChapters(tt.id)
|
||||
if assert.NoError(t, err, "comick.FindChapters() error") {
|
||||
|
||||
assert.NotEmpty(t, chapters, "chapters is empty")
|
||||
|
||||
var chapterInfo *hibikemanga.ChapterDetails
|
||||
for _, chapter := range chapters {
|
||||
if chapter.Index == tt.index {
|
||||
chapterInfo = chapter
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if assert.NotNil(t, chapterInfo, "chapter not found") {
|
||||
pages, err := comick.FindChapterPages(chapterInfo.ID)
|
||||
if assert.NoError(t, err, "comick.FindChapterPages() error") {
|
||||
assert.NotEmpty(t, pages, "pages is empty")
|
||||
|
||||
for _, page := range pages {
|
||||
t.Logf("Index: %d", page.Index)
|
||||
t.Logf("\tURL: %s", page.URL)
|
||||
t.Log("--------------------------------------------------")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
14
seanime-2.9.10/internal/manga/providers/helpers.go
Normal file
14
seanime-2.9.10/internal/manga/providers/helpers.go
Normal file
@@ -0,0 +1,14 @@
|
||||
package manga_providers
|
||||
|
||||
import "strings"
|
||||
|
||||
// GetNormalizedChapter returns a normalized chapter string with leading
// padding zeros removed.
// e.g. "0001" -> "1", "000" -> "0", "0.5" -> "0.5"
func GetNormalizedChapter(chapter string) string {
	// Trim padding zeros
	normalized := strings.TrimLeft(chapter, "0")
	if normalized == "" {
		// The input was empty or all zeros ("0", "000", ...).
		normalized = "0"
	} else if strings.HasPrefix(normalized, ".") {
		// Keep one zero before a decimal point so "0.5" does not become ".5".
		normalized = "0" + normalized
	}
	return normalized
}
|
||||
556
seanime-2.9.10/internal/manga/providers/local.go
Normal file
556
seanime-2.9.10/internal/manga/providers/local.go
Normal file
@@ -0,0 +1,556 @@
|
||||
package manga_providers
|
||||
|
||||
import (
|
||||
"archive/zip"
|
||||
"bytes"
|
||||
"fmt"
|
||||
// "image/jpeg"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
hibikemanga "seanime/internal/extension/hibike/manga"
|
||||
"seanime/internal/util/comparison"
|
||||
"slices"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
// "github.com/gen2brain/go-fitz"
|
||||
"github.com/rs/zerolog"
|
||||
"github.com/samber/lo"
|
||||
)
|
||||
|
||||
const (
|
||||
LocalServePath = "{{manga-local-assets}}"
|
||||
)
|
||||
|
||||
type Local struct {
|
||||
dir string // Directory to scan for manga
|
||||
logger *zerolog.Logger
|
||||
|
||||
mu sync.Mutex
|
||||
currentChapterPath string
|
||||
currentZipCloser io.Closer
|
||||
currentPages map[string]*loadedPage
|
||||
}
|
||||
|
||||
type loadedPage struct {
|
||||
buf []byte
|
||||
page *hibikemanga.ChapterPage
|
||||
}
|
||||
|
||||
// chapterEntry represents a potential chapter file or directory found during scanning
|
||||
type chapterEntry struct {
|
||||
RelativePath string // Path relative to manga root (e.g., "mangaID/chapter1.cbz" or "mangaID/vol1/ch1.cbz")
|
||||
IsDir bool // Whether this entry is a directory
|
||||
}
|
||||
|
||||
func NewLocal(dir string, logger *zerolog.Logger) hibikemanga.Provider {
|
||||
_ = os.MkdirAll(dir, 0755)
|
||||
|
||||
return &Local{
|
||||
dir: dir,
|
||||
logger: logger,
|
||||
currentPages: make(map[string]*loadedPage),
|
||||
}
|
||||
}
|
||||
|
||||
func (p *Local) GetSettings() hibikemanga.Settings {
|
||||
return hibikemanga.Settings{
|
||||
SupportsMultiScanlator: false,
|
||||
SupportsMultiLanguage: false,
|
||||
}
|
||||
}
|
||||
|
||||
func (p *Local) SetSourceDirectory(dir string) {
|
||||
if dir != "" {
|
||||
p.dir = dir
|
||||
}
|
||||
}
|
||||
|
||||
func (p *Local) getAllManga() (res []*hibikemanga.SearchResult, err error) {
|
||||
if p.dir == "" {
|
||||
return make([]*hibikemanga.SearchResult, 0), nil
|
||||
}
|
||||
|
||||
entries, err := os.ReadDir(p.dir)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
res = make([]*hibikemanga.SearchResult, 0)
|
||||
for _, entry := range entries {
|
||||
if entry.IsDir() {
|
||||
res = append(res, &hibikemanga.SearchResult{
|
||||
ID: entry.Name(),
|
||||
Title: entry.Name(),
|
||||
Provider: LocalProvider,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func (p *Local) Search(opts hibikemanga.SearchOptions) (res []*hibikemanga.SearchResult, err error) {
|
||||
res = make([]*hibikemanga.SearchResult, 0)
|
||||
all, err := p.getAllManga()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if opts.Query == "" {
|
||||
return all, nil
|
||||
}
|
||||
|
||||
allTitles := make([]*string, len(all))
|
||||
for i, manga := range all {
|
||||
allTitles[i] = &manga.Title
|
||||
}
|
||||
compRes := comparison.CompareWithLevenshteinCleanFunc(&opts.Query, allTitles, cleanMangaTitle)
|
||||
|
||||
var bestMatch *comparison.LevenshteinResult
|
||||
for _, res := range compRes {
|
||||
if bestMatch == nil || res.Distance < bestMatch.Distance {
|
||||
bestMatch = res
|
||||
}
|
||||
}
|
||||
|
||||
if bestMatch == nil {
|
||||
return res, nil
|
||||
}
|
||||
|
||||
if bestMatch.Distance > 3 {
|
||||
// If the best match is too far away, return no results
|
||||
return res, nil
|
||||
}
|
||||
|
||||
manga, ok := lo.Find(all, func(manga *hibikemanga.SearchResult) bool {
|
||||
return manga.Title == *bestMatch.Value
|
||||
})
|
||||
|
||||
if !ok {
|
||||
return res, nil
|
||||
}
|
||||
|
||||
res = append(res, manga)
|
||||
|
||||
return res, nil
|
||||
}
|
||||
|
||||
// cleanMangaTitle normalizes a manga title for comparison: surrounding
// whitespace is trimmed and filesystem/punctuation characters are removed.
func cleanMangaTitle(title string) string {
	title = strings.TrimSpace(title)

	// Remove some characters to make comparison easier.
	// strings.Map drops a character when the mapping returns a NEGATIVE
	// rune; the previous code returned rune(0), which inserted a NUL byte
	// instead of removing the character.
	title = strings.Map(func(r rune) rune {
		switch r {
		case '/', '\\', ':', '*', '?', '!', '"', '<', '>', '|', ',':
			return -1
		}
		return r
	}, title)

	return title
}
|
||||
|
||||
// FindChapters scans the manga series directory and returns the chapters.
// Supports nested folder structures up to 2 levels deep.
//
// Example:
//
//	Series title/
//	├── Chapter 1/
//	│   ├── image_1.ext
//	│   └── image_n.ext
//	├── Chapter 2.pdf
//	└── Ch 1-10/
//	    ├── Ch 1/
//	    └── Ch 2/
//
// Each returned chapter's ID is the slash-separated path of the file or
// directory relative to the source directory (e.g. "series/chapter_1.cbz"),
// which is the form FindChapterPages expects. Chapters are sorted by parsed
// chapter number, then Index is assigned from the sorted position.
func (p *Local) FindChapters(mangaID string) (res []*hibikemanga.ChapterDetails, err error) {
	// No source directory configured: nothing to scan.
	if p.dir == "" {
		return make([]*hibikemanga.ChapterDetails, 0), nil
	}

	mangaPath := filepath.Join(p.dir, mangaID)

	p.logger.Trace().Str("mangaPath", mangaPath).Msg("manga: Finding local chapters")

	// Collect all potential chapter entries up to 2 levels deep
	chapterEntries, err := p.collectChapterEntries(mangaPath, mangaID, 0)
	if err != nil {
		return nil, err
	}

	res = make([]*hibikemanga.ChapterDetails, 0)
	// Go through all collected entries.
	for _, entry := range chapterEntries {
		// Parse chapter number(s)/title out of the file or directory name.
		scannedEntry, ok := scanChapterFilename(filepath.Base(entry.RelativePath))
		if !ok {
			continue
		}

		if len(scannedEntry.Chapter) != 1 {
			// Handle one-shots (no chapter number and only one entry)
			if len(scannedEntry.Chapter) == 0 && len(chapterEntries) == 1 {
				chapterTitle := "Chapter 1"
				if scannedEntry.ChapterTitle != "" {
					chapterTitle += " - " + scannedEntry.ChapterTitle
				}
				res = append(res, &hibikemanga.ChapterDetails{
					Provider:   LocalProvider,
					ID:         filepath.ToSlash(entry.RelativePath), // ID is the relative filepath, e.g. "/series/chapter_1.cbz" or "/series/vol1/ch1.cbz"
					URL:        "",
					Title:      chapterTitle,
					Chapter:    "1",
					Index:      0, // placeholder, will be set later
					LocalIsPDF: scannedEntry.IsPDF,
				})
			} else if len(scannedEntry.Chapter) == 2 {
				// Handle combined chapters (e.g. "Chapter 1-2")
				chapterTitle := "Chapter " + cleanChapter(scannedEntry.Chapter[0]) + "-" + cleanChapter(scannedEntry.Chapter[1])
				if scannedEntry.ChapterTitle != "" {
					chapterTitle += " - " + scannedEntry.ChapterTitle
				}
				res = append(res, &hibikemanga.ChapterDetails{
					Provider: LocalProvider,
					ID:       filepath.ToSlash(entry.RelativePath), // ID is the relative filepath, e.g. "/series/chapter_1.cbz" or "/series/vol1/ch1.cbz"
					URL:      "",
					Title:    chapterTitle,
					// Use the last chapter number as the chapter for progress tracking
					Chapter:    cleanChapter(scannedEntry.Chapter[1]),
					Index:      0, // placeholder, will be set later
					LocalIsPDF: scannedEntry.IsPDF,
				})
			}
			// Entries with no number (among many) or with 3+ numbers are dropped.
			continue
		}

		// Common case: exactly one chapter number.
		ch := cleanChapter(scannedEntry.Chapter[0])
		chapterTitle := "Chapter " + ch
		if scannedEntry.ChapterTitle != "" {
			chapterTitle += " - " + scannedEntry.ChapterTitle
		}

		res = append(res, &hibikemanga.ChapterDetails{
			Provider:   LocalProvider,
			ID:         filepath.ToSlash(entry.RelativePath), // ID is the relative filepath, e.g. "/series/chapter_1.cbz" or "/series/vol1/ch1.cbz"
			URL:        "",
			Title:      chapterTitle,
			Chapter:    ch,
			Index:      0, // placeholder, will be set later
			LocalIsPDF: scannedEntry.IsPDF,
		})
	}

	// sort by chapter number (ascending)
	// NOTE(review): int(chA - chB) truncates fractional differences, so
	// chapters whose numbers differ by less than 1 (e.g. "1.2" vs "1.5")
	// compare as equal — confirm whether sub-chapter ordering matters here.
	slices.SortFunc(res, func(a, b *hibikemanga.ChapterDetails) int {
		chA, _ := strconv.ParseFloat(a.Chapter, 64)
		chB, _ := strconv.ParseFloat(b.Chapter, 64)
		return int(chA - chB)
	})

	// set the indexes
	for i, chapter := range res {
		chapter.Index = uint(i)
	}

	return res, nil
}
|
||||
|
||||
// collectChapterEntries walks the directory tree up to maxDepth levels deep and collects
// all potential chapter files and directories.
//
// Rules per entry:
//   - a directory containing only image files is a chapter;
//   - any other directory is recursed into (until maxDepth); if the
//     recursion finds nothing, the directory itself is kept as a candidate;
//   - files with a .cbz/.cbr/.pdf/.zip extension are chapter files.
//
// RelativePath on each returned entry is relative to the source directory
// and always starts with mangaID. Unreadable subtrees are skipped, not
// treated as errors.
func (p *Local) collectChapterEntries(currentPath, mangaID string, currentDepth int) (entries []*chapterEntry, err error) {
	const maxDepth = 2

	if currentDepth > maxDepth {
		return entries, nil
	}

	dirEntries, err := os.ReadDir(currentPath)
	if err != nil {
		return nil, err
	}

	entries = make([]*chapterEntry, 0)

	for _, entry := range dirEntries {
		entryPath := filepath.Join(currentPath, entry.Name())

		// Calculate relative path from manga root
		var relativePath string
		if currentDepth == 0 {
			// At manga root level
			relativePath = filepath.Join(mangaID, entry.Name())
		} else {
			// Get the relative part from current path
			relativeFromManga, err := filepath.Rel(filepath.Join(p.dir, mangaID), entryPath)
			if err != nil {
				// Cannot express this entry relative to the manga root; skip it.
				continue
			}
			relativePath = filepath.Join(mangaID, relativeFromManga)
		}

		if entry.IsDir() {
			// Check if this directory contains only images (making it a chapter directory)
			isImageDirectory, _ := p.isImageOnlyDirectory(entryPath)

			if isImageDirectory {
				// Directory contains only images, treat it as a chapter
				entries = append(entries, &chapterEntry{
					RelativePath: relativePath,
					IsDir:        true,
				})
			} else if currentDepth < maxDepth {
				// Directory doesn't contain only images, recursively scan subdirectories
				subEntries, err := p.collectChapterEntries(entryPath, mangaID, currentDepth+1)
				if err != nil {
					// Unreadable subtree; skip it rather than failing the scan.
					continue
				}

				// If subdirectory contains chapters, add them
				if len(subEntries) > 0 {
					entries = append(entries, subEntries...)
				} else {
					// If no sub-chapters found, treat directory itself as potential chapter
					entries = append(entries, &chapterEntry{
						RelativePath: relativePath,
						IsDir:        true,
					})
				}
			} else {
				// At max depth, treat directory as potential chapter
				entries = append(entries, &chapterEntry{
					RelativePath: relativePath,
					IsDir:        true,
				})
			}
		} else {
			// File entry - check if it's a potential chapter file
			ext := strings.ToLower(filepath.Ext(entry.Name()))
			if ext == ".cbz" || ext == ".cbr" || ext == ".pdf" || ext == ".zip" {
				entries = append(entries, &chapterEntry{
					RelativePath: relativePath,
					IsDir:        false,
				})
			}
		}
	}

	return entries, nil
}
|
||||
|
||||
// isImageOnlyDirectory checks if a directory contains only image files (no subdirectories or other files)
|
||||
func (p *Local) isImageOnlyDirectory(dirPath string) (bool, error) {
|
||||
entries, err := os.ReadDir(dirPath)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
if len(entries) == 0 {
|
||||
return false, nil
|
||||
}
|
||||
|
||||
hasImages := false
|
||||
for _, entry := range entries {
|
||||
if entry.IsDir() {
|
||||
return false, nil
|
||||
}
|
||||
|
||||
if isFileImage(entry.Name()) {
|
||||
hasImages = true
|
||||
} else {
|
||||
return false, nil
|
||||
}
|
||||
}
|
||||
|
||||
return hasImages, nil
|
||||
}
|
||||
|
||||
// "0001" -> "1", "0" -> "0"
|
||||
// cleanChapter strips leading padding zeros from a chapter number string.
// e.g. "0001" -> "1", "0" -> "0", "00" -> "0", "0.5" -> "0.5".
// An empty input is returned unchanged.
func cleanChapter(ch string) string {
	if ch == "" {
		return ""
	}
	trimmed := strings.TrimLeft(ch, "0")
	if trimmed == "" {
		// The string was all zeros; previously only the exact string "0"
		// was handled, so "00" collapsed to "".
		return "0"
	}
	if strings.HasPrefix(trimmed, ".") {
		// Keep one zero before a decimal point so "0.5" does not become ".5".
		return "0" + trimmed
	}
	return trimmed
}
|
||||
|
||||
// FindChapterPages will extract the images
|
||||
func (p *Local) FindChapterPages(id string) (ret []*hibikemanga.ChapterPage, err error) {
|
||||
if p.dir == "" {
|
||||
return make([]*hibikemanga.ChapterPage, 0), nil
|
||||
}
|
||||
|
||||
// id = filepath
|
||||
// e.g. "series/chapter_1.cbz"
|
||||
fullpath := filepath.Join(p.dir, id) // e.g. "/collection/series/chapter_1.cbz"
|
||||
|
||||
// Prefix with {{manga-local-assets}} to signal the client that this is a local file
|
||||
// e.g. "{{manga-local-assets}}/series/chapter_1.cbz/image_1.jpg"
|
||||
formatUrl := func(fileName string) string {
|
||||
return filepath.ToSlash(filepath.Join(LocalServePath, id, fileName))
|
||||
}
|
||||
|
||||
ext := filepath.Ext(fullpath)
|
||||
|
||||
// Close the current pages
|
||||
if p.currentZipCloser != nil {
|
||||
_ = p.currentZipCloser.Close()
|
||||
}
|
||||
for _, loadedPage := range p.currentPages {
|
||||
loadedPage.buf = nil
|
||||
}
|
||||
p.currentPages = make(map[string]*loadedPage)
|
||||
p.currentZipCloser = nil
|
||||
p.currentChapterPath = fullpath
|
||||
|
||||
switch ext {
|
||||
case ".zip", ".cbz":
|
||||
r, err := zip.OpenReader(fullpath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer r.Close()
|
||||
|
||||
for _, f := range r.File {
|
||||
if !isFileImage(f.Name) {
|
||||
continue
|
||||
}
|
||||
|
||||
page, err := f.Open()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to open page: %w", err)
|
||||
}
|
||||
buf, err := io.ReadAll(page)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to read page: %w", err)
|
||||
}
|
||||
p.currentPages[strings.ToLower(f.Name)] = &loadedPage{
|
||||
buf: buf,
|
||||
page: &hibikemanga.ChapterPage{
|
||||
Provider: LocalProvider,
|
||||
URL: formatUrl(f.Name),
|
||||
Index: 0, // placeholder, will be set later
|
||||
Buf: buf,
|
||||
},
|
||||
}
|
||||
}
|
||||
case ".pdf":
|
||||
// doc, err := fitz.New(fullpath)
|
||||
// if err != nil {
|
||||
// return nil, fmt.Errorf("failed to open PDF file: %w", err)
|
||||
// }
|
||||
// defer doc.Close()
|
||||
|
||||
// // Load images into memory
|
||||
// for n := 0; n < doc.NumPage(); n++ {
|
||||
// img, err := doc.Image(n)
|
||||
// if err != nil {
|
||||
// panic(err)
|
||||
// }
|
||||
|
||||
// var buf bytes.Buffer
|
||||
// err = jpeg.Encode(&buf, img, &jpeg.Options{Quality: jpeg.DefaultQuality})
|
||||
// if err != nil {
|
||||
// panic(err)
|
||||
// }
|
||||
|
||||
// p.currentPages[fmt.Sprintf("page_%d.jpg", n)] = &loadedPage{
|
||||
// buf: buf.Bytes(),
|
||||
// page: &hibikemanga.ChapterPage{
|
||||
// Provider: LocalProvider,
|
||||
// URL: formatUrl(fmt.Sprintf("page_%d.jpg", n)),
|
||||
// Index: n,
|
||||
// },
|
||||
// }
|
||||
// }
|
||||
default:
|
||||
// If it's a directory of images
|
||||
stat, err := os.Stat(fullpath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to stat file: %w", err)
|
||||
}
|
||||
if !stat.IsDir() {
|
||||
return nil, fmt.Errorf("file is not a directory: %s", fullpath)
|
||||
}
|
||||
|
||||
entries, err := os.ReadDir(fullpath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to read directory: %w", err)
|
||||
}
|
||||
|
||||
for _, entry := range entries {
|
||||
if !isFileImage(entry.Name()) {
|
||||
continue
|
||||
}
|
||||
|
||||
page, err := os.Open(filepath.Join(fullpath, entry.Name()))
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to open page: %w", err)
|
||||
}
|
||||
buf, err := io.ReadAll(page)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to read page: %w", err)
|
||||
}
|
||||
p.currentPages[strings.ToLower(entry.Name())] = &loadedPage{
|
||||
buf: buf,
|
||||
page: &hibikemanga.ChapterPage{
|
||||
Provider: LocalProvider,
|
||||
URL: formatUrl(entry.Name()),
|
||||
Index: 0, // placeholder, will be set later
|
||||
Buf: buf,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type pageStruct struct {
|
||||
Number float64
|
||||
LoadedPage *loadedPage
|
||||
}
|
||||
|
||||
pages := make([]*pageStruct, 0)
|
||||
|
||||
// Parse and order the pages
|
||||
for _, loadedPage := range p.currentPages {
|
||||
scannedPage, ok := parsePageFilename(filepath.Base(loadedPage.page.URL))
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
pages = append(pages, &pageStruct{
|
||||
Number: scannedPage.Number,
|
||||
LoadedPage: loadedPage,
|
||||
})
|
||||
}
|
||||
|
||||
// Sort pages
|
||||
slices.SortFunc(pages, func(a, b *pageStruct) int {
|
||||
return strings.Compare(filepath.Base(a.LoadedPage.page.URL), filepath.Base(b.LoadedPage.page.URL))
|
||||
})
|
||||
|
||||
ret = make([]*hibikemanga.ChapterPage, 0)
|
||||
for idx, pageStruct := range pages {
|
||||
pageStruct.LoadedPage.page.Index = idx
|
||||
ret = append(ret, pageStruct.LoadedPage.page)
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (p *Local) ReadPage(path string) (ret io.ReadCloser, err error) {
|
||||
// e.g. path = "/series/chapter_1.cbz/image_1.jpg"
|
||||
|
||||
// If the pages are already in memory, return them
|
||||
if len(p.currentPages) > 0 {
|
||||
page, ok := p.currentPages[strings.ToLower(filepath.Base(path))]
|
||||
if ok {
|
||||
return io.NopCloser(bytes.NewReader(page.buf)), nil // Return the page
|
||||
}
|
||||
}
|
||||
|
||||
return nil, fmt.Errorf("page not found: %s", path)
|
||||
}
|
||||
823
seanime-2.9.10/internal/manga/providers/local_parser.go
Normal file
823
seanime-2.9.10/internal/manga/providers/local_parser.go
Normal file
@@ -0,0 +1,823 @@
|
||||
package manga_providers
|
||||
|
||||
import (
|
||||
"path/filepath"
|
||||
"slices"
|
||||
"strconv"
|
||||
"strings"
|
||||
"unicode"
|
||||
)
|
||||
|
||||
type ScannedChapterFile struct {
|
||||
Chapter []string // can be a single chapter or a range of chapters
|
||||
MangaTitle string // typically comes before the chapter number
|
||||
ChapterTitle string // typically comes after the chapter number
|
||||
Volume []string // typically comes after the chapter number
|
||||
IsPDF bool
|
||||
}
|
||||
|
||||
type TokenType int
|
||||
|
||||
const (
|
||||
TokenUnknown TokenType = iota
|
||||
TokenText
|
||||
TokenNumber
|
||||
TokenKeyword
|
||||
TokenSeparator
|
||||
TokenEnclosed
|
||||
TokenFileExtension
|
||||
)
|
||||
|
||||
// Token represents a parsed token from the filename
|
||||
type Token struct {
|
||||
Type TokenType
|
||||
Value string
|
||||
Position int
|
||||
IsChapter bool
|
||||
IsVolume bool
|
||||
}
|
||||
|
||||
// Lexer handles the tokenization of the filename
|
||||
type Lexer struct {
|
||||
input string
|
||||
position int
|
||||
tokens []Token
|
||||
currentToken int
|
||||
}
|
||||
|
||||
var ChapterKeywords = []string{
|
||||
"ch", "chp", "chapter", "chap", "c",
|
||||
}
|
||||
|
||||
var VolumeKeywords = []string{
|
||||
"v", "vol", "volume",
|
||||
}
|
||||
|
||||
var SeparatorChars = []rune{
|
||||
' ', '-', '_', '.', '[', ']', '(', ')', '{', '}', '~',
|
||||
}
|
||||
|
||||
var ImageExtensions = map[string]struct{}{
|
||||
".png": {},
|
||||
".jpg": {},
|
||||
".jpeg": {},
|
||||
".gif": {},
|
||||
".webp": {},
|
||||
".bmp": {},
|
||||
".tiff": {},
|
||||
".tif": {},
|
||||
}
|
||||
|
||||
// NewLexer creates a new lexer instance
|
||||
func NewLexer(input string) *Lexer {
|
||||
return &Lexer{
|
||||
input: strings.TrimSpace(input),
|
||||
tokens: make([]Token, 0),
|
||||
currentToken: 0,
|
||||
}
|
||||
}
|
||||
|
||||
// Tokenize breaks down the input into tokens.
// It scans the input byte by byte: whitespace is skipped, bracketed groups
// become TokenEnclosed, then separators, digits, and letters are consumed
// by their dedicated readers. Any byte matching none of these predicates
// (including non-ASCII bytes, since the predicates only recognize ASCII)
// is skipped one byte at a time. After scanning, classifyTokens assigns
// chapter/volume semantics to the raw tokens.
func (l *Lexer) Tokenize() []Token {
	l.position = 0
	l.tokens = make([]Token, 0)

	for l.position < len(l.input) {
		if l.isWhitespace(l.current()) {
			l.skipWhitespace()
			continue
		}

		if l.isEnclosedStart(l.current()) {
			l.readEnclosed()
			continue
		}

		if l.isSeparator(l.current()) {
			l.readSeparator()
			continue
		}

		if l.isDigit(l.current()) {
			l.readNumber()
			continue
		}

		if l.isLetter(l.current()) {
			l.readText()
			continue
		}

		// Skip unknown characters
		l.position++
	}

	l.classifyTokens()
	return l.tokens
}
|
||||
|
||||
// current returns the current character, or 0 when the end of input is reached.
// NOTE(review): this indexes the string by byte, so a multi-byte UTF-8 rune
// is seen as individual bytes rather than one rune — confirm inputs are
// expected to be ASCII filenames.
func (l *Lexer) current() rune {
	if l.position >= len(l.input) {
		return 0
	}
	return rune(l.input[l.position])
}

// peek returns the next character without advancing, or 0 at end of input.
func (l *Lexer) peek() rune {
	if l.position+1 >= len(l.input) {
		return 0
	}
	return rune(l.input[l.position+1])
}

// advance moves to the next character (byte).
func (l *Lexer) advance() {
	l.position++
}

// isWhitespace checks if character is whitespace (space, tab, CR, LF).
func (l *Lexer) isWhitespace(r rune) bool {
	return r == ' ' || r == '\t' || r == '\n' || r == '\r'
}
|
||||
|
||||
// isSeparator checks if character is a separator
|
||||
func (l *Lexer) isSeparator(r rune) bool {
|
||||
for _, sep := range SeparatorChars {
|
||||
if r == sep {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// isEnclosedStart checks if character starts an enclosed section
|
||||
func (l *Lexer) isEnclosedStart(r rune) bool {
|
||||
return r == '[' || r == '(' || r == '{'
|
||||
}
|
||||
|
||||
// isDigit checks if character is a digit
|
||||
func (l *Lexer) isDigit(r rune) bool {
|
||||
return r >= '0' && r <= '9'
|
||||
}
|
||||
|
||||
// isLetter checks if character is a letter
|
||||
func (l *Lexer) isLetter(r rune) bool {
|
||||
return (r >= 'a' && r <= 'z') || (r >= 'A' && r <= 'Z')
|
||||
}
|
||||
|
||||
// skipWhitespace skips all whitespace characters
|
||||
func (l *Lexer) skipWhitespace() {
|
||||
for l.position < len(l.input) && l.isWhitespace(l.current()) {
|
||||
l.advance()
|
||||
}
|
||||
}
|
||||
|
||||
// readEnclosed reads content within brackets/parentheses.
//
// The cursor must sit on an opening '[', '(' or '{'. Everything up to the
// matching closing character is emitted as one TokenEnclosed token whose
// position is the opening bracket. Nesting is not tracked: the first closing
// character of the right kind ends the section. If the input ends before a
// closing character is found, the consumed text is dropped and no token is
// emitted. Whitespace-only content is also dropped.
func (l *Lexer) readEnclosed() {
	start := l.position
	openChar := l.current()
	var closeChar rune

	switch openChar {
	case '[':
		closeChar = ']'
	case '(':
		closeChar = ')'
	case '{':
		closeChar = '}'
	default:
		// Defensive: caller should only invoke this on an opener;
		// skip the character so the lexer still makes progress.
		l.advance()
		return
	}

	l.advance() // Skip opening character
	startContent := l.position

	for l.position < len(l.input) && l.current() != closeChar {
		l.advance()
	}

	if l.position < len(l.input) {
		content := l.input[startContent:l.position]
		l.advance() // Skip closing character

		// Only add if content is meaningful
		if len(strings.TrimSpace(content)) > 0 {
			l.addToken(TokenEnclosed, content, start)
		}
	}
}
|
||||
|
||||
// readSeparator reads separator characters
|
||||
func (l *Lexer) readSeparator() {
|
||||
start := l.position
|
||||
value := string(l.current())
|
||||
l.advance()
|
||||
l.addToken(TokenSeparator, value, start)
|
||||
}
|
||||
|
||||
// readNumber reads numeric values (including decimals).
//
// Digits and '.' are consumed greedily, except that a '.' immediately
// followed by a known archive/document extension (cbz, cbr, pdf, epub) is
// treated as the start of the file extension and left unconsumed.
// NOTE(review): a '.' before any other suffix (e.g. "1.jpg") IS consumed,
// so the emitted value can end with a trailing dot — presumably tolerated
// downstream; confirm with callers.
func (l *Lexer) readNumber() {
	start := l.position

	for l.position < len(l.input) && (l.isDigit(l.current()) || l.current() == '.') {
		// Stop if we hit a file extension
		if l.current() == '.' && l.position+1 < len(l.input) {
			// Check if this is followed by common file extensions
			remaining := l.input[l.position+1:]
			if strings.HasPrefix(remaining, "cbz") || strings.HasPrefix(remaining, "cbr") ||
				strings.HasPrefix(remaining, "pdf") || strings.HasPrefix(remaining, "epub") {
				break
			}
		}
		l.advance()
	}

	value := l.input[start:l.position]
	l.addToken(TokenNumber, value, start)
}
|
||||
|
||||
// readText reads alphabetic text.
//
// Consumes a run of letters and digits starting at the cursor. Two special
// cases then apply:
//  1. If the run starts with a chapter/volume keyword and is immediately
//     followed by ".<digit>" (e.g. "c12.5"), the decimal part is consumed
//     into the same run.
//  2. If the run is a keyword concatenated with a number ("ch001", "v01"),
//     it is split into a keyword token plus a number token; otherwise a
//     single TokenText token is emitted with its original casing.
func (l *Lexer) readText() {
	start := l.position

	for l.position < len(l.input) && (l.isLetter(l.current()) || l.isDigit(l.current())) {
		l.advance()
	}

	value := l.input[start:l.position]
	lowerValue := strings.ToLower(value) // Use lowercase for keyword checking

	// Check if this might be a concatenated keyword that continues with a decimal
	if l.startsWithKeyword(lowerValue) && l.position < len(l.input) && l.current() == '.' {
		// Look ahead to see if there are more digits after the decimal
		tempPos := l.position + 1
		if tempPos < len(l.input) && l.isDigit(rune(l.input[tempPos])) {
			// Read the decimal part
			l.advance() // consume the '.'
			for l.position < len(l.input) && l.isDigit(l.current()) {
				l.advance()
			}
			// Update value to include decimal part
			value = l.input[start:l.position]
			lowerValue = strings.ToLower(value)
		}
	}

	// Check for concatenated keywords like "ch001", "c001", "chp001", "c12.5"
	if l.containsKeywordPrefix(lowerValue) {
		l.splitKeywordAndNumber(lowerValue, value, start) // Pass both versions
	} else {
		l.addToken(TokenText, value, start) // Use original case
	}
}
|
||||
|
||||
// startsWithKeyword checks if text starts with any known keyword
|
||||
func (l *Lexer) startsWithKeyword(text string) bool {
|
||||
for _, keyword := range ChapterKeywords {
|
||||
if strings.HasPrefix(text, keyword) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
for _, keyword := range VolumeKeywords {
|
||||
if strings.HasPrefix(text, keyword) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// containsKeywordPrefix checks if text starts with a known keyword
|
||||
func (l *Lexer) containsKeywordPrefix(text string) bool {
|
||||
chKeywords := ChapterKeywords
|
||||
// Sort by length descending to match longer keywords first
|
||||
slices.SortFunc(chKeywords, func(a, b string) int {
|
||||
return len(b) - len(a) // Sort by length descending
|
||||
})
|
||||
for _, keyword := range ChapterKeywords {
|
||||
if strings.HasPrefix(text, keyword) && len(text) > len(keyword) {
|
||||
remaining := text[len(keyword):]
|
||||
// Check if remaining part is numeric (including decimals)
|
||||
if len(remaining) == 0 {
|
||||
return false
|
||||
}
|
||||
return l.isValidNumberPart(remaining)
|
||||
}
|
||||
}
|
||||
for _, keyword := range VolumeKeywords {
|
||||
if strings.HasPrefix(text, keyword) && len(text) > len(keyword) {
|
||||
remaining := text[len(keyword):]
|
||||
// Check if remaining part is numeric (including decimals)
|
||||
if len(remaining) == 0 {
|
||||
return false
|
||||
}
|
||||
return l.isValidNumberPart(remaining)
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// isValidNumberPart checks if string is valid number (including decimals)
|
||||
func (l *Lexer) isValidNumberPart(s string) bool {
|
||||
if len(s) == 0 {
|
||||
return false
|
||||
}
|
||||
|
||||
// Don't allow starting with decimal
|
||||
if s[0] == '.' {
|
||||
return false
|
||||
}
|
||||
|
||||
hasDecimal := false
|
||||
for _, r := range s {
|
||||
if r == '.' {
|
||||
if hasDecimal {
|
||||
return false // Multiple decimals not allowed
|
||||
}
|
||||
hasDecimal = true
|
||||
} else if !l.isDigit(r) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// splitKeywordAndNumber splits concatenated keyword and number tokens
|
||||
func (l *Lexer) splitKeywordAndNumber(lowerText, originalText string, position int) {
|
||||
for _, keyword := range ChapterKeywords {
|
||||
if strings.HasPrefix(lowerText, keyword) && len(lowerText) > len(keyword) {
|
||||
// Use original case for the keyword part
|
||||
originalKeyword := originalText[:len(keyword)]
|
||||
l.addKeywordToken(originalKeyword, position, true, false)
|
||||
|
||||
// Extract number part (keeping original case/formatting)
|
||||
numberPart := originalText[len(keyword):]
|
||||
l.addToken(TokenNumber, numberPart, position+len(keyword))
|
||||
return
|
||||
}
|
||||
}
|
||||
for _, keyword := range VolumeKeywords {
|
||||
if strings.HasPrefix(lowerText, keyword) && len(lowerText) > len(keyword) {
|
||||
// Use original case for the keyword part
|
||||
originalKeyword := originalText[:len(keyword)]
|
||||
l.addKeywordToken(originalKeyword, position, false, true)
|
||||
|
||||
// Extract number part (keeping original case/formatting)
|
||||
numberPart := originalText[len(keyword):]
|
||||
l.addToken(TokenNumber, numberPart, position+len(keyword))
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// addKeywordToken adds a keyword token with flags
|
||||
func (l *Lexer) addKeywordToken(value string, position int, isChapter, isVolume bool) {
|
||||
l.tokens = append(l.tokens, Token{
|
||||
Type: TokenKeyword,
|
||||
Value: value,
|
||||
Position: position,
|
||||
IsChapter: isChapter,
|
||||
IsVolume: isVolume,
|
||||
})
|
||||
}
|
||||
|
||||
// addToken adds a token to the list
|
||||
func (l *Lexer) addToken(tokenType TokenType, value string, position int) {
|
||||
l.tokens = append(l.tokens, Token{
|
||||
Type: tokenType,
|
||||
Value: value,
|
||||
Position: position,
|
||||
})
|
||||
}
|
||||
|
||||
// classifyTokens identifies chapter and volume keywords.
//
// Post-tokenization pass: tokens whose text equals (case-insensitively) a
// chapter or volume keyword are re-typed as TokenKeyword with the matching
// flag set, and tokens containing a known extension are re-typed as
// TokenFileExtension.
func (l *Lexer) classifyTokens() {
	for i := range l.tokens {
		token := &l.tokens[i]

		// Check for chapter keywords (case insensitive)
		lowerValue := strings.ToLower(token.Value)
		for _, keyword := range ChapterKeywords {
			if lowerValue == keyword {
				token.Type = TokenKeyword
				token.IsChapter = true
				break
			}
		}

		// Check for volume keywords (case insensitive)
		for _, keyword := range VolumeKeywords {
			if lowerValue == keyword {
				token.Type = TokenKeyword
				token.IsVolume = true
				break
			}
		}

		// Check for file extensions.
		// NOTE(review): substring match, so a word merely containing "pdf"
		// (e.g. "pdfs") would also be reclassified — presumably harmless for
		// filenames, but worth confirming.
		if strings.Contains(lowerValue, "pdf") || strings.Contains(lowerValue, "cbz") ||
			strings.Contains(lowerValue, "cbr") || strings.Contains(lowerValue, "epub") {
			token.Type = TokenFileExtension
		}
	}
}
|
||||
|
||||
// Parser handles the semantic analysis of tokens produced by the Lexer,
// accumulating chapter/volume numbers and titles into a ScannedChapterFile.
type Parser struct {
	tokens []Token             // token stream from Lexer.Tokenize
	result *ScannedChapterFile // accumulated chapters, volumes and titles
}
|
||||
|
||||
// NewParser creates a new parser instance
|
||||
func NewParser(tokens []Token) *Parser {
|
||||
return &Parser{
|
||||
tokens: tokens,
|
||||
result: &ScannedChapterFile{
|
||||
Chapter: make([]string, 0),
|
||||
Volume: make([]string, 0),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// Parse performs semantic analysis on the tokens and returns the populated
// result. Order matters: chapters are extracted first because title
// extraction (via isLikelyChapterNumber) consults the chapters found so far.
func (p *Parser) Parse() *ScannedChapterFile {
	p.extractChapters()
	p.extractVolumes()
	p.extractTitles()
	p.checkPDF()

	return p.result
}
|
||||
|
||||
// extractChapters finds and extracts chapter numbers.
//
// Two sources are considered: a number within two tokens after a chapter
// keyword (a separator may sit in between), and standalone numbers accepted
// by the isLikelyChapterNumber heuristic. handleChapterRanges then appends
// the upper bound of "N-M" ranges.
func (p *Parser) extractChapters() {
	for i, token := range p.tokens {
		if token.IsChapter {
			// Look for numbers after chapter keyword
			for j := i + 1; j < len(p.tokens) && j < i+3; j++ {
				nextToken := p.tokens[j]
				if nextToken.Type == TokenNumber {
					p.addChapterNumber(nextToken.Value)
					break
				} else if nextToken.Type == TokenSeparator {
					continue
				} else {
					break
				}
			}
		} else if token.Type == TokenNumber && !token.IsVolume {
			// Standalone number might be a chapter
			if p.isLikelyChapterNumber(token, i) {
				p.addChapterNumber(token.Value)
			}
		}
	}

	// Handle ranges by looking for dash-separated numbers
	p.handleChapterRanges()
}
|
||||
|
||||
// handleChapterRanges processes chapter ranges like "1-2" or "001-002"
|
||||
func (p *Parser) handleChapterRanges() {
|
||||
for i := 0; i < len(p.tokens)-2; i++ {
|
||||
if p.tokens[i].Type == TokenNumber &&
|
||||
p.tokens[i+1].Type == TokenSeparator && p.tokens[i+1].Value == "-" &&
|
||||
p.tokens[i+2].Type == TokenNumber {
|
||||
|
||||
// Check if first number is already a chapter
|
||||
firstIsChapter := false
|
||||
for _, ch := range p.result.Chapter {
|
||||
if ch == p.tokens[i].Value {
|
||||
firstIsChapter = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if firstIsChapter {
|
||||
// Add the second number as a chapter too
|
||||
p.result.Chapter = append(p.result.Chapter, p.tokens[i+2].Value)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// extractVolumes finds and extracts volume numbers
|
||||
func (p *Parser) extractVolumes() {
|
||||
for i, token := range p.tokens {
|
||||
if token.IsVolume {
|
||||
// Look for numbers after volume keyword
|
||||
for j := i + 1; j < len(p.tokens) && j < i+3; j++ {
|
||||
nextToken := p.tokens[j]
|
||||
if nextToken.Type == TokenNumber {
|
||||
p.result.Volume = append(p.result.Volume, nextToken.Value)
|
||||
break
|
||||
} else if nextToken.Type == TokenSeparator {
|
||||
continue
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// extractTitles finds manga title and chapter title.
//
// Strategy: locate the first chapter indicator (keyword or likely chapter
// number). Text tokens before it become the manga title, and a "- <text>"
// after its number becomes the chapter title. Without an indicator, fall
// back to a "number - title" pattern, or treat all remaining text as the
// manga title.
func (p *Parser) extractTitles() {
	// Find first chapter keyword or number position
	chapterPos := -1
	for i, token := range p.tokens {
		if token.IsChapter || (token.Type == TokenNumber && p.isLikelyChapterNumber(token, i)) {
			chapterPos = i
			break
		}
	}

	if chapterPos > 0 {
		// Everything before chapter is likely manga title
		titleParts := make([]string, 0)
		for i := 0; i < chapterPos; i++ {
			token := p.tokens[i]
			if token.Type == TokenText && !token.IsVolume && !p.isIgnoredToken(token) {
				titleParts = append(titleParts, token.Value)
			} else if token.Type == TokenNumber && p.isNumberInTitle(token, i, chapterPos) {
				// Include numbers that are part of the title (but not volume indicators)
				titleParts = append(titleParts, token.Value)
			}
		}
		if len(titleParts) > 0 {
			p.result.MangaTitle = strings.Join(titleParts, " ")
		}

		// Look for chapter title after chapter number
		p.extractChapterTitle(chapterPos)
	} else {
		// No clear chapter indicator, check if this is a "number - title" pattern
		if len(p.result.Chapter) > 0 && p.hasChapterTitlePattern() {
			p.extractChapterTitleFromPattern()
		} else {
			// Treat most text as manga title
			p.extractFallbackTitle()
		}
	}
}
|
||||
|
||||
// hasChapterTitlePattern checks for "number - title" pattern
|
||||
func (p *Parser) hasChapterTitlePattern() bool {
|
||||
for i := 0; i < len(p.tokens)-2; i++ {
|
||||
if p.tokens[i].Type == TokenNumber &&
|
||||
p.tokens[i+1].Type == TokenSeparator && p.tokens[i+1].Value == "-" &&
|
||||
i+2 < len(p.tokens) && p.tokens[i+2].Type == TokenText {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// extractChapterTitleFromPattern extracts title from "number - title" pattern
|
||||
func (p *Parser) extractChapterTitleFromPattern() {
|
||||
for i := 0; i < len(p.tokens)-2; i++ {
|
||||
if p.tokens[i].Type == TokenNumber &&
|
||||
p.tokens[i+1].Type == TokenSeparator && p.tokens[i+1].Value == "-" {
|
||||
|
||||
// Collect text after the dash
|
||||
titleParts := make([]string, 0)
|
||||
for j := i + 2; j < len(p.tokens); j++ {
|
||||
token := p.tokens[j]
|
||||
if token.Type == TokenText && !p.isIgnoredToken(token) {
|
||||
titleParts = append(titleParts, token.Value)
|
||||
} else if token.Type == TokenFileExtension {
|
||||
break
|
||||
}
|
||||
}
|
||||
if len(titleParts) > 0 {
|
||||
p.result.ChapterTitle = strings.Join(titleParts, " ")
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// extractFallbackTitle extracts title when no clear chapter indicators
|
||||
func (p *Parser) extractFallbackTitle() {
|
||||
titleParts := make([]string, 0)
|
||||
for _, token := range p.tokens {
|
||||
if token.Type == TokenText && !p.isIgnoredToken(token) {
|
||||
titleParts = append(titleParts, token.Value)
|
||||
}
|
||||
}
|
||||
if len(titleParts) > 0 {
|
||||
p.result.MangaTitle = strings.Join(titleParts, " ")
|
||||
}
|
||||
}
|
||||
|
||||
// addChapterNumber adds a chapter number, handling ranges
|
||||
func (p *Parser) addChapterNumber(value string) {
|
||||
// Check for range indicators in the surrounding tokens
|
||||
if strings.Contains(value, "-") {
|
||||
parts := strings.Split(value, "-")
|
||||
for _, part := range parts {
|
||||
if part != "" {
|
||||
p.result.Chapter = append(p.result.Chapter, strings.TrimSpace(part))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
p.result.Chapter = append(p.result.Chapter, value)
|
||||
}
|
||||
}
|
||||
|
||||
// isLikelyChapterNumber determines if a number token is likely a chapter.
//
// Heuristics: once keyword-derived chapters exist, standalone numbers are
// rejected; a number among the first three tokens is accepted; otherwise a
// number preceded by a dash or space separator is accepted.
// Note: the token parameter is currently unused; only its position matters.
func (p *Parser) isLikelyChapterNumber(token Token, position int) bool {
	// If we already have chapters from keywords, be more strict
	if len(p.result.Chapter) > 0 {
		return false
	}

	// Check context - numbers at the start of filename are likely chapters
	if position < 3 {
		return true
	}

	// Check if preceded by common patterns
	if position > 0 {
		prevToken := p.tokens[position-1]
		if prevToken.Type == TokenSeparator && (prevToken.Value == "-" || prevToken.Value == " ") {
			return true
		}
	}

	return false
}
|
||||
|
||||
// isNumberInTitle determines if a number token should be part of the title
|
||||
func (p *Parser) isNumberInTitle(token Token, position int, chapterPos int) bool {
|
||||
// Don't include numbers that are right before the chapter position
|
||||
if position == chapterPos-1 {
|
||||
return false
|
||||
}
|
||||
|
||||
// Check if this number looks like it's associated with volume
|
||||
if position > 0 {
|
||||
prevToken := p.tokens[position-1]
|
||||
if prevToken.IsVolume {
|
||||
return false // This number belongs to volume
|
||||
}
|
||||
}
|
||||
|
||||
// Small numbers (like 05, 2) that appear early in the title are likely part of title
|
||||
if position < 5 {
|
||||
if val := token.Value; len(val) <= 2 {
|
||||
// Check if this number looks like part of a title (e.g., "Title 05")
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// isIgnoredToken checks if token should be ignored in titles
|
||||
func (p *Parser) isIgnoredToken(token Token) bool {
|
||||
ignoredWords := []string{"digital", "group", "scan", "scans", "team", "raw", "raws"}
|
||||
for _, word := range ignoredWords {
|
||||
if token.Value == word {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
// Check for version indicators that shouldn't be in volume
|
||||
if strings.HasPrefix(token.Value, "v") && len(token.Value) > 1 {
|
||||
remaining := token.Value[1:]
|
||||
// If it's just "v" + digit, it might be version, not volume
|
||||
if len(remaining) > 0 && remaining[0] >= '0' && remaining[0] <= '9' {
|
||||
// Check context - if preceded by a number, it's likely a version
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
// checkPDF sets the PDF flag if file is a PDF
|
||||
func (p *Parser) checkPDF() {
|
||||
for _, token := range p.tokens {
|
||||
if token.Type == TokenFileExtension && strings.Contains(token.Value, "pdf") {
|
||||
p.result.IsPDF = true
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// scanChapterFilename scans the filename and returns a chapter entry if it is a chapter.
//
// Tokenizes the filename with a Lexer and runs the Parser over the result.
// Note: ok is currently always true; callers should treat an empty
// res.Chapter as "no chapter detected".
func scanChapterFilename(filename string) (res *ScannedChapterFile, ok bool) {
	// Create lexer and tokenize
	lexer := NewLexer(filename)
	tokens := lexer.Tokenize()

	// Create parser and parse
	parser := NewParser(tokens)
	res = parser.Parse()

	return res, true
}
|
||||
|
||||
func isFileImage(filename string) bool {
|
||||
ext := strings.ToLower(filepath.Ext(filename))
|
||||
_, ok := ImageExtensions[ext]
|
||||
return ok
|
||||
}
|
||||
|
||||
// extractChapterTitle finds chapter title after chapter number.
//
// Starting at startPos (the chapter indicator), it locates the first number
// token, then the first "-" separator after it; text tokens following that
// dash (up to the file extension, skipping ignored words) are joined into
// the chapter title.
func (p *Parser) extractChapterTitle(startPos int) {
	// Skip to after chapter number
	numberPos := -1
	for i := startPos; i < len(p.tokens); i++ {
		if p.tokens[i].Type == TokenNumber {
			numberPos = i
			break
		}
	}

	if numberPos == -1 {
		return
	}

	// Look for dash separator followed by text
	for i := numberPos + 1; i < len(p.tokens); i++ {
		token := p.tokens[i]
		if token.Type == TokenSeparator && token.Value == "-" {
			// Found dash, collect text after it
			titleParts := make([]string, 0)
			for j := i + 1; j < len(p.tokens); j++ {
				nextToken := p.tokens[j]
				if nextToken.Type == TokenText && !p.isIgnoredToken(nextToken) {
					titleParts = append(titleParts, nextToken.Value)
				} else if nextToken.Type == TokenFileExtension {
					break
				}
			}
			if len(titleParts) > 0 {
				p.result.ChapterTitle = strings.Join(titleParts, " ")
			}
			break
		}
	}
}
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
// ScannedPageFile is the parsed form of a single page image filename.
type ScannedPageFile struct {
	Number   float64 // page number parsed from the name (0 if none found)
	Filename string  // original filename, casing preserved
	Ext      string  // lowercase extension, including the dot
}

// parsePageFilename extracts a page number from an image filename.
// ok is false when no digits are found; res is always non-nil and carries
// the original filename and lowercase extension either way.
func parsePageFilename(filename string) (res *ScannedPageFile, ok bool) {
	res = &ScannedPageFile{Filename: filename}

	lower := strings.ToLower(filename)
	res.Ext = filepath.Ext(lower)
	base := strings.TrimSuffix(lower, res.Ext)

	if base == "" {
		return res, false
	}

	// readNum collects a run of digits and dots starting at from.
	readNum := func(from int) string {
		end := from
		for end < len(base) {
			c := rune(base[end])
			if !unicode.IsDigit(c) && c != '.' {
				break
			}
			end++
		}
		return base[from:end]
	}

	parse := func(s string) (*ScannedPageFile, bool) {
		// Parse errors are ignored: a malformed run (e.g. "1..2") still
		// counts as "found", with Number left at 0, matching prior behavior.
		res.Number, _ = strconv.ParseFloat(s, 64)
		return res, true
	}

	// Names not starting with a digit: try reading from the very start,
	// which handles names that begin with '.' followed by digits.
	if !unicode.IsDigit(rune(base[0])) {
		if s := readNum(0); s != "" {
			return parse(s)
		}
	}

	// Otherwise take the first digit run (plus dots) anywhere in the name.
	if idx := strings.IndexFunc(base, unicode.IsDigit); idx != -1 {
		if s := readNum(idx); s != "" {
			return parse(s)
		}
	}

	return res, false
}
|
||||
483
seanime-2.9.10/internal/manga/providers/local_test.go
Normal file
483
seanime-2.9.10/internal/manga/providers/local_test.go
Normal file
@@ -0,0 +1,483 @@
|
||||
package manga_providers
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"slices"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestScanChapterFilename(t *testing.T) {
|
||||
tests := []struct {
|
||||
filename string
|
||||
expectedChapter []string
|
||||
expectedMangaTitle string
|
||||
expectedChapterTitle string
|
||||
expectedVolume []string
|
||||
}{
|
||||
{
|
||||
filename: "1.cbz",
|
||||
expectedChapter: []string{"1"},
|
||||
expectedMangaTitle: "",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "2.5.pdf",
|
||||
expectedChapter: []string{"2.5"},
|
||||
expectedMangaTitle: "",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Chapter 5.5.pdf",
|
||||
expectedChapter: []string{"5.5"},
|
||||
expectedMangaTitle: "",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "ch 1.cbz",
|
||||
expectedChapter: []string{"1"},
|
||||
expectedMangaTitle: "",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "ch 1.5-2.cbz",
|
||||
expectedChapter: []string{"1.5", "2"},
|
||||
expectedMangaTitle: "",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Some title Chapter 1.cbz",
|
||||
expectedChapter: []string{"1"},
|
||||
expectedMangaTitle: "Some title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Chapter 23 The Fanatics.pdf",
|
||||
expectedChapter: []string{"23"},
|
||||
expectedMangaTitle: "The Fanatics",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "chapter_1.cbz",
|
||||
expectedChapter: []string{"1"},
|
||||
expectedMangaTitle: "",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "1 - Some title.cbz",
|
||||
expectedChapter: []string{"1"},
|
||||
expectedMangaTitle: "",
|
||||
expectedChapterTitle: "Some title",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "30 - Some title.cbz",
|
||||
expectedChapter: []string{"30"},
|
||||
expectedMangaTitle: "",
|
||||
expectedChapterTitle: "Some title",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "[Group] Manga Title - c001 [123456].cbz",
|
||||
expectedChapter: []string{"001"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "[Group] Manga Title - c12.5 [654321].cbz",
|
||||
expectedChapter: []string{"12.5"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "[Group] Manga Title 05 - ch10.cbz",
|
||||
expectedChapter: []string{"10"},
|
||||
expectedMangaTitle: "Manga Title 05",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "[Group] Manga Title - ch10.cbz",
|
||||
expectedChapter: []string{"10"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "[Group] Manga Title - ch_11.cbz",
|
||||
expectedChapter: []string{"11"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "[Group] Manga Title - ch-12.cbz",
|
||||
expectedChapter: []string{"12"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title v01 c001.cbz",
|
||||
expectedChapter: []string{"001"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{"01"},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title v01 c001.5.cbz",
|
||||
expectedChapter: []string{"001.5"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{"01"},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - 003.cbz",
|
||||
expectedChapter: []string{"003"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - 003.5.cbz",
|
||||
expectedChapter: []string{"003.5"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - 3.5 (Digital).cbz",
|
||||
expectedChapter: []string{"3.5"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - 10 (Digital) [Group].cbz",
|
||||
expectedChapter: []string{"10"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - chp_15.cbz",
|
||||
expectedChapter: []string{"15"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - chp-16.cbz",
|
||||
expectedChapter: []string{"16"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - chp17.cbz",
|
||||
expectedChapter: []string{"17"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - chp 18.cbz",
|
||||
expectedChapter: []string{"18"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - 001 (v2).cbz",
|
||||
expectedChapter: []string{"001"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - 001v2.cbz",
|
||||
expectedChapter: []string{"001"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{"2"},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - 001 [v2].cbz",
|
||||
expectedChapter: []string{"001"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - 001 [Digital] [v2].cbz",
|
||||
expectedChapter: []string{"001"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - 001-002.cbz",
|
||||
expectedChapter: []string{"001", "002"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - 001-001.5.cbz",
|
||||
expectedChapter: []string{"001", "001.5"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - 1-2.cbz",
|
||||
expectedChapter: []string{"1", "2"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - 1.5-2.cbz",
|
||||
expectedChapter: []string{"1.5", "2"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - 1 (Sample).cbz",
|
||||
expectedChapter: []string{"1"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - 1 (Preview).cbz",
|
||||
expectedChapter: []string{"1"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - 1 (Special Edition).cbz",
|
||||
expectedChapter: []string{"1"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - 1 (Digital) (Official).cbz",
|
||||
expectedChapter: []string{"1"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - 1.cbz",
|
||||
expectedChapter: []string{"1"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - 1.0.cbz",
|
||||
expectedChapter: []string{"1.0"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - 01.cbz",
|
||||
expectedChapter: []string{"01"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - 01.5.cbz",
|
||||
expectedChapter: []string{"01.5"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - 001.cbz",
|
||||
expectedChapter: []string{"001"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - 001.5.cbz",
|
||||
expectedChapter: []string{"001.5"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - ch001.cbz",
|
||||
expectedChapter: []string{"001"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - ch001.5.cbz",
|
||||
expectedChapter: []string{"001.5"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - ch_001.cbz",
|
||||
expectedChapter: []string{"001"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - ch_001.5.cbz",
|
||||
expectedChapter: []string{"001.5"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - ch-001.cbz",
|
||||
expectedChapter: []string{"001"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - ch-001.5.cbz",
|
||||
expectedChapter: []string{"001.5"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - chp001.cbz",
|
||||
expectedChapter: []string{"001"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - chp_001.cbz",
|
||||
expectedChapter: []string{"001"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - chp_001.5.cbz",
|
||||
expectedChapter: []string{"001.5"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - chp-001.cbz",
|
||||
expectedChapter: []string{"001"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
{
|
||||
filename: "Manga Title - chp-001.5.cbz",
|
||||
expectedChapter: []string{"001.5"},
|
||||
expectedMangaTitle: "Manga Title",
|
||||
expectedChapterTitle: "",
|
||||
expectedVolume: []string{},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.filename, func(t *testing.T) {
|
||||
res, ok := scanChapterFilename(tt.filename)
|
||||
if !ok {
|
||||
t.Errorf("Failed to scan chapter filename: %s", tt.filename)
|
||||
}
|
||||
require.Equalf(t, tt.expectedChapter, res.Chapter, "Expected chapter '%v' for '%s' but got '%v'", tt.expectedChapter, tt.filename, res.Chapter)
|
||||
require.Equalf(t, tt.expectedMangaTitle, res.MangaTitle, "Expected manga title '%v' for '%s' but got '%v'", tt.expectedMangaTitle, tt.filename, res.MangaTitle)
|
||||
require.Equalf(t, tt.expectedChapterTitle, res.ChapterTitle, "Expected chapter title '%v' for '%s' but got '%v'", tt.expectedChapterTitle, tt.filename, res.ChapterTitle)
|
||||
require.Equalf(t, tt.expectedVolume, res.Volume, "Expected volume '%v' for '%s' but got '%v'", tt.expectedVolume, tt.filename, res.Volume)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestPageSorting(t *testing.T) {
|
||||
tests := []struct {
|
||||
expectedOrder []string
|
||||
}{
|
||||
{
|
||||
expectedOrder: []string{"1149-000.jpg", "1149-001.jpg", "1149-002.jpg", "1149-019.jpg", "1149-019b.jpg", "1149-020.jpg"},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(fmt.Sprintf("%v", tt.expectedOrder), func(t *testing.T) {
|
||||
newSlice := tt.expectedOrder
|
||||
slices.SortFunc(newSlice, func(a, b string) int {
|
||||
return strings.Compare(a, b)
|
||||
})
|
||||
for i, filename := range tt.expectedOrder {
|
||||
require.Equalf(t, filename, newSlice[i], "Expected order '%v' for '%s' but got '%v'", tt.expectedOrder, tt.expectedOrder[i], filename)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestParsePageFilename(t *testing.T) {
|
||||
tests := []struct {
|
||||
filename string
|
||||
expected float64
|
||||
}{
|
||||
{
|
||||
filename: "1.jpg",
|
||||
expected: 1,
|
||||
},
|
||||
{
|
||||
filename: "1.5.jpg",
|
||||
expected: 1.5,
|
||||
},
|
||||
{
|
||||
filename: "Page 001.jpg",
|
||||
expected: 1,
|
||||
},
|
||||
{
|
||||
filename: "1.55.jpg",
|
||||
expected: 1.55,
|
||||
},
|
||||
{
|
||||
filename: "2.5 -.jpg",
|
||||
expected: 2.5,
|
||||
},
|
||||
{
|
||||
filename: "page_27.jpg",
|
||||
expected: 27,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.filename, func(t *testing.T) {
|
||||
res, ok := parsePageFilename(tt.filename)
|
||||
if !ok {
|
||||
t.Errorf("Failed to parse page filename: %s", tt.filename)
|
||||
}
|
||||
require.Equalf(t, tt.expected, res.Number, "Expected number '%v' for '%s' but got '%v'", tt.expected, tt.filename, res.Number)
|
||||
})
|
||||
}
|
||||
}
|
||||
388
seanime-2.9.10/internal/manga/providers/mangadex.go
Normal file
388
seanime-2.9.10/internal/manga/providers/mangadex.go
Normal file
@@ -0,0 +1,388 @@
|
||||
package manga_providers
|
||||
|
||||
import (
|
||||
"cmp"
|
||||
"fmt"
|
||||
"net/url"
|
||||
hibikemanga "seanime/internal/extension/hibike/manga"
|
||||
"seanime/internal/util"
|
||||
"seanime/internal/util/comparison"
|
||||
"slices"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/imroc/req/v3"
|
||||
"github.com/rs/zerolog"
|
||||
)
|
||||
|
||||
type (
	// Mangadex is a manga provider backed by the Mangadex REST API.
	Mangadex struct {
		Url       string // API base URL (https://api.mangadex.org)
		BaseUrl   string // website base URL, used to build cover image links
		UserAgent string
		Client    *req.Client
		logger    *zerolog.Logger
	}

	// MangadexManga is a single manga entry as returned by the /manga endpoint.
	MangadexManga struct {
		ID            string `json:"id"`
		Type          string `json:"type"`
		Attributes    MangadexMangeAttributes
		Relationships []MangadexMangaRelationship `json:"relationships"`
	}

	// MangadexMangeAttributes holds the title and year metadata of a manga.
	// Title and AltTitles map ISO-ish language codes (e.g. "en", "ja", "ja-ro")
	// to title strings.
	MangadexMangeAttributes struct {
		AltTitles []map[string]string `json:"altTitles"`
		Title     map[string]string   `json:"title"`
		Year      int                 `json:"year"`
	}

	// MangadexMangaRelationship links a manga to a related entity
	// (e.g. Type "cover_art"); Attributes is only populated when the
	// relationship is expanded via includes[].
	MangadexMangaRelationship struct {
		ID         string                 `json:"id"`
		Type       string                 `json:"type"`
		Related    string                 `json:"related"`
		Attributes map[string]interface{} `json:"attributes"`
	}

	// MangadexErrorResponse is a single error object from the API's
	// "errors" array.
	MangadexErrorResponse struct {
		ID     string `json:"id"`
		Status string `json:"status"`
		Code   string `json:"code"`
		Title  string `json:"title"`
		Detail string `json:"detail"`
	}

	// MangadexChapterData is a chapter entry from the manga feed endpoint.
	MangadexChapterData struct {
		ID         string                    `json:"id"`
		Attributes MangadexChapterAttributes `json:"attributes"`
	}

	// MangadexChapterAttributes holds per-chapter metadata; Chapter is the
	// chapter number as a string (may be empty for oneshots).
	MangadexChapterAttributes struct {
		Title     string `json:"title"`
		Volume    string `json:"volume"`
		Chapter   string `json:"chapter"`
		UpdatedAt string `json:"updatedAt"`
	}
)
|
||||
|
||||
// DEVNOTE: Each chapter ID is a unique string provided by Mangadex
|
||||
|
||||
func NewMangadex(logger *zerolog.Logger) *Mangadex {
|
||||
client := req.C().
|
||||
SetUserAgent(util.GetRandomUserAgent()).
|
||||
SetTimeout(60 * time.Second).
|
||||
EnableInsecureSkipVerify().
|
||||
ImpersonateChrome()
|
||||
|
||||
return &Mangadex{
|
||||
Url: "https://api.mangadex.org",
|
||||
BaseUrl: "https://mangadex.org",
|
||||
Client: client,
|
||||
UserAgent: util.GetRandomUserAgent(),
|
||||
logger: logger,
|
||||
}
|
||||
}
|
||||
|
||||
func (md *Mangadex) GetSettings() hibikemanga.Settings {
|
||||
return hibikemanga.Settings{
|
||||
SupportsMultiScanlator: false,
|
||||
SupportsMultiLanguage: false,
|
||||
}
|
||||
}
|
||||
|
||||
// Search queries the Mangadex API for manga matching opts.Query and returns
// the results, each scored with a Sorensen-Dice similarity rating against
// the query. Returns ErrNoResults when nothing is found.
func (md *Mangadex) Search(opts hibikemanga.SearchOptions) ([]*hibikemanga.SearchResult, error) {
	ret := make([]*hibikemanga.SearchResult, 0)

	retManga := make([]*MangadexManga, 0)

	// Only a single page of 25 results is fetched; the loop exists so the
	// bound can be raised to request more pages (offset = 25*i).
	for i := range 1 {
		uri := fmt.Sprintf("%s/manga?title=%s&limit=25&offset=%d&order[relevance]=desc&contentRating[]=safe&contentRating[]=suggestive&includes[]=cover_art", md.Url, url.QueryEscape(opts.Query), 25*i)

		var data struct {
			Data []*MangadexManga `json:"data"`
		}

		resp, err := md.Client.R().
			SetHeader("Referer", "https://google.com").
			SetSuccessResult(&data).
			Get(uri)

		if err != nil {
			md.logger.Error().Err(err).Msg("mangadex: Failed to send request")
			return nil, err
		}

		if !resp.IsSuccessState() {
			md.logger.Error().Str("status", resp.Status).Msg("mangadex: Request failed")
			return nil, fmt.Errorf("failed to decode response: status %s", resp.Status)
		}

		retManga = append(retManga, data.Data...)
	}

	for _, manga := range retManga {
		// Collect English and Japanese ("jp"/"ja") alt titles as synonyms.
		var altTitles []string
		for _, title := range manga.Attributes.AltTitles {
			altTitle, ok := title["en"]
			if ok {
				altTitles = append(altTitles, altTitle)
			}
			altTitle, ok = title["jp"]
			if ok {
				altTitles = append(altTitles, altTitle)
			}
			altTitle, ok = title["ja"]
			if ok {
				altTitles = append(altTitles, altTitle)
			}
		}
		t := getTitle(manga.Attributes)

		// Build the cover image URL from the cover_art relationship.
		var img string
		for _, relation := range manga.Relationships {
			if relation.Type == "cover_art" {
				fn, ok := relation.Attributes["fileName"].(string)
				if ok {
					img = fmt.Sprintf("%s/covers/%s/%s.512.jpg", md.BaseUrl, manga.ID, fn)
				} else {
					// Fallback when the relationship attributes were not expanded.
					img = fmt.Sprintf("%s/covers/%s/%s.jpg.512.jpg", md.BaseUrl, manga.ID, relation.ID)
				}
			}
		}

		// NOTE(review): format is computed but never used below — dead code
		// or a field that was dropped from SearchResult; confirm.
		format := strings.ToUpper(manga.Type)
		if format == "ADAPTATION" {
			format = "MANGA"
		}

		// NOTE(review): the second (ok) result is discarded; if the helper
		// can return a nil result, compRes.Rating below would panic —
		// confirm FindBestMatchWithSorensenDice's contract for a non-empty
		// candidate list.
		compRes, _ := comparison.FindBestMatchWithSorensenDice(&opts.Query, []*string{&t})

		result := &hibikemanga.SearchResult{
			ID:           manga.ID,
			Title:        t,
			Synonyms:     altTitles,
			Image:        img,
			Year:         manga.Attributes.Year,
			SearchRating: compRes.Rating,
			Provider:     string(MangadexProvider),
		}

		ret = append(ret, result)
	}

	if len(ret) == 0 {
		md.logger.Error().Msg("mangadex: No results found")
		return nil, ErrNoResults
	}

	md.logger.Info().Int("count", len(ret)).Msg("mangadex: Found results")

	return ret, nil
}
|
||||
|
||||
// FindChapters retrieves up to two pages (500 entries each) of the English
// chapter feed for the given manga ID and returns the chapters deduplicated
// by chapter number, ordered oldest-first with sequential indices.
// Returns ErrNoChapters when the feed yields nothing.
func (md *Mangadex) FindChapters(id string) ([]*hibikemanga.ChapterDetails, error) {
	ret := make([]*hibikemanga.ChapterDetails, 0)

	md.logger.Debug().Str("mangaId", id).Msg("mangadex: Finding chapters")

	for page := 0; page <= 1; page++ {
		uri := fmt.Sprintf("%s/manga/%s/feed?limit=500&translatedLanguage%%5B%%5D=en&includes[]=scanlation_group&includes[]=user&order[volume]=desc&order[chapter]=desc&offset=%d&contentRating[]=safe&contentRating[]=suggestive&contentRating[]=erotica&contentRating[]=pornographic", md.Url, id, 500*page)

		var data struct {
			Result string                  `json:"result"`
			Errors []MangadexErrorResponse `json:"errors"`
			Data   []MangadexChapterData   `json:"data"`
		}

		resp, err := md.Client.R().
			SetSuccessResult(&data).
			Get(uri)

		if err != nil {
			md.logger.Error().Err(err).Msg("mangadex: Failed to send request")
			return nil, err
		}

		if !resp.IsSuccessState() {
			md.logger.Error().Str("status", resp.Status).Msg("mangadex: Request failed")
			return nil, fmt.Errorf("failed to decode response: status %s", resp.Status)
		}

		if data.Result == "error" {
			md.logger.Error().Str("error", data.Errors[0].Title).Str("detail", data.Errors[0].Detail).Msg("mangadex: Could not find chapters")
			return nil, fmt.Errorf("could not find chapters: %s", data.Errors[0].Detail)
		}

		// The feed was requested in descending order; reverse so chapters
		// are processed oldest-first.
		slices.Reverse(data.Data)

		// Deduplicate by chapter number. NOTE(review): the map is rebuilt
		// per page, so a chapter number repeated across the page boundary
		// would be appended twice — confirm against live feed data.
		chapterMap := make(map[string]*hibikemanga.ChapterDetails)
		idx := uint(len(ret)) // indices continue across pages
		for _, chapter := range data.Data {

			// Skip entries without a chapter number (e.g. oneshots).
			if chapter.Attributes.Chapter == "" {
				continue
			}

			title := "Chapter " + fmt.Sprintf("%s", chapter.Attributes.Chapter) + " "

			// First occurrence of a chapter number wins.
			if _, ok := chapterMap[chapter.Attributes.Chapter]; ok {
				continue
			}

			chapterMap[chapter.Attributes.Chapter] = &hibikemanga.ChapterDetails{
				ID:        chapter.ID,
				Title:     title,
				Index:     idx,
				Chapter:   chapter.Attributes.Chapter,
				UpdatedAt: chapter.Attributes.UpdatedAt,
				Provider:  string(MangadexProvider),
			}
			idx++
		}

		chapters := make([]*hibikemanga.ChapterDetails, 0, len(chapterMap))
		for _, chapter := range chapterMap {
			chapters = append(chapters, chapter)
		}

		// Map iteration order is random; restore insertion order by Index.
		slices.SortStableFunc(chapters, func(i, j *hibikemanga.ChapterDetails) int {
			return cmp.Compare(i.Index, j.Index)
		})

		if len(chapters) > 0 {
			ret = append(ret, chapters...)
		} else {
			// An empty page means the feed is exhausted; stop paging.
			break
		}
	}

	if len(ret) == 0 {
		md.logger.Error().Msg("mangadex: No chapters found")
		return nil, ErrNoChapters
	}

	md.logger.Info().Int("count", len(ret)).Msg("mangadex: Found chapters")

	return ret, nil
}
|
||||
|
||||
func (md *Mangadex) FindChapterPages(id string) ([]*hibikemanga.ChapterPage, error) {
|
||||
ret := make([]*hibikemanga.ChapterPage, 0)
|
||||
|
||||
md.logger.Debug().Str("chapterId", id).Msg("mangadex: Finding chapter pages")
|
||||
|
||||
uri := fmt.Sprintf("%s/at-home/server/%s", md.Url, id)
|
||||
|
||||
var data struct {
|
||||
BaseUrl string `json:"baseUrl"`
|
||||
Chapter struct {
|
||||
Hash string `json:"hash"`
|
||||
Data []string `json:"data"`
|
||||
}
|
||||
}
|
||||
|
||||
resp, err := md.Client.R().
|
||||
SetHeader("User-Agent", util.GetRandomUserAgent()).
|
||||
SetSuccessResult(&data).
|
||||
Get(uri)
|
||||
|
||||
if err != nil {
|
||||
md.logger.Error().Err(err).Msg("mangadex: Failed to get chapter pages")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if !resp.IsSuccessState() {
|
||||
md.logger.Error().Str("status", resp.Status).Msg("mangadex: Request failed")
|
||||
return nil, fmt.Errorf("failed to decode response: status %s", resp.Status)
|
||||
}
|
||||
|
||||
for i, page := range data.Chapter.Data {
|
||||
ret = append(ret, &hibikemanga.ChapterPage{
|
||||
Provider: string(MangadexProvider),
|
||||
URL: fmt.Sprintf("%s/data/%s/%s", data.BaseUrl, data.Chapter.Hash, page),
|
||||
Index: i,
|
||||
Headers: map[string]string{
|
||||
"Referer": "https://mangadex.org",
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
if len(ret) == 0 {
|
||||
md.logger.Error().Msg("mangadex: No pages found")
|
||||
return nil, ErrNoPages
|
||||
}
|
||||
|
||||
md.logger.Info().Int("count", len(ret)).Msg("mangadex: Found pages")
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
func getTitle(attributes MangadexMangeAttributes) string {
|
||||
altTitles := attributes.AltTitles
|
||||
title := attributes.Title
|
||||
|
||||
enTitle := title["en"]
|
||||
if enTitle != "" {
|
||||
return enTitle
|
||||
}
|
||||
|
||||
var enAltTitle string
|
||||
for _, altTitle := range altTitles {
|
||||
if value, ok := altTitle["en"]; ok {
|
||||
enAltTitle = value
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if enAltTitle != "" && util.IsMostlyLatinString(enAltTitle) {
|
||||
return enAltTitle
|
||||
}
|
||||
|
||||
// Check for other language titles
|
||||
if jaRoTitle, ok := title["ja-ro"]; ok {
|
||||
return jaRoTitle
|
||||
}
|
||||
if jpRoTitle, ok := title["jp-ro"]; ok {
|
||||
return jpRoTitle
|
||||
}
|
||||
if jpTitle, ok := title["jp"]; ok {
|
||||
return jpTitle
|
||||
}
|
||||
if jaTitle, ok := title["ja"]; ok {
|
||||
return jaTitle
|
||||
}
|
||||
if koTitle, ok := title["ko"]; ok {
|
||||
return koTitle
|
||||
}
|
||||
|
||||
// Check alt titles for other languages
|
||||
for _, altTitle := range altTitles {
|
||||
if value, ok := altTitle["ja-ro"]; ok {
|
||||
return value
|
||||
}
|
||||
}
|
||||
for _, altTitle := range altTitles {
|
||||
if value, ok := altTitle["jp-ro"]; ok {
|
||||
return value
|
||||
}
|
||||
}
|
||||
for _, altTitle := range altTitles {
|
||||
if value, ok := altTitle["jp"]; ok {
|
||||
return value
|
||||
}
|
||||
}
|
||||
for _, altTitle := range altTitles {
|
||||
if value, ok := altTitle["ja"]; ok {
|
||||
return value
|
||||
}
|
||||
}
|
||||
for _, altTitle := range altTitles {
|
||||
if value, ok := altTitle["ko"]; ok {
|
||||
return value
|
||||
}
|
||||
}
|
||||
|
||||
return ""
|
||||
}
|
||||
153
seanime-2.9.10/internal/manga/providers/mangadex_test.go
Normal file
153
seanime-2.9.10/internal/manga/providers/mangadex_test.go
Normal file
@@ -0,0 +1,153 @@
|
||||
package manga_providers
|
||||
|
||||
import (
|
||||
"github.com/stretchr/testify/assert"
|
||||
hibikemanga "seanime/internal/extension/hibike/manga"
|
||||
"seanime/internal/util"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestMangadex_Search(t *testing.T) {
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
query string
|
||||
}{
|
||||
{
|
||||
name: "One Piece",
|
||||
query: "One Piece",
|
||||
},
|
||||
{
|
||||
name: "Jujutsu Kaisen",
|
||||
query: "Jujutsu Kaisen",
|
||||
},
|
||||
{
|
||||
name: "Boku no Kokoro no Yabai Yatsu",
|
||||
query: "Boku no Kokoro no Yabai Yatsu",
|
||||
},
|
||||
}
|
||||
|
||||
mangadex := NewMangadex(util.NewLogger())
|
||||
|
||||
for _, tt := range tests {
|
||||
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
|
||||
searchRes, err := mangadex.Search(hibikemanga.SearchOptions{
|
||||
Query: tt.query,
|
||||
})
|
||||
if assert.NoError(t, err, "mangadex.Search() error") {
|
||||
assert.NotEmpty(t, searchRes, "search result is empty")
|
||||
|
||||
for _, res := range searchRes {
|
||||
t.Logf("Title: %s", res.Title)
|
||||
t.Logf("\tID: %s", res.ID)
|
||||
t.Logf("\tYear: %d", res.Year)
|
||||
t.Logf("\tImage: %s", res.Image)
|
||||
t.Logf("\tProvider: %s", res.Provider)
|
||||
t.Logf("\tSearchRating: %f", res.SearchRating)
|
||||
t.Logf("\tSynonyms: %v", res.Synonyms)
|
||||
t.Log("--------------------------------------------------")
|
||||
}
|
||||
}
|
||||
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func TestMangadex_FindChapters(t *testing.T) {
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
id string
|
||||
atLeast int
|
||||
}{
|
||||
//{
|
||||
// name: "One Piece",
|
||||
// id: "One-Piece",
|
||||
// atLeast: 1100,
|
||||
//},
|
||||
{
|
||||
name: "Jujutsu Kaisen",
|
||||
id: "c52b2ce3-7f95-469c-96b0-479524fb7a1a",
|
||||
atLeast: 250,
|
||||
},
|
||||
{
|
||||
name: "The Dangers in My Heart",
|
||||
id: "3df1a9a3-a1be-47a3-9e90-9b3e55b1d0ac",
|
||||
atLeast: 141,
|
||||
},
|
||||
}
|
||||
|
||||
mangadex := NewMangadex(util.NewLogger())
|
||||
|
||||
for _, tt := range tests {
|
||||
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
|
||||
chapters, err := mangadex.FindChapters(tt.id)
|
||||
if assert.NoError(t, err, "mangadex.FindChapters() error") {
|
||||
|
||||
assert.NotEmpty(t, chapters, "chapters is empty")
|
||||
|
||||
assert.GreaterOrEqual(t, len(chapters), tt.atLeast, "chapters length is less than expected")
|
||||
|
||||
for _, chapter := range chapters {
|
||||
t.Logf("Title: %s", chapter.Title)
|
||||
t.Logf("\tSlug: %s", chapter.ID)
|
||||
t.Logf("\tURL: %s", chapter.URL)
|
||||
t.Logf("\tIndex: %d", chapter.Index)
|
||||
t.Logf("\tUpdatedAt: %s", chapter.UpdatedAt)
|
||||
t.Log("--------------------------------------------------")
|
||||
}
|
||||
}
|
||||
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func TestMangadex_FindChapterPages(t *testing.T) {
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
id string
|
||||
chapterId string
|
||||
}{
|
||||
{
|
||||
name: "The Dangers in My Heart",
|
||||
id: "3df1a9a3-a1be-47a3-9e90-9b3e55b1d0ac",
|
||||
chapterId: "5145ea39-be4b-4bf9-81e7-4f90961db857", // Chapter 1
|
||||
},
|
||||
{
|
||||
name: "Kagurabachi",
|
||||
id: "",
|
||||
chapterId: "9c9652fc-10d2-40b3-9382-16fb072d3068", // Chapter 1
|
||||
},
|
||||
}
|
||||
|
||||
mangadex := NewMangadex(util.NewLogger())
|
||||
|
||||
for _, tt := range tests {
|
||||
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
|
||||
pages, err := mangadex.FindChapterPages(tt.chapterId)
|
||||
if assert.NoError(t, err, "mangadex.FindChapterPages() error") {
|
||||
assert.NotEmpty(t, pages, "pages is empty")
|
||||
|
||||
for _, page := range pages {
|
||||
t.Logf("Index: %d", page.Index)
|
||||
t.Logf("\tURL: %s", page.URL)
|
||||
t.Log("--------------------------------------------------")
|
||||
}
|
||||
}
|
||||
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
220
seanime-2.9.10/internal/manga/providers/mangafire.go
Normal file
220
seanime-2.9.10/internal/manga/providers/mangafire.go
Normal file
@@ -0,0 +1,220 @@
|
||||
package manga_providers
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/url"
|
||||
hibikemanga "seanime/internal/extension/hibike/manga"
|
||||
"seanime/internal/util"
|
||||
"seanime/internal/util/comparison"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/gocolly/colly"
|
||||
"github.com/imroc/req/v3"
|
||||
"github.com/rs/zerolog"
|
||||
)
|
||||
|
||||
// DEVNOTE: Shelved due to WAF captcha
|
||||
|
||||
type (
	// Mangafire is a manga provider that scrapes the mangafire.to website.
	// Shelved due to a WAF captcha (see DEVNOTE); chapter/page lookup is
	// not implemented.
	Mangafire struct {
		Url       string      // website base URL
		Client    *req.Client // its Transport is reused by the colly collectors
		UserAgent string
		logger    *zerolog.Logger
	}
)
|
||||
|
||||
func NewMangafire(logger *zerolog.Logger) *Mangafire {
|
||||
client := req.C().
|
||||
SetUserAgent(util.GetRandomUserAgent()).
|
||||
SetTimeout(60 * time.Second).
|
||||
EnableInsecureSkipVerify().
|
||||
ImpersonateChrome()
|
||||
|
||||
return &Mangafire{
|
||||
Url: "https://mangafire.to",
|
||||
Client: client,
|
||||
UserAgent: util.GetRandomUserAgent(),
|
||||
logger: logger,
|
||||
}
|
||||
}
|
||||
|
||||
func (mf *Mangafire) GetSettings() hibikemanga.Settings {
|
||||
return hibikemanga.Settings{
|
||||
SupportsMultiScanlator: false,
|
||||
SupportsMultiLanguage: false,
|
||||
}
|
||||
}
|
||||
|
||||
func (mf *Mangafire) Search(opts hibikemanga.SearchOptions) ([]*hibikemanga.SearchResult, error) {
|
||||
results := make([]*hibikemanga.SearchResult, 0)
|
||||
|
||||
mf.logger.Debug().Str("query", opts.Query).Msg("mangafire: Searching manga")
|
||||
|
||||
yearStr := ""
|
||||
if opts.Year > 0 {
|
||||
yearStr = fmt.Sprintf("&year=%%5B%%5D=%d", opts.Year)
|
||||
}
|
||||
uri := fmt.Sprintf("%s/filter?keyword=%s%s&sort=recently_updated", mf.Url, url.QueryEscape(opts.Query), yearStr)
|
||||
|
||||
c := colly.NewCollector(
|
||||
colly.UserAgent(util.GetRandomUserAgent()),
|
||||
)
|
||||
|
||||
c.WithTransport(mf.Client.Transport)
|
||||
|
||||
type ToVisit struct {
|
||||
ID string
|
||||
Title string
|
||||
Image string
|
||||
}
|
||||
toVisit := make([]ToVisit, 0)
|
||||
|
||||
c.OnHTML("main div.container div.original div.unit", func(e *colly.HTMLElement) {
|
||||
id := e.ChildAttr("a", "href")
|
||||
if len(toVisit) >= 15 || id == "" {
|
||||
return
|
||||
}
|
||||
title := ""
|
||||
e.ForEachWithBreak("div.info a", func(i int, e *colly.HTMLElement) bool {
|
||||
if i == 0 && e.Text != "" {
|
||||
title = strings.TrimSpace(e.Text)
|
||||
return false
|
||||
}
|
||||
return true
|
||||
})
|
||||
obj := ToVisit{
|
||||
ID: id,
|
||||
Title: title,
|
||||
Image: e.ChildAttr("img", "src"),
|
||||
}
|
||||
if obj.Title != "" && obj.ID != "" {
|
||||
toVisit = append(toVisit, obj)
|
||||
}
|
||||
})
|
||||
|
||||
err := c.Visit(uri)
|
||||
if err != nil {
|
||||
mf.logger.Error().Err(err).Msg("mangafire: Failed to visit")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
wg := sync.WaitGroup{}
|
||||
wg.Add(len(toVisit))
|
||||
|
||||
for _, v := range toVisit {
|
||||
go func(tv ToVisit) {
|
||||
defer wg.Done()
|
||||
|
||||
c2 := colly.NewCollector(
|
||||
colly.UserAgent(mf.UserAgent),
|
||||
)
|
||||
|
||||
c2.WithTransport(mf.Client.Transport)
|
||||
|
||||
result := &hibikemanga.SearchResult{
|
||||
Provider: MangafireProvider,
|
||||
}
|
||||
|
||||
// Synonyms
|
||||
c2.OnHTML("main div#manga-page div.info h6", func(e *colly.HTMLElement) {
|
||||
parts := strings.Split(e.Text, "; ")
|
||||
for i, v := range parts {
|
||||
parts[i] = strings.TrimSpace(v)
|
||||
}
|
||||
syn := strings.Join(parts, "")
|
||||
if syn != "" {
|
||||
result.Synonyms = append(result.Synonyms, syn)
|
||||
}
|
||||
})
|
||||
|
||||
// Year
|
||||
c2.OnHTML("main div#manga-page div.meta", func(e *colly.HTMLElement) {
|
||||
if result.Year != 0 || e.Text == "" {
|
||||
return
|
||||
}
|
||||
parts := strings.Split(e.Text, "Published: ")
|
||||
if len(parts) < 2 {
|
||||
return
|
||||
}
|
||||
parts2 := strings.Split(parts[1], " to")
|
||||
if len(parts2) < 2 {
|
||||
return
|
||||
}
|
||||
result.Year = util.StringToIntMust(strings.TrimSpace(parts2[0]))
|
||||
})
|
||||
|
||||
result.ID = tv.ID
|
||||
result.Title = tv.Title
|
||||
result.Image = tv.Image
|
||||
|
||||
err := c2.Visit(fmt.Sprintf("%s/%s", mf.Url, tv.ID))
|
||||
if err != nil {
|
||||
mf.logger.Error().Err(err).Str("id", tv.ID).Msg("mangafire: Failed to visit manga page")
|
||||
return
|
||||
}
|
||||
|
||||
// Comparison
|
||||
compTitles := []*string{&result.Title}
|
||||
for _, syn := range result.Synonyms {
|
||||
if !util.IsMostlyLatinString(syn) {
|
||||
continue
|
||||
}
|
||||
compTitles = append(compTitles, &syn)
|
||||
}
|
||||
compRes, _ := comparison.FindBestMatchWithSorensenDice(&opts.Query, compTitles)
|
||||
|
||||
result.SearchRating = compRes.Rating
|
||||
|
||||
results = append(results, result)
|
||||
}(v)
|
||||
}
|
||||
|
||||
wg.Wait()
|
||||
|
||||
if len(results) == 0 {
|
||||
mf.logger.Error().Str("query", opts.Query).Msg("mangafire: No results found")
|
||||
return nil, ErrNoResults
|
||||
}
|
||||
|
||||
mf.logger.Info().Int("count", len(results)).Msg("mangafire: Found results")
|
||||
|
||||
return results, nil
|
||||
}
|
||||
|
||||
func (mf *Mangafire) FindChapters(id string) ([]*hibikemanga.ChapterDetails, error) {
|
||||
ret := make([]*hibikemanga.ChapterDetails, 0)
|
||||
|
||||
mf.logger.Debug().Str("mangaId", id).Msg("mangafire: Finding chapters")
|
||||
|
||||
// code
|
||||
|
||||
if len(ret) == 0 {
|
||||
mf.logger.Error().Str("mangaId", id).Msg("mangafire: No chapters found")
|
||||
return nil, ErrNoChapters
|
||||
}
|
||||
|
||||
mf.logger.Info().Int("count", len(ret)).Msg("mangafire: Found chapters")
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (mf *Mangafire) FindChapterPages(id string) ([]*hibikemanga.ChapterPage, error) {
|
||||
ret := make([]*hibikemanga.ChapterPage, 0)
|
||||
|
||||
mf.logger.Debug().Str("chapterId", id).Msg("mangafire: Finding chapter pages")
|
||||
|
||||
// code
|
||||
|
||||
if len(ret) == 0 {
|
||||
mf.logger.Error().Str("chapterId", id).Msg("mangafire: No pages found")
|
||||
return nil, ErrNoPages
|
||||
}
|
||||
|
||||
mf.logger.Info().Int("count", len(ret)).Msg("mangafire: Found pages")
|
||||
|
||||
return ret, nil
|
||||
|
||||
}
|
||||
132
seanime-2.9.10/internal/manga/providers/mangafire_test.go
Normal file
132
seanime-2.9.10/internal/manga/providers/mangafire_test.go
Normal file
@@ -0,0 +1,132 @@
|
||||
package manga_providers
|
||||
|
||||
import (
|
||||
"github.com/stretchr/testify/assert"
|
||||
hibikemanga "seanime/internal/extension/hibike/manga"
|
||||
"seanime/internal/util"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestMangafire_Search(t *testing.T) {
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
query string
|
||||
}{
|
||||
{
|
||||
name: "Boku no Kokoro no Yabai Yatsu",
|
||||
query: "Boku no Kokoro no Yabai Yatsu",
|
||||
},
|
||||
{
|
||||
name: "Dangers in My Heart",
|
||||
query: "Dangers in My Heart",
|
||||
},
|
||||
}
|
||||
|
||||
provider := NewMangafire(util.NewLogger())
|
||||
|
||||
for _, tt := range tests {
|
||||
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
|
||||
searchRes, err := provider.Search(hibikemanga.SearchOptions{
|
||||
Query: tt.query,
|
||||
})
|
||||
if assert.NoError(t, err, "provider.Search() error") {
|
||||
assert.NotEmpty(t, searchRes, "search result is empty")
|
||||
|
||||
for _, res := range searchRes {
|
||||
t.Logf("Title: %s", res.Title)
|
||||
t.Logf("\tID: %s", res.ID)
|
||||
t.Logf("\tYear: %d", res.Year)
|
||||
t.Logf("\tImage: %s", res.Image)
|
||||
t.Logf("\tProvider: %s", res.Provider)
|
||||
t.Logf("\tSearchRating: %f", res.SearchRating)
|
||||
t.Logf("\tSynonyms: %v", res.Synonyms)
|
||||
t.Log("--------------------------------------------------")
|
||||
}
|
||||
}
|
||||
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func TestMangafire_FindChapters(t *testing.T) {
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
id string
|
||||
atLeast int
|
||||
}{
|
||||
{
|
||||
name: "The Dangers in My Heart",
|
||||
id: "/manga/boku-no-kokoro-no-yabai-yatsu.vv882",
|
||||
atLeast: 141,
|
||||
},
|
||||
}
|
||||
|
||||
provider := NewMangafire(util.NewLogger())
|
||||
|
||||
for _, tt := range tests {
|
||||
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
|
||||
chapters, err := provider.FindChapters(tt.id)
|
||||
if assert.NoError(t, err, "provider.FindChapters() error") {
|
||||
|
||||
assert.NotEmpty(t, chapters, "chapters is empty")
|
||||
|
||||
assert.GreaterOrEqual(t, len(chapters), tt.atLeast, "chapters length is less than expected")
|
||||
|
||||
for _, chapter := range chapters {
|
||||
t.Logf("Title: %s", chapter.Title)
|
||||
t.Logf("\tSlug: %s", chapter.ID)
|
||||
t.Logf("\tURL: %s", chapter.URL)
|
||||
t.Logf("\tIndex: %d", chapter.Index)
|
||||
t.Logf("\tUpdatedAt: %s", chapter.UpdatedAt)
|
||||
t.Log("--------------------------------------------------")
|
||||
}
|
||||
}
|
||||
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
//func TestMangafire_FindChapterPages(t *testing.T) {
|
||||
//
|
||||
// tests := []struct {
|
||||
// name string
|
||||
// chapterId string
|
||||
// }{
|
||||
// {
|
||||
// name: "The Dangers in My Heart",
|
||||
// chapterId: "", // Chapter 1
|
||||
// },
|
||||
// }
|
||||
//
|
||||
// provider := NewMangafire(util.NewLogger())
|
||||
//
|
||||
// for _, tt := range tests {
|
||||
//
|
||||
// t.Run(tt.name, func(t *testing.T) {
|
||||
//
|
||||
// pages, err := provider.FindChapterPages(tt.chapterId)
|
||||
// if assert.NoError(t, err, "provider.FindChapterPages() error") {
|
||||
// assert.NotEmpty(t, pages, "pages is empty")
|
||||
//
|
||||
// for _, page := range pages {
|
||||
// t.Logf("Index: %d", page.Index)
|
||||
// t.Logf("\tURL: %s", page.URL)
|
||||
// t.Log("--------------------------------------------------")
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// })
|
||||
//
|
||||
// }
|
||||
//
|
||||
//}
|
||||
302
seanime-2.9.10/internal/manga/providers/manganato.go
Normal file
302
seanime-2.9.10/internal/manga/providers/manganato.go
Normal file
@@ -0,0 +1,302 @@
|
||||
package manga_providers
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"net/url"
|
||||
hibikemanga "seanime/internal/extension/hibike/manga"
|
||||
"seanime/internal/util"
|
||||
"seanime/internal/util/comparison"
|
||||
"slices"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/PuerkitoBio/goquery"
|
||||
"github.com/imroc/req/v3"
|
||||
"github.com/rs/zerolog"
|
||||
)
|
||||
|
||||
type (
	// Manganato is a manga provider that scrapes natomanga.com
	// (formerly manganato/chapmanganato).
	Manganato struct {
		Url    string // website base URL
		Client *req.Client
		logger *zerolog.Logger
	}

	// ManganatoSearchResult mirrors one entry of the site's search JSON.
	// NOTE(review): not referenced by the visible scraping code, which
	// parses HTML instead — confirm whether this type is still used.
	ManganatoSearchResult struct {
		ID           string `json:"id"`
		Name         string `json:"name"`
		NameUnsigned string `json:"nameunsigned"`
		LastChapter  string `json:"lastchapter"`
		Image        string `json:"image"`
		Author       string `json:"author"`
		StoryLink    string `json:"story_link"`
	}
)
|
||||
|
||||
func NewManganato(logger *zerolog.Logger) *Manganato {
|
||||
client := req.C().
|
||||
SetUserAgent("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36").
|
||||
SetTimeout(60 * time.Second).
|
||||
EnableInsecureSkipVerify().
|
||||
ImpersonateSafari()
|
||||
|
||||
return &Manganato{
|
||||
Url: "https://natomanga.com",
|
||||
Client: client,
|
||||
logger: logger,
|
||||
}
|
||||
}
|
||||
|
||||
func (mp *Manganato) GetSettings() hibikemanga.Settings {
|
||||
return hibikemanga.Settings{
|
||||
SupportsMultiScanlator: false,
|
||||
SupportsMultiLanguage: false,
|
||||
}
|
||||
}
|
||||
|
||||
// Search scrapes the natomanga search page for manga matching opts.Query.
// Result IDs are encoded as "manganato$<slug>" or "chapmanganato$<slug>"
// so FindChapters can reconstruct the right host. Each result carries a
// Sorensen-Dice similarity rating against the query.
// Returns ErrNoResults when nothing is found.
func (mp *Manganato) Search(opts hibikemanga.SearchOptions) (ret []*hibikemanga.SearchResult, err error) {
	ret = make([]*hibikemanga.SearchResult, 0)

	mp.logger.Debug().Str("query", opts.Query).Msg("manganato: Searching manga")

	// The site expects lowercase, underscore-separated search terms.
	q := opts.Query
	q = strings.ReplaceAll(q, " ", "_")
	q = strings.ToLower(q)
	q = strings.TrimSpace(q)
	q = url.QueryEscape(q)
	// NOTE(review): base URL is hardcoded here instead of using mp.Url
	// (same value today) — consider unifying.
	uri := fmt.Sprintf("https://natomanga.com/search/story/%s", q)

	resp, err := mp.Client.R().
		SetHeader("User-Agent", util.GetRandomUserAgent()).
		Get(uri)

	if err != nil {
		mp.logger.Error().Err(err).Str("uri", uri).Msg("manganato: Failed to send request")
		return nil, err
	}

	if !resp.IsSuccessState() {
		mp.logger.Error().Str("status", resp.Status).Str("uri", uri).Msg("manganato: Request failed")
		return nil, fmt.Errorf("failed to fetch search results: status %s", resp.Status)
	}

	bodyBytes := resp.Bytes()

	//mp.logger.Debug().Str("body", string(bodyBytes)).Msg("manganato: Response body")

	doc, err := goquery.NewDocumentFromReader(bytes.NewReader(bodyBytes))
	if err != nil {
		mp.logger.Error().Err(err).Msg("manganato: Failed to parse HTML")
		return nil, err
	}

	doc.Find("div.story_item").Each(func(i int, s *goquery.Selection) {
		// Deliberate best-effort: a malformed search item must not abort
		// the whole scrape, so panics in this callback are swallowed.
		defer func() {
			if r := recover(); r != nil {
			}
		}()

		result := &hibikemanga.SearchResult{
			Provider: string(ManganatoProvider),
		}

		href, exists := s.Find("a").Attr("href")
		if !exists {
			return
		}

		// Only accept links pointing at the known hosts.
		if !strings.HasPrefix(href, "https://natomanga.com/") &&
			!strings.HasPrefix(href, "https://www.natomanga.com/") &&
			!strings.HasPrefix(href, "https://www.chapmanganato.com/") &&
			!strings.HasPrefix(href, "https://chapmanganato.com/") {
			return
		}

		// Encode the host into the ID prefix, then append the manga slug
		// (the 5th path segment of the absolute URL).
		result.ID = href
		splitHref := strings.Split(result.ID, "/")

		if strings.Contains(href, "chapmanganato") {
			result.ID = "chapmanganato$"
		} else {
			result.ID = "manganato$"
		}

		if len(splitHref) > 4 {
			result.ID += splitHref[4]
		}

		result.Title = s.Find("h3.story_name").Text()
		result.Title = strings.TrimSpace(result.Title)
		result.Image, _ = s.Find("img").Attr("src")

		// NOTE(review): the ok result is discarded; if compRes can be nil,
		// the Rating access below would panic — confirm the comparison
		// helper's contract.
		compRes, _ := comparison.FindBestMatchWithSorensenDice(&opts.Query, []*string{&result.Title})
		result.SearchRating = compRes.Rating
		ret = append(ret, result)
	})

	if len(ret) == 0 {
		mp.logger.Error().Str("query", opts.Query).Msg("manganato: No results found")
		return nil, ErrNoResults
	}

	mp.logger.Info().Int("count", len(ret)).Msg("manganato: Found results")

	return ret, nil
}
|
||||
|
||||
func (mp *Manganato) FindChapters(id string) (ret []*hibikemanga.ChapterDetails, err error) {
|
||||
ret = make([]*hibikemanga.ChapterDetails, 0)
|
||||
|
||||
mp.logger.Debug().Str("mangaId", id).Msg("manganato: Finding chapters")
|
||||
|
||||
splitId := strings.Split(id, "$")
|
||||
if len(splitId) != 2 {
|
||||
mp.logger.Error().Str("mangaId", id).Msg("manganato: Invalid manga ID")
|
||||
return nil, ErrNoChapters
|
||||
}
|
||||
|
||||
uri := ""
|
||||
if splitId[0] == "manganato" {
|
||||
uri = fmt.Sprintf("https://natomanga.com/manga/%s", splitId[1])
|
||||
} else if splitId[0] == "chapmanganato" {
|
||||
uri = fmt.Sprintf("https://chapmanganato.com/manga/%s", splitId[1])
|
||||
}
|
||||
|
||||
resp, err := mp.Client.R().
|
||||
SetHeader("User-Agent", util.GetRandomUserAgent()).
|
||||
Get(uri)
|
||||
|
||||
if err != nil {
|
||||
mp.logger.Error().Err(err).Str("uri", uri).Msg("manganato: Failed to send request")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if !resp.IsSuccessState() {
|
||||
mp.logger.Error().Str("status", resp.Status).Str("uri", uri).Msg("manganato: Request failed")
|
||||
return nil, fmt.Errorf("failed to fetch chapters: status %s", resp.Status)
|
||||
}
|
||||
|
||||
doc, err := goquery.NewDocumentFromReader(resp.Body)
|
||||
if err != nil {
|
||||
mp.logger.Error().Err(err).Msg("manganato: Failed to parse HTML")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
doc.Find(".chapter-list .row").Each(func(i int, s *goquery.Selection) {
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
}
|
||||
}()
|
||||
|
||||
name := s.Find("a").Text()
|
||||
if strings.HasPrefix(name, "Vol.") {
|
||||
split := strings.Split(name, " ")
|
||||
name = strings.Join(split[1:], " ")
|
||||
}
|
||||
|
||||
chStr := strings.TrimSpace(strings.Split(name, " ")[1])
|
||||
chStr = strings.TrimSuffix(chStr, ":")
|
||||
|
||||
href, exists := s.Find("a").Attr("href")
|
||||
if !exists {
|
||||
return
|
||||
}
|
||||
|
||||
hrefParts := strings.Split(href, "/")
|
||||
if len(hrefParts) < 6 {
|
||||
return
|
||||
}
|
||||
|
||||
chapterId := hrefParts[5]
|
||||
chapter := &hibikemanga.ChapterDetails{
|
||||
Provider: string(ManganatoProvider),
|
||||
ID: splitId[1] + "$" + chapterId,
|
||||
URL: href,
|
||||
Title: strings.TrimSpace(name),
|
||||
Chapter: chStr,
|
||||
}
|
||||
ret = append(ret, chapter)
|
||||
})
|
||||
|
||||
slices.Reverse(ret)
|
||||
for i, chapter := range ret {
|
||||
chapter.Index = uint(i)
|
||||
}
|
||||
|
||||
if len(ret) == 0 {
|
||||
mp.logger.Error().Str("mangaId", id).Msg("manganato: No chapters found")
|
||||
return nil, ErrNoChapters
|
||||
}
|
||||
|
||||
mp.logger.Info().Int("count", len(ret)).Msg("manganato: Found chapters")
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (mp *Manganato) FindChapterPages(id string) (ret []*hibikemanga.ChapterPage, err error) {
|
||||
ret = make([]*hibikemanga.ChapterPage, 0)
|
||||
|
||||
mp.logger.Debug().Str("chapterId", id).Msg("manganato: Finding chapter pages")
|
||||
|
||||
splitId := strings.Split(id, "$")
|
||||
if len(splitId) != 2 {
|
||||
mp.logger.Error().Str("chapterId", id).Msg("manganato: Invalid chapter ID")
|
||||
return nil, ErrNoPages
|
||||
}
|
||||
|
||||
uri := fmt.Sprintf("https://natomanga.com/manga/%s/%s", splitId[0], splitId[1])
|
||||
|
||||
resp, err := mp.Client.R().
|
||||
SetHeader("User-Agent", util.GetRandomUserAgent()).
|
||||
SetHeader("Referer", "https://natomanga.com/").
|
||||
Get(uri)
|
||||
|
||||
if err != nil {
|
||||
mp.logger.Error().Err(err).Str("uri", uri).Msg("manganato: Failed to send request")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if !resp.IsSuccessState() {
|
||||
mp.logger.Error().Str("status", resp.Status).Str("uri", uri).Msg("manganato: Request failed")
|
||||
return nil, fmt.Errorf("failed to fetch chapter pages: status %s", resp.Status)
|
||||
}
|
||||
|
||||
doc, err := goquery.NewDocumentFromReader(resp.Body)
|
||||
if err != nil {
|
||||
mp.logger.Error().Err(err).Msg("manganato: Failed to parse HTML")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
doc.Find(".container-chapter-reader img").Each(func(i int, s *goquery.Selection) {
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
}
|
||||
}()
|
||||
|
||||
src, exists := s.Attr("src")
|
||||
if !exists || src == "" {
|
||||
return
|
||||
}
|
||||
|
||||
page := &hibikemanga.ChapterPage{
|
||||
Provider: string(ManganatoProvider),
|
||||
URL: src,
|
||||
Index: len(ret),
|
||||
Headers: map[string]string{
|
||||
"Referer": "https://natomanga.com/",
|
||||
},
|
||||
}
|
||||
ret = append(ret, page)
|
||||
})
|
||||
|
||||
if len(ret) == 0 {
|
||||
mp.logger.Error().Str("chapterId", id).Msg("manganato: No pages found")
|
||||
return nil, ErrNoPages
|
||||
}
|
||||
|
||||
mp.logger.Info().Int("count", len(ret)).Msg("manganato: Found pages")
|
||||
|
||||
return ret, nil
|
||||
|
||||
}
|
||||
130
seanime-2.9.10/internal/manga/providers/manganato_test.go
Normal file
130
seanime-2.9.10/internal/manga/providers/manganato_test.go
Normal file
@@ -0,0 +1,130 @@
|
||||
package manga_providers
|
||||
|
||||
import (
|
||||
hibikemanga "seanime/internal/extension/hibike/manga"
|
||||
"seanime/internal/util"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestManganato_Search(t *testing.T) {
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
query string
|
||||
}{
|
||||
{
|
||||
name: "Boku no Kokoro no Yabai Yatsu",
|
||||
query: "Boku no Kokoro no Yabai Yatsu",
|
||||
},
|
||||
}
|
||||
|
||||
provider := NewManganato(util.NewLogger())
|
||||
|
||||
for _, tt := range tests {
|
||||
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
|
||||
searchRes, err := provider.Search(hibikemanga.SearchOptions{
|
||||
Query: tt.query,
|
||||
})
|
||||
if assert.NoError(t, err, "provider.Search() error") {
|
||||
assert.NotEmpty(t, searchRes, "search result is empty")
|
||||
|
||||
for _, res := range searchRes {
|
||||
t.Logf("Title: %s", res.Title)
|
||||
t.Logf("\tID: %s", res.ID)
|
||||
t.Logf("\tYear: %d", res.Year)
|
||||
t.Logf("\tImage: %s", res.Image)
|
||||
t.Logf("\tProvider: %s", res.Provider)
|
||||
t.Logf("\tSearchRating: %f", res.SearchRating)
|
||||
t.Logf("\tSynonyms: %v", res.Synonyms)
|
||||
t.Log("--------------------------------------------------")
|
||||
}
|
||||
}
|
||||
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func TestManganato_FindChapters(t *testing.T) {
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
id string
|
||||
atLeast int
|
||||
}{
|
||||
{
|
||||
name: "The Dangers in My Heart",
|
||||
id: "manganato$boku-no-kokoro-no-yabai-yatsu",
|
||||
atLeast: 141,
|
||||
},
|
||||
}
|
||||
|
||||
provider := NewManganato(util.NewLogger())
|
||||
|
||||
for _, tt := range tests {
|
||||
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
|
||||
chapters, err := provider.FindChapters(tt.id)
|
||||
if assert.NoError(t, err, "provider.FindChapters() error") {
|
||||
|
||||
assert.NotEmpty(t, chapters, "chapters is empty")
|
||||
|
||||
assert.GreaterOrEqual(t, len(chapters), tt.atLeast, "chapters length is less than expected")
|
||||
|
||||
for _, chapter := range chapters {
|
||||
t.Logf("Title: %s", chapter.Title)
|
||||
t.Logf("\tID: %s", chapter.ID)
|
||||
t.Logf("\tChapter: %s", chapter.Chapter)
|
||||
t.Logf("\tURL: %s", chapter.URL)
|
||||
t.Logf("\tIndex: %d", chapter.Index)
|
||||
t.Logf("\tUpdatedAt: %s", chapter.UpdatedAt)
|
||||
t.Log("--------------------------------------------------")
|
||||
}
|
||||
}
|
||||
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func TestManganato_FindChapterPages(t *testing.T) {
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
chapterId string
|
||||
}{
|
||||
{
|
||||
name: "The Dangers in My Heart",
|
||||
chapterId: "boku-no-kokoro-no-yabai-yatsu$chapter-20", // Chapter 20
|
||||
},
|
||||
}
|
||||
|
||||
provider := NewManganato(util.NewLogger())
|
||||
|
||||
for _, tt := range tests {
|
||||
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
|
||||
pages, err := provider.FindChapterPages(tt.chapterId)
|
||||
if assert.NoError(t, err, "provider.FindChapterPages() error") {
|
||||
assert.NotEmpty(t, pages, "pages is empty")
|
||||
|
||||
for _, page := range pages {
|
||||
t.Logf("Index: %d", page.Index)
|
||||
t.Logf("\tURL: %s", page.URL)
|
||||
t.Log("--------------------------------------------------")
|
||||
}
|
||||
}
|
||||
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
235
seanime-2.9.10/internal/manga/providers/mangapill.go
Normal file
235
seanime-2.9.10/internal/manga/providers/mangapill.go
Normal file
@@ -0,0 +1,235 @@
|
||||
package manga_providers
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/url"
|
||||
hibikemanga "seanime/internal/extension/hibike/manga"
|
||||
"seanime/internal/util"
|
||||
"seanime/internal/util/comparison"
|
||||
"slices"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/gocolly/colly"
|
||||
"github.com/imroc/req/v3"
|
||||
"github.com/rs/zerolog"
|
||||
)
|
||||
|
||||
type (
|
||||
Mangapill struct {
|
||||
Url string
|
||||
Client *req.Client
|
||||
UserAgent string
|
||||
logger *zerolog.Logger
|
||||
}
|
||||
)
|
||||
|
||||
func NewMangapill(logger *zerolog.Logger) *Mangapill {
|
||||
client := req.C().
|
||||
SetUserAgent(util.GetRandomUserAgent()).
|
||||
SetTimeout(60 * time.Second).
|
||||
EnableInsecureSkipVerify().
|
||||
ImpersonateChrome()
|
||||
|
||||
return &Mangapill{
|
||||
Url: "https://mangapill.com",
|
||||
Client: client,
|
||||
UserAgent: util.GetRandomUserAgent(),
|
||||
logger: logger,
|
||||
}
|
||||
}
|
||||
|
||||
// DEVNOTE: Unique ID
|
||||
// Each chapter ID has this format: {number}${slug} -- e.g. 6502-10004000$gokurakugai-chapter-4
|
||||
// The chapter ID is split by the $ character to reconstruct the chapter URL for subsequent requests
|
||||
|
||||
func (mp *Mangapill) GetSettings() hibikemanga.Settings {
|
||||
return hibikemanga.Settings{
|
||||
SupportsMultiScanlator: false,
|
||||
SupportsMultiLanguage: false,
|
||||
}
|
||||
}
|
||||
|
||||
func (mp *Mangapill) Search(opts hibikemanga.SearchOptions) (ret []*hibikemanga.SearchResult, err error) {
|
||||
ret = make([]*hibikemanga.SearchResult, 0)
|
||||
|
||||
mp.logger.Debug().Str("query", opts.Query).Msg("mangapill: Searching manga")
|
||||
|
||||
uri := fmt.Sprintf("%s/search?q=%s", mp.Url, url.QueryEscape(opts.Query))
|
||||
|
||||
c := colly.NewCollector(
|
||||
colly.UserAgent(mp.UserAgent),
|
||||
)
|
||||
|
||||
c.WithTransport(mp.Client.Transport)
|
||||
|
||||
c.OnHTML("div.container div.my-3.justify-end > div", func(e *colly.HTMLElement) {
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
}
|
||||
}()
|
||||
result := &hibikemanga.SearchResult{
|
||||
Provider: string(MangapillProvider),
|
||||
}
|
||||
|
||||
result.ID = strings.Split(e.ChildAttr("a", "href"), "/manga/")[1]
|
||||
result.ID = strings.Replace(result.ID, "/", "$", -1)
|
||||
|
||||
title := e.DOM.Find("div > a > div.mt-3").Text()
|
||||
result.Title = strings.TrimSpace(title)
|
||||
|
||||
altTitles := e.DOM.Find("div > a > div.text-xs.text-secondary").Text()
|
||||
if altTitles != "" {
|
||||
result.Synonyms = []string{strings.TrimSpace(altTitles)}
|
||||
}
|
||||
|
||||
compTitles := []*string{&result.Title}
|
||||
if len(result.Synonyms) > 0 {
|
||||
compTitles = append(compTitles, &result.Synonyms[0])
|
||||
}
|
||||
compRes, _ := comparison.FindBestMatchWithSorensenDice(&opts.Query, compTitles)
|
||||
result.SearchRating = compRes.Rating
|
||||
|
||||
result.Image = e.ChildAttr("a img", "data-src")
|
||||
|
||||
yearStr := e.DOM.Find("div > div.flex > div").Eq(1).Text()
|
||||
year, err := strconv.Atoi(strings.TrimSpace(yearStr))
|
||||
if err != nil {
|
||||
result.Year = 0
|
||||
} else {
|
||||
result.Year = year
|
||||
}
|
||||
|
||||
ret = append(ret, result)
|
||||
})
|
||||
|
||||
err = c.Visit(uri)
|
||||
if err != nil {
|
||||
mp.logger.Error().Err(err).Msg("mangapill: Failed to visit")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// code
|
||||
|
||||
if len(ret) == 0 {
|
||||
mp.logger.Error().Str("query", opts.Query).Msg("mangapill: No results found")
|
||||
return nil, ErrNoResults
|
||||
}
|
||||
|
||||
mp.logger.Info().Int("count", len(ret)).Msg("mangapill: Found results")
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (mp *Mangapill) FindChapters(id string) (ret []*hibikemanga.ChapterDetails, err error) {
|
||||
ret = make([]*hibikemanga.ChapterDetails, 0)
|
||||
|
||||
mp.logger.Debug().Str("mangaId", id).Msg("mangapill: Finding chapters")
|
||||
|
||||
uriId := strings.Replace(id, "$", "/", -1)
|
||||
uri := fmt.Sprintf("%s/manga/%s", mp.Url, uriId)
|
||||
|
||||
c := colly.NewCollector(
|
||||
colly.UserAgent(mp.UserAgent),
|
||||
)
|
||||
|
||||
c.WithTransport(mp.Client.Transport)
|
||||
|
||||
c.OnHTML("div.container div.border-border div#chapters div.grid-cols-1 a", func(e *colly.HTMLElement) {
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
}
|
||||
}()
|
||||
chapter := &hibikemanga.ChapterDetails{
|
||||
Provider: MangapillProvider,
|
||||
}
|
||||
|
||||
chapter.ID = strings.Split(e.Attr("href"), "/chapters/")[1]
|
||||
chapter.ID = strings.Replace(chapter.ID, "/", "$", -1)
|
||||
|
||||
chapter.Title = strings.TrimSpace(e.Text)
|
||||
|
||||
splitTitle := strings.Split(chapter.Title, "Chapter ")
|
||||
if len(splitTitle) < 2 {
|
||||
return
|
||||
}
|
||||
chapter.Chapter = splitTitle[1]
|
||||
|
||||
ret = append(ret, chapter)
|
||||
})
|
||||
|
||||
err = c.Visit(uri)
|
||||
if err != nil {
|
||||
mp.logger.Error().Err(err).Msg("mangapill: Failed to visit")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if len(ret) == 0 {
|
||||
mp.logger.Error().Str("mangaId", id).Msg("mangapill: No chapters found")
|
||||
return nil, ErrNoChapters
|
||||
}
|
||||
|
||||
slices.Reverse(ret)
|
||||
|
||||
for i, chapter := range ret {
|
||||
chapter.Index = uint(i)
|
||||
}
|
||||
|
||||
mp.logger.Info().Int("count", len(ret)).Msg("mangapill: Found chapters")
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (mp *Mangapill) FindChapterPages(id string) (ret []*hibikemanga.ChapterPage, err error) {
|
||||
ret = make([]*hibikemanga.ChapterPage, 0)
|
||||
|
||||
mp.logger.Debug().Str("chapterId", id).Msg("mangapill: Finding chapter pages")
|
||||
|
||||
uriId := strings.Replace(id, "$", "/", -1)
|
||||
uri := fmt.Sprintf("%s/chapters/%s", mp.Url, uriId)
|
||||
|
||||
c := colly.NewCollector(
|
||||
colly.UserAgent(mp.UserAgent),
|
||||
)
|
||||
|
||||
c.WithTransport(mp.Client.Transport)
|
||||
|
||||
c.OnHTML("chapter-page", func(e *colly.HTMLElement) {
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
}
|
||||
}()
|
||||
page := &hibikemanga.ChapterPage{}
|
||||
|
||||
page.URL = e.DOM.Find("div picture img").AttrOr("data-src", "")
|
||||
if page.URL == "" {
|
||||
return
|
||||
}
|
||||
indexStr := e.DOM.Find("div[data-summary] > div").Text()
|
||||
index, _ := strconv.Atoi(strings.Split(strings.Split(indexStr, "page ")[1], "/")[0])
|
||||
page.Index = index - 1
|
||||
|
||||
page.Headers = map[string]string{
|
||||
"Referer": "https://mangapill.com/",
|
||||
}
|
||||
|
||||
ret = append(ret, page)
|
||||
})
|
||||
|
||||
err = c.Visit(uri)
|
||||
if err != nil {
|
||||
mp.logger.Error().Err(err).Msg("mangapill: Failed to visit")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if len(ret) == 0 {
|
||||
mp.logger.Error().Str("chapterId", id).Msg("mangapill: No pages found")
|
||||
return nil, ErrNoPages
|
||||
}
|
||||
|
||||
mp.logger.Info().Int("count", len(ret)).Msg("mangapill: Found pages")
|
||||
|
||||
return ret, nil
|
||||
|
||||
}
|
||||
128
seanime-2.9.10/internal/manga/providers/mangapill_test.go
Normal file
128
seanime-2.9.10/internal/manga/providers/mangapill_test.go
Normal file
@@ -0,0 +1,128 @@
|
||||
package manga_providers
|
||||
|
||||
import (
|
||||
"github.com/stretchr/testify/assert"
|
||||
hibikemanga "seanime/internal/extension/hibike/manga"
|
||||
"seanime/internal/util"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestMangapill_Search(t *testing.T) {
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
query string
|
||||
}{
|
||||
{
|
||||
name: "Boku no Kokoro no Yabai Yatsu",
|
||||
query: "Boku no Kokoro no Yabai Yatsu",
|
||||
},
|
||||
}
|
||||
|
||||
provider := NewMangapill(util.NewLogger())
|
||||
|
||||
for _, tt := range tests {
|
||||
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
|
||||
searchRes, err := provider.Search(hibikemanga.SearchOptions{
|
||||
Query: tt.query,
|
||||
})
|
||||
if assert.NoError(t, err, "provider.Search() error") {
|
||||
assert.NotEmpty(t, searchRes, "search result is empty")
|
||||
|
||||
for _, res := range searchRes {
|
||||
t.Logf("Title: %s", res.Title)
|
||||
t.Logf("\tID: %s", res.ID)
|
||||
t.Logf("\tYear: %d", res.Year)
|
||||
t.Logf("\tImage: %s", res.Image)
|
||||
t.Logf("\tProvider: %s", res.Provider)
|
||||
t.Logf("\tSearchRating: %f", res.SearchRating)
|
||||
t.Logf("\tSynonyms: %v", res.Synonyms)
|
||||
t.Log("--------------------------------------------------")
|
||||
}
|
||||
}
|
||||
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func TestMangapill_FindChapters(t *testing.T) {
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
id string
|
||||
atLeast int
|
||||
}{
|
||||
{
|
||||
name: "The Dangers in My Heart",
|
||||
id: "5232$boku-no-kokoro-no-yabai-yatsu",
|
||||
atLeast: 141,
|
||||
},
|
||||
}
|
||||
|
||||
provider := NewMangapill(util.NewLogger())
|
||||
|
||||
for _, tt := range tests {
|
||||
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
|
||||
chapters, err := provider.FindChapters(tt.id)
|
||||
if assert.NoError(t, err, "provider.FindChapters() error") {
|
||||
|
||||
assert.NotEmpty(t, chapters, "chapters is empty")
|
||||
|
||||
assert.GreaterOrEqual(t, len(chapters), tt.atLeast, "chapters length is less than expected")
|
||||
|
||||
for _, chapter := range chapters {
|
||||
t.Logf("Title: %s", chapter.Title)
|
||||
t.Logf("\tSlug: %s", chapter.ID)
|
||||
t.Logf("\tURL: %s", chapter.URL)
|
||||
t.Logf("\tIndex: %d", chapter.Index)
|
||||
t.Logf("\tUpdatedAt: %s", chapter.UpdatedAt)
|
||||
t.Log("--------------------------------------------------")
|
||||
}
|
||||
}
|
||||
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func TestMangapill_FindChapterPages(t *testing.T) {
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
chapterId string
|
||||
}{
|
||||
{
|
||||
name: "The Dangers in My Heart",
|
||||
chapterId: "5232-10001000$boku-no-kokoro-no-yabai-yatsu-chapter-1", // Chapter 1
|
||||
},
|
||||
}
|
||||
|
||||
provider := NewMangapill(util.NewLogger())
|
||||
|
||||
for _, tt := range tests {
|
||||
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
|
||||
pages, err := provider.FindChapterPages(tt.chapterId)
|
||||
if assert.NoError(t, err, "provider.FindChapterPages() error") {
|
||||
assert.NotEmpty(t, pages, "pages is empty")
|
||||
|
||||
for _, page := range pages {
|
||||
t.Logf("Index: %d", page.Index)
|
||||
t.Logf("\tURL: %s", page.URL)
|
||||
t.Log("--------------------------------------------------")
|
||||
}
|
||||
}
|
||||
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
19
seanime-2.9.10/internal/manga/providers/providers.go
Normal file
19
seanime-2.9.10/internal/manga/providers/providers.go
Normal file
@@ -0,0 +1,19 @@
|
||||
package manga_providers
|
||||
|
||||
import "errors"
|
||||
|
||||
// Provider identifiers, used as the Provider field on search results,
// chapters and pages, and as cache-key prefixes.
const (
	// FIX: explicit string type for consistency with the sibling constants.
	WeebCentralProvider string = "weebcentral"
	MangadexProvider    string = "mangadex"
	ComickProvider      string = "comick"
	MangapillProvider   string = "mangapill"
	ManganatoProvider   string = "manganato"
	MangafireProvider   string = "mangafire"
	LocalProvider       string = "local-manga"
)
|
||||
|
||||
// Sentinel errors shared by every manga provider implementation; callers can
// match them with errors.Is.
var (
	ErrNoResults  = errors.New("no results found")  // search returned nothing
	ErrNoChapters = errors.New("no chapters found") // chapter list empty or ID invalid
	ErrNoPages    = errors.New("no pages found")    // page list empty or ID invalid
)
|
||||
8
seanime-2.9.10/internal/manga/providers/proxy_images.go
Normal file
8
seanime-2.9.10/internal/manga/providers/proxy_images.go
Normal file
@@ -0,0 +1,8 @@
|
||||
package manga_providers
|
||||
|
||||
import util "seanime/internal/util/proxies"
|
||||
|
||||
func GetImageByProxy(url string, headers map[string]string) ([]byte, error) {
|
||||
ip := &util.ImageProxy{}
|
||||
return ip.GetImage(url, headers)
|
||||
}
|
||||
309
seanime-2.9.10/internal/manga/providers/weebcentral.go
Normal file
309
seanime-2.9.10/internal/manga/providers/weebcentral.go
Normal file
@@ -0,0 +1,309 @@
|
||||
package manga_providers
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/url"
|
||||
"regexp"
|
||||
hibikemanga "seanime/internal/extension/hibike/manga"
|
||||
"seanime/internal/util"
|
||||
"seanime/internal/util/comparison"
|
||||
"slices"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/PuerkitoBio/goquery"
|
||||
"github.com/imroc/req/v3"
|
||||
"github.com/rs/zerolog"
|
||||
)
|
||||
|
||||
// WeebCentral implements the manga provider for WeebCentral
|
||||
// It uses goquery to scrape search results, chapter lists, and chapter pages.
|
||||
|
||||
type WeebCentral struct {
|
||||
Url string
|
||||
UserAgent string
|
||||
Client *req.Client
|
||||
logger *zerolog.Logger
|
||||
}
|
||||
|
||||
// NewWeebCentral initializes and returns a new WeebCentral provider instance.
|
||||
func NewWeebCentral(logger *zerolog.Logger) *WeebCentral {
|
||||
client := req.C().
|
||||
SetUserAgent(util.GetRandomUserAgent()).
|
||||
SetTimeout(60 * time.Second).
|
||||
EnableInsecureSkipVerify().
|
||||
ImpersonateChrome()
|
||||
|
||||
return &WeebCentral{
|
||||
Url: "https://weebcentral.com",
|
||||
UserAgent: util.GetRandomUserAgent(),
|
||||
Client: client,
|
||||
logger: logger,
|
||||
}
|
||||
}
|
||||
|
||||
func (w *WeebCentral) GetSettings() hibikemanga.Settings {
|
||||
return hibikemanga.Settings{
|
||||
SupportsMultiScanlator: false,
|
||||
SupportsMultiLanguage: false,
|
||||
}
|
||||
}
|
||||
|
||||
func (w *WeebCentral) Search(opts hibikemanga.SearchOptions) ([]*hibikemanga.SearchResult, error) {
|
||||
w.logger.Debug().Str("query", opts.Query).Msg("weebcentral: Searching manga")
|
||||
|
||||
searchUrl := fmt.Sprintf("%s/search/simple?location=main", w.Url)
|
||||
form := url.Values{}
|
||||
form.Set("text", opts.Query)
|
||||
|
||||
resp, err := w.Client.R().
|
||||
SetContentType("application/x-www-form-urlencoded").
|
||||
SetHeader("HX-Request", "true").
|
||||
SetHeader("HX-Trigger", "quick-search-input").
|
||||
SetHeader("HX-Trigger-Name", "text").
|
||||
SetHeader("HX-Target", "quick-search-result").
|
||||
SetHeader("HX-Current-URL", w.Url+"/").
|
||||
SetBody(form.Encode()).
|
||||
Post(searchUrl)
|
||||
|
||||
if err != nil {
|
||||
w.logger.Error().Err(err).Msg("weebcentral: Failed to send search request")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if !resp.IsSuccessState() {
|
||||
w.logger.Error().Str("status", resp.Status).Msg("weebcentral: Search request failed")
|
||||
return nil, fmt.Errorf("search request failed: status %s", resp.Status)
|
||||
}
|
||||
|
||||
body := resp.String()
|
||||
|
||||
doc, err := goquery.NewDocumentFromReader(strings.NewReader(body))
|
||||
if err != nil {
|
||||
w.logger.Error().Err(err).Msg("weebcentral: Failed to parse search HTML")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var searchResults []*hibikemanga.SearchResult
|
||||
doc.Find("#quick-search-result > div > a").Each(func(i int, s *goquery.Selection) {
|
||||
link, exists := s.Attr("href")
|
||||
if !exists {
|
||||
return
|
||||
}
|
||||
title := strings.TrimSpace(s.Find(".flex-1").Text())
|
||||
|
||||
var image string
|
||||
if s.Find("source").Length() > 0 {
|
||||
image, _ = s.Find("source").Attr("srcset")
|
||||
} else if s.Find("img").Length() > 0 {
|
||||
image, _ = s.Find("img").Attr("src")
|
||||
}
|
||||
|
||||
// Extract manga id from link assuming the format contains '/series/{id}/'
|
||||
idPart := ""
|
||||
parts := strings.Split(link, "/series/")
|
||||
if len(parts) > 1 {
|
||||
subparts := strings.Split(parts[1], "/")
|
||||
idPart = subparts[0]
|
||||
}
|
||||
if idPart == "" {
|
||||
return
|
||||
}
|
||||
|
||||
titleCopy := title
|
||||
titles := []*string{&titleCopy}
|
||||
compRes, ok := comparison.FindBestMatchWithSorensenDice(&opts.Query, titles)
|
||||
if !ok || compRes.Rating < 0.6 {
|
||||
return
|
||||
}
|
||||
|
||||
searchResults = append(searchResults, &hibikemanga.SearchResult{
|
||||
ID: idPart,
|
||||
Title: title,
|
||||
Synonyms: []string{},
|
||||
Year: 0,
|
||||
Image: image,
|
||||
Provider: WeebCentralProvider,
|
||||
SearchRating: compRes.Rating,
|
||||
})
|
||||
})
|
||||
|
||||
if len(searchResults) == 0 {
|
||||
w.logger.Error().Msg("weebcentral: No search results found")
|
||||
return nil, errors.New("no results found")
|
||||
}
|
||||
|
||||
w.logger.Info().Int("count", len(searchResults)).Msg("weebcentral: Found search results")
|
||||
return searchResults, nil
|
||||
}
|
||||
|
||||
func (w *WeebCentral) FindChapters(mangaId string) ([]*hibikemanga.ChapterDetails, error) {
|
||||
w.logger.Debug().Str("mangaId", mangaId).Msg("weebcentral: Fetching chapters")
|
||||
|
||||
chapterUrl := fmt.Sprintf("%s/series/%s/full-chapter-list", w.Url, mangaId)
|
||||
|
||||
resp, err := w.Client.R().
|
||||
SetHeader("HX-Request", "true").
|
||||
SetHeader("HX-Target", "chapter-list").
|
||||
SetHeader("HX-Current-URL", fmt.Sprintf("%s/series/%s", w.Url, mangaId)).
|
||||
SetHeader("Referer", fmt.Sprintf("%s/series/%s", w.Url, mangaId)).
|
||||
Get(chapterUrl)
|
||||
|
||||
if err != nil {
|
||||
w.logger.Error().Err(err).Msg("weebcentral: Failed to fetch chapter list")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if !resp.IsSuccessState() {
|
||||
w.logger.Error().Str("status", resp.Status).Msg("weebcentral: Chapter list request failed")
|
||||
return nil, fmt.Errorf("chapter list request failed: status %s", resp.Status)
|
||||
}
|
||||
|
||||
body := resp.String()
|
||||
doc, err := goquery.NewDocumentFromReader(strings.NewReader(body))
|
||||
if err != nil {
|
||||
w.logger.Error().Err(err).Msg("weebcentral: Failed to parse chapter list HTML")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var chapters []*hibikemanga.ChapterDetails
|
||||
volumeCounter := 1
|
||||
lastChapterNumber := 9999.0
|
||||
|
||||
chapterRegex := regexp.MustCompile("(\\d+(?:\\.\\d+)?)")
|
||||
|
||||
doc.Find("div.flex.items-center").Each(func(i int, s *goquery.Selection) {
|
||||
a := s.Find("a")
|
||||
chapterUrl, exists := a.Attr("href")
|
||||
if !exists {
|
||||
return
|
||||
}
|
||||
chapterTitle := strings.TrimSpace(a.Find("span.grow > span").First().Text())
|
||||
|
||||
var chapterNumber string
|
||||
var parsedChapterNumber float64
|
||||
|
||||
match := chapterRegex.FindStringSubmatch(chapterTitle)
|
||||
if len(match) > 1 {
|
||||
chapterNumber = w.cleanChapterNumber(match[1])
|
||||
if num, err := strconv.ParseFloat(chapterNumber, 64); err == nil {
|
||||
parsedChapterNumber = num
|
||||
}
|
||||
} else {
|
||||
chapterNumber = ""
|
||||
}
|
||||
|
||||
if parsedChapterNumber > lastChapterNumber {
|
||||
volumeCounter++
|
||||
}
|
||||
if parsedChapterNumber != 0 {
|
||||
lastChapterNumber = parsedChapterNumber
|
||||
}
|
||||
|
||||
// Extract chapter id from the URL assuming format contains '/chapters/{id}'
|
||||
chapterId := ""
|
||||
parts := strings.Split(chapterUrl, "/chapters/")
|
||||
if len(parts) > 1 {
|
||||
chapterId = parts[1]
|
||||
}
|
||||
|
||||
chapters = append(chapters, &hibikemanga.ChapterDetails{
|
||||
ID: chapterId,
|
||||
URL: chapterUrl,
|
||||
Title: chapterTitle,
|
||||
Chapter: chapterNumber,
|
||||
Index: uint(i),
|
||||
Provider: WeebCentralProvider,
|
||||
})
|
||||
})
|
||||
|
||||
if len(chapters) == 0 {
|
||||
w.logger.Error().Msg("weebcentral: No chapters found")
|
||||
return nil, errors.New("no chapters found")
|
||||
}
|
||||
|
||||
slices.Reverse(chapters)
|
||||
|
||||
for i := range chapters {
|
||||
chapters[i].Index = uint(i)
|
||||
}
|
||||
|
||||
w.logger.Info().Int("count", len(chapters)).Msg("weebcentral: Found chapters")
|
||||
return chapters, nil
|
||||
}
|
||||
|
||||
func (w *WeebCentral) FindChapterPages(chapterId string) ([]*hibikemanga.ChapterPage, error) {
|
||||
url := fmt.Sprintf("%s/chapters/%s/images?is_prev=False&reading_style=long_strip", w.Url, chapterId)
|
||||
|
||||
resp, err := w.Client.R().
|
||||
SetHeader("HX-Request", "true").
|
||||
SetHeader("HX-Current-URL", fmt.Sprintf("%s/chapters/%s", w.Url, chapterId)).
|
||||
SetHeader("Referer", fmt.Sprintf("%s/chapters/%s", w.Url, chapterId)).
|
||||
Get(url)
|
||||
|
||||
if err != nil {
|
||||
w.logger.Error().Err(err).Msg("weebcentral: Failed to fetch chapter pages")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if !resp.IsSuccessState() {
|
||||
w.logger.Error().Str("status", resp.Status).Msg("weebcentral: Chapter pages request failed")
|
||||
return nil, fmt.Errorf("chapter pages request failed: status %s", resp.Status)
|
||||
}
|
||||
|
||||
body := resp.String()
|
||||
doc, err := goquery.NewDocumentFromReader(strings.NewReader(body))
|
||||
if err != nil {
|
||||
w.logger.Error().Err(err).Msg("weebcentral: Failed to parse chapter pages HTML")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var pages []*hibikemanga.ChapterPage
|
||||
totalImgs := doc.Find("img").Length()
|
||||
|
||||
doc.Find("section.flex-1 img").Each(func(i int, s *goquery.Selection) {
|
||||
imageUrl, exists := s.Attr("src")
|
||||
if !exists || imageUrl == "" {
|
||||
return
|
||||
}
|
||||
pages = append(pages, &hibikemanga.ChapterPage{
|
||||
URL: imageUrl,
|
||||
Index: i,
|
||||
Headers: map[string]string{"Referer": w.Url},
|
||||
Provider: WeebCentralProvider,
|
||||
})
|
||||
})
|
||||
|
||||
if len(pages) == 0 && totalImgs > 0 {
|
||||
doc.Find("img").Each(func(i int, s *goquery.Selection) {
|
||||
imageUrl, exists := s.Attr("src")
|
||||
if !exists || imageUrl == "" {
|
||||
return
|
||||
}
|
||||
pages = append(pages, &hibikemanga.ChapterPage{
|
||||
URL: imageUrl,
|
||||
Index: i,
|
||||
Headers: map[string]string{"Referer": w.Url},
|
||||
Provider: WeebCentralProvider,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
if len(pages) == 0 {
|
||||
w.logger.Error().Msg("weebcentral: No pages found")
|
||||
return nil, errors.New("no pages found")
|
||||
}
|
||||
|
||||
w.logger.Info().Int("count", len(pages)).Msg("weebcentral: Found chapter pages")
|
||||
return pages, nil
|
||||
}
|
||||
|
||||
func (w *WeebCentral) cleanChapterNumber(chapterStr string) string {
|
||||
cleaned := strings.TrimLeft(chapterStr, "0")
|
||||
if cleaned == "" {
|
||||
return "0"
|
||||
}
|
||||
return cleaned
|
||||
}
|
||||
162
seanime-2.9.10/internal/manga/providers/weebcentral_test.go
Normal file
162
seanime-2.9.10/internal/manga/providers/weebcentral_test.go
Normal file
@@ -0,0 +1,162 @@
|
||||
package manga_providers
|
||||
|
||||
import (
|
||||
"seanime/internal/util"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
hibikemanga "seanime/internal/extension/hibike/manga"
|
||||
)
|
||||
|
||||
func TestWeebCentral_Search(t *testing.T) {
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
query string
|
||||
}{
|
||||
{
|
||||
name: "One Piece",
|
||||
query: "One Piece",
|
||||
},
|
||||
{
|
||||
name: "Jujutsu Kaisen",
|
||||
query: "Jujutsu Kaisen",
|
||||
},
|
||||
}
|
||||
|
||||
weebcentral := NewWeebCentral(util.NewLogger())
|
||||
|
||||
for _, tt := range tests {
|
||||
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
|
||||
searchRes, err := weebcentral.Search(hibikemanga.SearchOptions{
|
||||
Query: tt.query,
|
||||
})
|
||||
if assert.NoError(t, err, "weebcentral.Search() error") {
|
||||
assert.NotEmpty(t, searchRes, "search result is empty")
|
||||
|
||||
for _, res := range searchRes {
|
||||
t.Logf("Title: %s", res.Title)
|
||||
t.Logf("\tID: %s", res.ID)
|
||||
t.Logf("\tYear: %d", res.Year)
|
||||
t.Logf("\tImage: %s", res.Image)
|
||||
t.Logf("\tProvider: %s", res.Provider)
|
||||
t.Logf("\tSearchRating: %f", res.SearchRating)
|
||||
t.Logf("\tSynonyms: %v", res.Synonyms)
|
||||
t.Log("--------------------------------------------------")
|
||||
}
|
||||
}
|
||||
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func TestWeebCentral_FindChapters(t *testing.T) {
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
id string
|
||||
atLeast int
|
||||
}{
|
||||
{
|
||||
name: "One Piece",
|
||||
id: "01J76XY7E9FNDZ1DBBM6PBJPFK",
|
||||
atLeast: 1100,
|
||||
},
|
||||
{
|
||||
name: "Jujutsu Kaisen",
|
||||
id: "01J76XYCERXE60T7FKXVCCAQ0H",
|
||||
atLeast: 250,
|
||||
},
|
||||
}
|
||||
|
||||
weebcentral := NewWeebCentral(util.NewLogger())
|
||||
|
||||
for _, tt := range tests {
|
||||
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
|
||||
chapters, err := weebcentral.FindChapters(tt.id)
|
||||
if assert.NoError(t, err, "weebcentral.FindChapters() error") {
|
||||
|
||||
assert.NotEmpty(t, chapters, "chapters is empty")
|
||||
|
||||
assert.GreaterOrEqual(t, len(chapters), tt.atLeast, "chapters length is less than expected")
|
||||
|
||||
for _, chapter := range chapters {
|
||||
t.Logf("Title: %s", chapter.Title)
|
||||
t.Logf("\tSlug: %s", chapter.ID)
|
||||
t.Logf("\tURL: %s", chapter.URL)
|
||||
t.Logf("\tIndex: %d", chapter.Index)
|
||||
t.Logf("\tChapter: %s", chapter.Chapter)
|
||||
t.Logf("\tUpdatedAt: %s", chapter.UpdatedAt)
|
||||
t.Log("--------------------------------------------------")
|
||||
}
|
||||
}
|
||||
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func TestWeebCentral_FindChapterPages(t *testing.T) {
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
id string
|
||||
index uint
|
||||
}{
|
||||
{
|
||||
name: "One Piece",
|
||||
id: "01J76XY7E9FNDZ1DBBM6PBJPFK",
|
||||
index: 1110,
|
||||
},
|
||||
{
|
||||
name: "Jujutsu Kaisen",
|
||||
id: "01J76XYCERXE60T7FKXVCCAQ0H",
|
||||
index: 0,
|
||||
},
|
||||
}
|
||||
|
||||
weebcentral := NewWeebCentral(util.NewLogger())
|
||||
|
||||
for _, tt := range tests {
|
||||
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
|
||||
chapters, err := weebcentral.FindChapters(tt.id)
|
||||
if assert.NoError(t, err, "weebcentral.FindChapters() error") {
|
||||
|
||||
assert.NotEmpty(t, chapters, "chapters is empty")
|
||||
|
||||
var chapterInfo *hibikemanga.ChapterDetails
|
||||
for _, chapter := range chapters {
|
||||
if chapter.Index == tt.index {
|
||||
chapterInfo = chapter
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if assert.NotNil(t, chapterInfo, "chapter not found") {
|
||||
pages, err := weebcentral.FindChapterPages(chapterInfo.ID)
|
||||
if assert.NoError(t, err, "weebcentral.FindChapterPages() error") {
|
||||
assert.NotEmpty(t, pages, "pages is empty")
|
||||
|
||||
for _, page := range pages {
|
||||
t.Logf("Index: %d", page.Index)
|
||||
t.Logf("\tURL: %s", page.URL)
|
||||
t.Log("--------------------------------------------------")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
186
seanime-2.9.10/internal/manga/repository.go
Normal file
186
seanime-2.9.10/internal/manga/repository.go
Normal file
@@ -0,0 +1,186 @@
|
||||
package manga
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"image"
|
||||
_ "image/jpeg" // Register JPEG format
|
||||
_ "image/png" // Register PNG format
|
||||
"net/http"
|
||||
"seanime/internal/database/db"
|
||||
"seanime/internal/database/models"
|
||||
"seanime/internal/events"
|
||||
"seanime/internal/extension"
|
||||
"seanime/internal/util/filecache"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/rs/zerolog"
|
||||
_ "golang.org/x/image/bmp" // Register BMP format
|
||||
_ "golang.org/x/image/tiff" // Register Tiff format
|
||||
_ "golang.org/x/image/webp" // Register WebP format
|
||||
)
|
||||
|
||||
// Sentinel errors returned by the manga repository.
// Callers should compare against these with errors.Is.
var (
	ErrNoResults            = errors.New("no results found for this media")
	ErrNoChapters           = errors.New("no manga chapters found")
	ErrChapterNotFound      = errors.New("chapter not found")
	ErrChapterNotDownloaded = errors.New("chapter not downloaded")
	ErrNoTitlesProvided     = errors.New("no titles provided")
)
|
||||
|
||||
type (
	// Repository coordinates manga chapter/page retrieval across provider
	// extensions, backed by a file cache and the application database.
	Repository struct {
		logger     *zerolog.Logger
		fileCacher *filecache.Cacher
		cacheDir   string
		// providerExtensionBank holds the manga provider extensions;
		// starts empty and is replaced via InitExtensionBank.
		providerExtensionBank *extension.UnifiedBank
		serverUri             string
		wsEventManager        events.WSEventManagerInterface
		// mu guards settings and the providerExtensionBank swap
		// (see SetSettings and InitExtensionBank).
		mu          sync.Mutex
		downloadDir string
		db          *db.Database

		// settings is the current app settings, updated via SetSettings.
		settings *models.Settings
	}

	// NewRepositoryOptions bundles the dependencies required to
	// construct a Repository via NewRepository.
	NewRepositoryOptions struct {
		Logger         *zerolog.Logger
		CacheDir       string
		FileCacher     *filecache.Cacher
		ServerURI      string
		WsEventManager events.WSEventManagerInterface
		DownloadDir    string
		Database       *db.Database
	}
)
|
||||
|
||||
func NewRepository(opts *NewRepositoryOptions) *Repository {
|
||||
r := &Repository{
|
||||
logger: opts.Logger,
|
||||
fileCacher: opts.FileCacher,
|
||||
cacheDir: opts.CacheDir,
|
||||
serverUri: opts.ServerURI,
|
||||
wsEventManager: opts.WsEventManager,
|
||||
downloadDir: opts.DownloadDir,
|
||||
providerExtensionBank: extension.NewUnifiedBank(),
|
||||
db: opts.Database,
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
func (r *Repository) SetSettings(settings *models.Settings) {
|
||||
r.mu.Lock()
|
||||
defer r.mu.Unlock()
|
||||
r.settings = settings
|
||||
}
|
||||
|
||||
func (r *Repository) InitExtensionBank(bank *extension.UnifiedBank) {
|
||||
r.mu.Lock()
|
||||
defer r.mu.Unlock()
|
||||
r.providerExtensionBank = bank
|
||||
r.logger.Debug().Msg("manga: Initialized provider extension bank")
|
||||
}
|
||||
|
||||
// RemoveProvider removes the provider extension with the given id from the bank.
// NOTE(review): unlike InitExtensionBank this does not take r.mu — confirm
// UnifiedBank.Delete is safe for concurrent use.
func (r *Repository) RemoveProvider(id string) {
	r.providerExtensionBank.Delete(id)
}
|
||||
|
||||
// GetProviderExtensionBank returns the bank holding the manga provider extensions.
func (r *Repository) GetProviderExtensionBank() *extension.UnifiedBank {
	return r.providerExtensionBank
}
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// File Cache
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
// bucketType distinguishes the kinds of per-media file cache buckets.
type bucketType string

const (
	// bucketTypeChapterKey is the single key used inside a chapter bucket
	// (each bucket holds only one key-value pair).
	bucketTypeChapterKey = "1"
	// Bucket categories embedded in cache filenames
	// (manga_{provider}_{bucketType}_{mediaId}).
	bucketTypeChapter        bucketType = "chapters"
	bucketTypePage           bucketType = "pages"
	bucketTypePageDimensions bucketType = "page-dimensions"
)
|
||||
|
||||
// getFcProviderBucket returns a bucket for the provider and mediaId.
|
||||
//
|
||||
// e.g., manga_comick_chapters_123, manga_mangasee_pages_456
|
||||
//
|
||||
// Note: Each bucket contains only 1 key-value pair.
|
||||
func (r *Repository) getFcProviderBucket(provider string, mediaId int, bucketType bucketType) filecache.Bucket {
|
||||
return filecache.NewBucket("manga_"+provider+"_"+string(bucketType)+"_"+strconv.Itoa(mediaId), time.Hour*24*7)
|
||||
}
|
||||
|
||||
// EmptyMangaCache deletes all manga buckets associated with the given mediaId.
|
||||
func (r *Repository) EmptyMangaCache(mediaId int) (err error) {
|
||||
// Empty the manga cache
|
||||
err = r.fileCacher.RemoveAllBy(func(filename string) bool {
|
||||
return strings.HasPrefix(filename, "manga_") && strings.Contains(filename, strconv.Itoa(mediaId))
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
func ParseChapterContainerFileName(filename string) (provider string, bucketType bucketType, mediaId int, ok bool) {
|
||||
filename = strings.TrimSuffix(filename, ".json")
|
||||
filename = strings.TrimSuffix(filename, ".cache")
|
||||
filename = strings.TrimSuffix(filename, ".txt")
|
||||
parts := strings.Split(filename, "_")
|
||||
if len(parts) != 4 {
|
||||
return "", "", 0, false
|
||||
}
|
||||
|
||||
provider = parts[1]
|
||||
var err error
|
||||
mediaId, err = strconv.Atoi(parts[3])
|
||||
if err != nil {
|
||||
return "", "", 0, false
|
||||
}
|
||||
|
||||
switch parts[2] {
|
||||
case "chapters":
|
||||
bucketType = bucketTypeChapter
|
||||
case "pages":
|
||||
bucketType = bucketTypePage
|
||||
case "page-dimensions":
|
||||
bucketType = bucketTypePageDimensions
|
||||
default:
|
||||
return "", "", 0, false
|
||||
}
|
||||
|
||||
ok = true
|
||||
return
|
||||
}
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
// getImageNaturalSize fetches the image at url and returns its intrinsic
// width and height without decoding the full pixel data.
func getImageNaturalSize(url string) (int, int, error) {
	// Fetch the image
	resp, err := http.Get(url)
	if err != nil {
		return 0, 0, err
	}
	defer resp.Body.Close()

	// A non-200 response would otherwise be fed to DecodeConfig and surface
	// as a misleading "unknown format" error; fail early with the status.
	if resp.StatusCode != http.StatusOK {
		return 0, 0, errors.New("failed to fetch image: " + resp.Status)
	}

	// Decode only the image header/config, not the pixels.
	img, _, err := image.DecodeConfig(resp.Body)
	if err != nil {
		return 0, 0, err
	}

	// Return the natural size
	return img.Width, img.Height, nil
}
|
||||
|
||||
// getImageNaturalSizeB reports the intrinsic width and height of the encoded
// image contained in data, reading only the header (no full pixel decode).
func getImageNaturalSizeB(data []byte) (int, int, error) {
	cfg, _, err := image.DecodeConfig(bytes.NewReader(data))
	if err != nil {
		return 0, 0, err
	}
	return cfg.Width, cfg.Height, nil
}
|
||||
Reference in New Issue
Block a user