node build fixed

ra_ma
2025-09-20 14:08:38 +01:00
parent c6ebbe069d
commit 3d298fa434
1516 changed files with 535727 additions and 2 deletions

View File

@@ -1,3 +1,5 @@
+ARG TAG
 FROM node:latest
 # Install dependencies
@@ -13,10 +15,11 @@ RUN curl -s https://api.github.com/repos/5rahim/seanime/releases/latest | grep '
 TAG=$(cat tag.file) && \
 wget https://github.com/5rahim/seanime/archive/refs/tags/${TAG}.tar.gz && \
 tar -xzvf ${TAG}.tar.gz && \
-rm ${TAG}.tar.gz tag.file
+rm ${TAG}.tar.gz tag.file && \
+mv seanime-* seanime
 # Set working directory to the extracted source code
-WORKDIR /seanime-${TAG}
+WORKDIR /seanime
 # Build the web interface
 RUN cd seanime-web && \
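For context, the truncated RUN line in the hunk header above is the step that resolves the latest release tag into tag.file; the change itself only affects what happens after extraction, renaming the versioned source directory to a fixed seanime path so WORKDIR no longer depends on ${TAG}. A minimal sketch of that kind of tag lookup, assuming a plain grep/sed pipeline rather than the exact (cut-off) command used in this Dockerfile:

# Hypothetical sketch - the real command is truncated in the hunk header above.
# Query the GitHub API for the latest release and keep only the tag name (e.g. v2.9.10).
RUN curl -s https://api.github.com/repos/5rahim/seanime/releases/latest | \
    grep '"tag_name":' | \
    sed -E 's/.*"tag_name": *"([^"]+)".*/\1/' > tag.file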

seanime-2.9.10/.github/FUNDING.yml vendored Normal file
View File

@@ -0,0 +1,2 @@
github: 5rahim
buy_me_a_coffee: 5rahim

View File

@@ -0,0 +1,111 @@
name: Bug report
description: Report a bug you encountered
title: 'bug: '
labels:
- bug
body:
- type: checkboxes
id: '1'
attributes:
label: Checklist
description: Please follow the general troubleshooting steps first
options:
- label: >-
My version of the app is the latest available
required: true
- label: >-
I have checked open and closed [issues](https://github.com/5rahim/seanime/issues)
required: true
- label: >-
I have checked the [docs](https://seanime.rahim.app/docs/faq) for a fix
required: true
- type: dropdown
id: '2'
attributes:
label: Bug Severity
description: Select the severity of the bug. Anything below "Panic" means the app doesn't crash.
options:
- Not sure
- Panic / Crash
- Usability is affected
- Low
validations:
required: true
- type: dropdown
id: '3'
attributes:
label: Bug Area
description: Select the general area of the app or process during which the bug occurred.
options:
- Other
- Authentication
- Configuration
- Anime Library
- Transcoding / Media Streaming
- Torrent Streaming
- Online Streaming
- Manga
- Settings
- Offline mode
- AniList
- UI / Web Interface
- Desktop app
validations:
required: true
- type: textarea
id: '4'
attributes:
label: Bug Description / Steps to Reproduce
description: Precisely describe the bug you encountered and the steps to reproduce it. Avoid vague descriptions.
validations:
required: true
- type: textarea
id: '5'
attributes:
label: Expected Behavior
description: Describe what you expected to happen.
- type: textarea
id: '6'
attributes:
label: Screenshots
description: If applicable, add screenshots of the bug
- type: textarea
id: '7'
attributes:
label: Logs
description: If applicable, add terminal output, browser console logs or stack traces. You can use [pastebin](https://pastebin.com) to share large logs.
validations:
required: true
- type: checkboxes
id: '8'
attributes:
label: Debugging Checklist
description: Confirm you have included at least some of the following debugging information. If you haven't, please do so before submitting the issue.
options:
- label: >-
I have included error messages
required: false
- label: >-
I have included server logs
required: false
- label: >-
I have included browser console logs
required: false
- type: input
id: '9'
attributes:
label: App Version
description: Enter the version of Seanime you are using.
placeholder: v1.0.0
validations:
required: true
- type: dropdown
id: '10'
attributes:
label: Operating System
options:
- Windows
- Linux
- MacOS
validations:
required: true

View File

@@ -0,0 +1,39 @@
name: Feature Request
description: Suggest an idea for the project
title: 'feature request: '
labels:
- request
body:
- type: checkboxes
id: '1'
attributes:
label: Checklist
description: >-
Please check the following before submitting a feature request. If you
are unable to check all the boxes, please provide more information in the
description.
options:
- label: >-
I checked that this feature has not been requested before
required: true
- label: >-
I checked that this feature is not in the "Not planned" list
required: true
- label: >-
This feature will benefit the majority of users
- type: textarea
id: '2'
attributes:
label: Problem Description / Use Case
description: >-
Provide a detailed description of the problem you are facing or the use case you have in mind.
validations:
required: true
- type: textarea
id: '3'
attributes:
label: Proposed Solution
description: >-
Provide a detailed description of the solution you'd like to see. If you have any ideas on how to implement the feature, please include them here.
validations:
required: true

View File

@@ -0,0 +1,57 @@
package main
import (
"fmt"
"os"
"path/filepath"
"strings"
)
func main() {
const inFile = "CHANGELOG.md"
const outFile = "whats-new.md"
// Get the path to the changelog
changelogPath := filepath.Join(".", inFile)
// Read the changelog content
content, err := os.ReadFile(changelogPath)
if err != nil {
fmt.Println("Error reading changelog:", err)
return
}
// Convert the content to a string
changelog := string(content)
// Extract everything between the first and second "## " headers
sections := strings.Split(changelog, "## ")
if len(sections) < 2 {
fmt.Println("Not enough headers found in the changelog.")
return
}
// We only care about the first section
changelog = sections[1]
// Remove everything after the next header (if any)
changelog = strings.Split(changelog, "## ")[0]
// Remove the first line (which is the title of the first section)
lines := strings.Split(changelog, "\n")
if len(lines) > 1 {
changelog = strings.Join(lines[1:], "\n")
}
// Trim newlines
changelog = strings.TrimSpace(changelog)
// Write the extracted content to the output file
outPath := filepath.Join(".", outFile)
if err := os.WriteFile(outPath, []byte(changelog), 0644); err != nil {
fmt.Println("Error writing to file:", err)
return
}
fmt.Printf("Changelog content written to %s\n", outPath)
}
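To illustrate the extraction, suppose CHANGELOG.md starts with the following (hypothetical content used only for this example):

## v2.9.10
- Added feature A
- Fixed bug B

## v2.9.9
...

The script takes the first "## " section, drops its heading line, and trims surrounding whitespace, so whats-new.md ends up containing only:

- Added feature A
- Fixed bug B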

View File

@@ -0,0 +1,107 @@
package main
import (
"encoding/json"
"fmt"
"os"
"path/filepath"
"time"
)
const (
DownloadUrl = "https://github.com/5rahim/seanime/releases/latest/download/"
)
func main() {
// Retrieve version from environment variable
version := os.Getenv("APP_VERSION")
if version == "" {
version = "1.0.0" // Default to '1.0.0' if not set
}
// Define the asset filenames
assets := map[string]struct {
Asset string
AppZip string
Sig string
}{
"MacOS_arm64": {
Asset: fmt.Sprintf("seanime-desktop-%s_MacOS_arm64.app.tar.gz", version),
Sig: fmt.Sprintf("seanime-desktop-%s_MacOS_arm64.app.tar.gz.sig", version),
},
"MacOS_x86_64": {
Asset: fmt.Sprintf("seanime-desktop-%s_MacOS_x86_64.app.tar.gz", version),
Sig: fmt.Sprintf("seanime-desktop-%s_MacOS_x86_64.app.tar.gz.sig", version),
},
"Linux_x86_64": {
Asset: fmt.Sprintf("seanime-desktop-%s_Linux_x86_64.AppImage", version),
Sig: fmt.Sprintf("seanime-desktop-%s_Linux_x86_64.AppImage.sig", version),
},
"Windows_x86_64": {
AppZip: fmt.Sprintf("seanime-desktop-%s_Windows_x86_64.exe", version),
Sig: fmt.Sprintf("seanime-desktop-%s_Windows_x86_64.sig", version),
},
}
// Function to generate URL based on asset names
generateURL := func(filename string) string {
return fmt.Sprintf("%s%s", DownloadUrl, filename)
}
// Prepare the JSON structure
latestJSON := map[string]interface{}{
"version": version,
"pub_date": time.Now().Format(time.RFC3339), // Change to the actual publish date
"platforms": map[string]map[string]string{
"linux-x86_64": {
"url": generateURL(assets["Linux_x86_64"].Asset),
"signature": getContent(assets["Linux_x86_64"].Sig),
},
"windows-x86_64": {
"url": generateURL(assets["Windows_x86_64"].AppZip),
"signature": getContent(assets["Windows_x86_64"].Sig),
},
"darwin-x86_64": {
"url": generateURL(assets["MacOS_x86_64"].Asset),
"signature": getContent(assets["MacOS_x86_64"].Sig),
},
"darwin-aarch64": {
"url": generateURL(assets["MacOS_arm64"].Asset),
"signature": getContent(assets["MacOS_arm64"].Sig),
},
},
}
// Remove non-existent assets
for platform, asset := range latestJSON["platforms"].(map[string]map[string]string) {
if asset["signature"] == "" {
delete(latestJSON["platforms"].(map[string]map[string]string), platform)
}
}
// Write to latest.json
outputPath := filepath.Join(".", "latest.json")
file, err := os.Create(outputPath)
if err != nil {
fmt.Println("Error creating file:", err)
return
}
defer file.Close()
encoder := json.NewEncoder(file)
encoder.SetIndent("", " ")
if err := encoder.Encode(latestJSON); err != nil {
fmt.Println("Error writing JSON to file:", err)
return
}
fmt.Printf("Generated %s successfully.\n", outputPath)
}
func getContent(filename string) string {
fileContent, err := os.ReadFile(filepath.Join(".", filename))
if err != nil {
return ""
}
return string(fileContent)
}
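Based on this code, the generated latest.json has roughly the following shape (values below are placeholders; any platform whose .sig file is not found next to the script is dropped from the map before writing):

{
  "version": "2.9.10",
  "pub_date": "2025-09-20T14:08:38Z",
  "platforms": {
    "darwin-aarch64": {
      "url": "https://github.com/5rahim/seanime/releases/latest/download/seanime-desktop-2.9.10_MacOS_arm64.app.tar.gz",
      "signature": "<contents of seanime-desktop-2.9.10_MacOS_arm64.app.tar.gz.sig>"
    },
    "windows-x86_64": {
      "url": "https://github.com/5rahim/seanime/releases/latest/download/seanime-desktop-2.9.10_Windows_x86_64.exe",
      "signature": "<contents of seanime-desktop-2.9.10_Windows_x86_64.sig>"
    }
  }
}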

View File

@@ -0,0 +1,151 @@
name: Build Electron App
on:
workflow_call:
outputs:
appVersion:
description: "The version of the app"
value: ${{ jobs.build-electron.outputs.app_version }}
jobs:
build-electron:
strategy:
fail-fast: false
matrix:
# IDs:
# - seanime-denshi-darwin-arm64
# - seanime-denshi-darwin-x64
# - seanime-denshi-linux-x64
# - seanime-denshi-windows-x64
include:
# For Arm-based Macs (Apple Silicon)
- os: 'macos-latest'
id: 'seanime-denshi-darwin-arm64'
go_binary_id: 'seanime-server-darwin' # Artifact: go-seanime-server-darwin (contains both arm64 and x86_64)
electron_args: '--mac --arm64'
# For Intel-based macs
- os: 'macos-latest'
id: 'seanime-denshi-darwin-x64'
go_binary_id: 'seanime-server-darwin' # Artifact: go-seanime-server-darwin (contains both arm64 and x86_64)
electron_args: '--mac --x64'
# For Linux
- os: 'ubuntu-latest'
id: 'seanime-denshi-linux-x64'
go_binary_id: 'seanime-server-linux' # Artifact: go-seanime-server-linux (contains x86_64)
electron_args: '--linux'
# For Windows
- os: 'windows-latest'
id: 'seanime-denshi-windows-x64'
go_binary_id: 'seanime-server-windows' # Artifact: go-seanime-server-windows (contains x86_64)
electron_args: '--win'
runs-on: ${{ matrix.os }}
outputs:
app_version: ${{ steps.get-version.outputs.version }}
steps:
- name: Checkout code 📂
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup Node.js 📦
uses: actions/setup-node@v4
with:
node-version: '20'
- name: Get package version 📦
id: get-version
run: |
NODE_VERSION=$(node -p "require('./seanime-denshi/package.json').version")
echo "version=$NODE_VERSION" >> $GITHUB_OUTPUT
shell: bash
# Install dependencies
- name: Install dependencies (Ubuntu) 📦
if: matrix.os == 'ubuntu-latest'
run: |
sudo apt-get update
sudo apt-get install -y libgtk-3-dev libnss3-dev libxss-dev libasound2-dev
# Download the web folders
- name: Download web folder artifact 📥
uses: actions/download-artifact@v4
with:
name: web-denshi
path: web-denshi
# Move web-denshi folder into seanime-denshi
- name: Move web-denshi folder 🚚
run: mv web-denshi seanime-denshi/
shell: bash
- name: Ensure binaries folder exists (UNIX)
if: matrix.os == 'macos-latest' || matrix.os == 'ubuntu-latest'
run: mkdir -p ./seanime-denshi/binaries
- name: Ensure binaries folder exists (Windows)
if: matrix.os == 'windows-latest'
run: mkdir .\seanime-denshi\binaries
# Download the server binaries based on matrix.go_binary_id
- name: Download server binaries 📥
uses: actions/download-artifact@v4
with:
name: go-${{ matrix.go_binary_id }}
path: ./seanime-denshi/binaries
# Extract server binaries
- name: Extract server binaries (macOS x64) 📂
if: matrix.os == 'macos-latest' && matrix.id == 'seanime-denshi-darwin-x64'
# Extracts seanime-server-darwin-arm64 and seanime-server-darwin-amd64
# Only keep seanime-server-darwin-amd64
run: |
tar -xf ./seanime-denshi/binaries/binaries-${{ matrix.go_binary_id }}.tar -C ./seanime-denshi/binaries
# Remove the other binary
rm -rf ./seanime-denshi/binaries/seanime-server-darwin-arm64
- name: Extract server binaries (macOS arm64) 📂
if: matrix.os == 'macos-latest' && matrix.id == 'seanime-denshi-darwin-arm64'
# Extracts seanime-server-darwin-arm64 and seanime-server-darwin-amd64
# Only keep seanime-server-darwin-arm64
run: |
tar -xf ./seanime-denshi/binaries/binaries-${{ matrix.go_binary_id }}.tar -C ./seanime-denshi/binaries
# Remove the other binary
rm -rf ./seanime-denshi/binaries/seanime-server-darwin-amd64
- name: Extract server binaries (Linux) 📂
if: matrix.os == 'ubuntu-latest' && matrix.id == 'seanime-denshi-linux-x64'
# Extracts seanime-server-linux-amd64
run: tar -xf ./seanime-denshi/binaries/binaries-${{ matrix.go_binary_id }}.tar -C ./seanime-denshi/binaries
- name: Extract server binaries (Windows) 📂
if: matrix.os == 'windows-latest'
# Extracts seanime-server-windows-amd64
run: 7z x ".\seanime-denshi\binaries\binaries-${{ matrix.go_binary_id }}.zip" "-o./seanime-denshi/binaries/"
# Copy app icon
- name: Copy app icon 📝
run: |
mkdir -p ./seanime-denshi/assets
cp ./seanime-desktop/src-tauri/app-icon.png ./seanime-denshi/assets/
shell: bash
# Install and build
- name: Install and build 📦️
run: |
cd seanime-denshi
npm install
npm run build -- ${{ matrix.electron_args }}
shell: bash
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# Upload the artifacts
- name: Upload Electron artifacts 📤
uses: actions/upload-artifact@v4
with:
name: electron-${{ matrix.id }}
path: |
./seanime-denshi-*
./seanime-denshi/dist/*.yml
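Because this workflow only runs via workflow_call, it has to be invoked from another workflow in the same repository. A minimal sketch of such a caller, which also reads the appVersion output declared at the top (the job names and workflow path here are assumptions for illustration, not part of this commit):

jobs:
  build-electron:
    uses: ./.github/workflows/electron-build.yml
  print-version:
    needs: build-electron
    runs-on: ubuntu-latest
    steps:
      # appVersion maps to jobs.build-electron.outputs.app_version in the called workflow.
      - run: echo "Electron app version is ${{ needs.build-electron.outputs.appVersion }}"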

View File

@@ -0,0 +1,700 @@
name: Release Draft
on:
workflow_dispatch:
push:
tags:
- 'v*'
permissions:
contents: write
jobs:
build-webapp: # TODO Uncomment if building web
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
# Web
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: '20'
# outputs in "seanime-web/out/", "seanime-web/out-desktop/" and "seanime-web/out-denshi/"
- name: Install dependencies and build Next.js app
run: |
cd seanime-web/
npm install
npm run build
npm run build:desktop
npm run build:denshi
cd ..
# Upload the output to be used in the next job
- name: Upload web folder
uses: actions/upload-artifact@v4
with:
name: web
path: seanime-web/out # output dir of build
- name: Upload web folder (Tauri)
uses: actions/upload-artifact@v4
with:
name: web-desktop
path: seanime-web/out-desktop # output dir of build:desktop
- name: Upload web folder (Electron)
uses: actions/upload-artifact@v4
with:
name: web-denshi
path: seanime-web/out-denshi # output dir of build:denshi
build-server:
needs: build-webapp # TODO Uncomment if building web
runs-on: ${{ matrix.os }}
strategy:
matrix:
# 6 binaries: 2 for Windows, 2 for Linux, 2 for macOS
include:
# This is the systray version of the Windows binary used for the server build
- os: macos-latest # seanime-server-systray-windows.exe
id: seanime-server-systray-windows
go_flags: -trimpath -buildmode=exe -ldflags="-s -w -H=windowsgui -extldflags '-static'"
# This is the non-systray version of the Windows binary used for the Tauri Windows build
- os: windows-latest # seanime-server-windows.exe
id: seanime-server-windows
go_flags: -trimpath -ldflags="-s -w" -tags=nosystray
# These are the Linux binaries used for the server build and the Tauri Linux build
- os: ubuntu-latest # seanime-server-linux-arm64, seanime-server-linux-amd64
id: seanime-server-linux
go_flags: -trimpath -ldflags="-s -w"
# These are the macOS binaries used for the server build and the Tauri macOS build
- os: macos-latest # seanime-server-darwin-arm64, seanime-server-darwin-amd64
id: seanime-server-darwin
go_env: CGO_ENABLED=0
go_flags: -trimpath -ldflags="-s -w"
steps:
- name: Checkout code ⬇️
uses: actions/checkout@v4
with:
fetch-depth: 0 # Fetch all history
fetch-tags: true # Fetch all tags
set-safe-directory: true # Add repo path as safe.directory
- name: Fetch all tags # Fetch all tags (again? can't hurt)
run: git fetch --force --tags
# Go
- name: Set up Go ⬇️
uses: actions/setup-go@v5
with:
go-version: '1.24.3'
# Download the web folders
# TODO Uncomment if building web
- name: Download web folder artifact
uses: actions/download-artifact@v4
with:
name: web
path: web
# Create the binary destination folder
# ./binaries
# |--- ...
- name: Create binary destination folder (UNIX) 🗃️
if: matrix.os == 'macos-latest' || matrix.os == 'ubuntu-latest'
run: mkdir -p binaries
- name: Create binary destination folder (Windows) 🗃️
if: matrix.os == 'windows-latest'
run: mkdir -p binaries
shell: bash
#---
# ONLY for Windows systray build (seanime-server-systray-windows)
# For the Windows systray build (built on macOS runner), we need to install the necessary dependencies
- name: Install C dependencies ⬇️ # macos windows systray build
if: matrix.id == 'seanime-server-systray-windows'
run: |
brew install filosottile/musl-cross/musl-cross
brew install llvm
brew install mingw-w64
# Build the Windows systray binary
# ./binaries/seanime-server-systray-windows.exe
- name: Build Windows Systray 📦️
if: matrix.id == 'seanime-server-systray-windows'
env:
GOARCH: amd64
GOOS: windows
CGO_ENABLED: 1
CC: x86_64-w64-mingw32-gcc
CXX: x86_64-w64-mingw32-g++
run: |
go build -o seanime-server-systray-windows.exe ${{ matrix.go_flags }} .
# Build the Windows non-systray binary
# ./seanime-server-windows.exe
- name: Build Windows Non-Systray 📦️
if: matrix.id == 'seanime-server-windows'
env:
GOARCH: amd64
GOOS: windows
CGO_ENABLED: 0
run: |
go build -o seanime-server-windows.exe ${{ matrix.go_flags }} .
shell: bash
# Build the Linux binaries
# ./seanime-server-linux-amd64
# ./seanime-server-linux-arm64
- name: Build Linux 📦️
if: matrix.id == 'seanime-server-linux'
run: |
CGO_ENABLED=0 GOARCH=amd64 go build -o seanime-server-linux-amd64 ${{ matrix.go_flags }} .
CGO_ENABLED=0 GOARCH=arm64 go build -o seanime-server-linux-arm64 ${{ matrix.go_flags }} .
# Build the macOS binaries
# ./seanime-server-darwin-amd64
# ./seanime-server-darwin-arm64
- name: Build macOS 📦️
if: matrix.id == 'seanime-server-darwin'
run: |
CGO_ENABLED=0 GOARCH=amd64 go build -o seanime-server-darwin-amd64 ${{ matrix.go_flags }} .
CGO_ENABLED=0 GOARCH=arm64 go build -o seanime-server-darwin-arm64 ${{ matrix.go_flags }} .
# Tar the binaries
- name: Tar the binaries (UNIX) 🗃️
if: matrix.os == 'macos-latest' || matrix.os == 'ubuntu-latest'
# binaries-seanime-server-darwin.tar
# binaries-seanime-server-linux.tar
# binaries-seanime-server-systray-windows.tar
run: |
tar -cf binaries-${{ matrix.id }}.tar seanime-server-*
# Zip the binaries
- name: Zip the binaries (Windows) 🗃️
if: matrix.os == 'windows-latest'
# binaries-seanime-server-windows.zip
run: |
7z a "binaries-${{ matrix.id }}.zip" seanime-server-*
# Upload the binaries to be used in the next job
- name: Upload binary folder (UNIX) 📤
uses: actions/upload-artifact@v4
if: matrix.os == 'macos-latest' || matrix.os == 'ubuntu-latest'
with:
# go-seanime-server-linux
# go-seanime-server-darwin
# go-seanime-server-systray-windows
name: go-${{ matrix.id }}
path: binaries-${{ matrix.id }}.tar
- name: Upload binary folder (Windows) 📤
uses: actions/upload-artifact@v4
if: matrix.os == 'windows-latest'
with:
# go-seanime-server-windows
name: go-${{ matrix.id }}
path: binaries-${{ matrix.id }}.zip
build-tauri:
needs: build-server
strategy:
fail-fast: false
matrix:
# IDs:
# - seanime-desktop-darwin-arm
# - seanime-desktop-darwin-intel
# - seanime-desktop-linux
# - seanime-desktop-windows
include:
# For Arm-based macs (M1 and above).
- os: 'macos-latest'
id: 'seanime-desktop-darwin-arm'
go_binary_id: 'seanime-server-darwin' # Artifact: go-seanime-server-darwin (contains both arm64 and x86_64)
args: '--target aarch64-apple-darwin'
# For Intel-based macs.
- os: 'macos-latest'
id: 'seanime-desktop-darwin-intel'
go_binary_id: 'seanime-server-darwin' # Artifact: go-seanime-server-darwin (contains both arm64 and x86_64)
args: '--target x86_64-apple-darwin'
# For Linux
- os: 'ubuntu-22.04' # for Linux
id: 'seanime-desktop-linux' # Artifact: go-seanime-server-linux (contains both arm64 and x86_64)
go_binary_id: 'seanime-server-linux'
args: ''
# For Windows
- os: 'windows-latest' # for Windows
id: 'seanime-desktop-windows' # Artifact: go-seanime-server-windows (contains x86_64)
go_binary_id: 'seanime-server-windows'
args: ''
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v4
- name: Install dependencies (Ubuntu) ⬇️
if: matrix.os == 'ubuntu-22.04'
run: |
sudo apt-get update
sudo apt-get install -y libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf
- name: Install Rust stable ⬇️
uses: dtolnay/rust-toolchain@stable
with:
# Those targets are only used on macOS runners so it's in an `if` to slightly speed up windows and linux builds.
targets: ${{ matrix.id == 'seanime-desktop-darwin-intel' && 'x86_64-apple-darwin' || matrix.id == 'seanime-desktop-darwin-arm' && 'aarch64-apple-darwin' || '' }}
- name: Setup node ⬇️
uses: actions/setup-node@v4
with:
node-version: '20'
- name: Install Tauri CLI ⬇️
run: |
cd seanime-desktop
npm install
- name: Rust cache ⬇️
uses: swatinem/rust-cache@v2
with:
workspaces: './seanime-desktop/src-tauri -> target'
# Download the web folder
# TODO Uncomment if building web
- name: Download web folder artifact
uses: actions/download-artifact@v4
with:
name: web-desktop
path: web-desktop
# Download the server binaries depending on matrix.go_binary_id
- name: Download server binaries 📥
uses: actions/download-artifact@v4
with:
# go-seanime-server-windows or
# go-seanime-server-linux or
# go-seanime-server-darwin
name: go-${{ matrix.go_binary_id }}
path: ./seanime-desktop/src-tauri/binaries
- name: Extract server binaries (UNIX) 📂
if: matrix.os == 'macos-latest' || matrix.os == 'ubuntu-22.04'
run: tar -xf ./seanime-desktop/src-tauri/binaries/binaries-${{ matrix.go_binary_id }}.tar -C ./seanime-desktop/src-tauri/binaries
- name: Extract server binaries (Windows) 📂
if: matrix.os == 'windows-latest'
run: 7z x ".\seanime-desktop\src-tauri\binaries\binaries-${{ matrix.go_binary_id }}.zip" "-o./seanime-desktop/src-tauri/binaries/"
# ----------------------------------------------------------------- delete
- name: Print downloaded binaries (UNIX)
if: matrix.os == 'macos-latest' || matrix.os == 'ubuntu-22.04'
run: ls -la ./seanime-desktop/src-tauri/binaries
- name: Print downloaded binaries (Windows)
if: matrix.os == 'windows-latest'
run: dir ./seanime-desktop/src-tauri/binaries
# ----------------------------------------------------------------- delete
- name: Determine target triple (UNIX) 🎯
# id: target_triple
if: matrix.os == 'macos-latest' || matrix.os == 'ubuntu-22.04'
run: |
TARGET_TRIPLE=$(rustc -Vv | grep host | cut -f2 -d' ')
echo "TARGET_TRIPLE=${TARGET_TRIPLE}" >> $GITHUB_ENV
- name: Determine target triple (Windows) 🎯
# id: target_triple
if: matrix.os == 'windows-latest'
run: |
$TARGET_TRIPLE = rustc -Vv | Select-String "host:" | ForEach-Object {$_.Line.split(" ")[1]}
echo "TARGET_TRIPLE=$TARGET_TRIPLE" >> $env:GITHUB_ENV
shell: pwsh
# seanime-server-windows.exe -> seanime-x86_64-pc-windows-msvc.exe
- name: Rename sidecar binary (Windows) 📝
if: matrix.id == 'seanime-desktop-windows'
run: |
powershell -Command "Rename-Item -Path ./seanime-desktop/src-tauri/binaries/seanime-server-windows.exe -NewName seanime-${{ env.TARGET_TRIPLE }}.exe"
# seanime-server-linux-amd64 -> seanime-unknown-linux-musl
- name: Rename sidecar binaries (Linux) 📝
if: matrix.id == 'seanime-desktop-linux'
run: |
mv ./seanime-desktop/src-tauri/binaries/seanime-server-linux-amd64 ./seanime-desktop/src-tauri/binaries/seanime-${{ env.TARGET_TRIPLE }}
# seanime-server-darwin-amd64 -> seanime-x86_64-apple-darwin
- name: Rename sidecar binaries (MacOS Intel) 📝
if: matrix.id == 'seanime-desktop-darwin-intel'
# Here we hardcode the target triple because the macOS runner is ARM based
run: |
mv ./seanime-desktop/src-tauri/binaries/seanime-server-darwin-amd64 ./seanime-desktop/src-tauri/binaries/seanime-x86_64-apple-darwin
# seanime-server-darwin-arm64 -> seanime-aarch64-apple-darwin
- name: Rename sidecar binaries (MacOS Arm) 📝
if: matrix.id == 'seanime-desktop-darwin-arm'
run: |
mv ./seanime-desktop/src-tauri/binaries/seanime-server-darwin-arm64 ./seanime-desktop/src-tauri/binaries/seanime-${{ env.TARGET_TRIPLE }}
# ----------------------------------------------------------------- delete
- name: Print downloaded binaries
if: matrix.os == 'macos-latest' || matrix.os == 'ubuntu-22.04'
run: ls -la ./seanime-desktop/src-tauri/binaries
- name: Print downloaded binaries
if: matrix.os == 'windows-latest'
run: dir ./seanime-desktop/src-tauri/binaries
# ----------------------------------------------------------------- delete
# Build Tauri
- name: Run Tauri action 🚀
id: tauri-action
uses: tauri-apps/tauri-action@v0
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
# APPLE_CERTIFICATE: ${{ secrets.APPLE_CERTIFICATE }}
# APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
# APPLE_SIGNING_IDENTITY: ${{ secrets.APPLE_SIGNING_IDENTITY }}
# APPLE_ID: ${{ secrets.APPLE_ID }}
# APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }}
# APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
with:
projectPath: './seanime-desktop'
args: ${{ matrix.args }}
updaterJsonPreferNsis: true
- name: Rename Tauri artifacts (UNIX) 📝
# ./
# |- seanime-desktop-darwin-arm.app.tar.gz
# |- seanime-desktop-darwin-arm.app.tar.gz.sig <- Signature
# |- seanime-desktop-darwin-intel.app.tar.gz
# |- seanime-desktop-darwin-intel.app.tar.gz.sig <- Signature
# |- seanime-desktop-linux.AppImage <- UNCOMPRESSED
# |- seanime-desktop-linux.AppImage.sig <- Signature UNCOMPRESSED
# |- seanime-desktop-windows-setup.exe <- UNCOMPRESSED
# |- seanime-desktop-windows-setup.exe.sig <- Signature UNCOMPRESSED
if: matrix.os == 'macos-latest' || matrix.os == 'ubuntu-22.04'
# We hardcode the macOS target triple because the macOS runner is ARM based and builds both arm64 and x86_64
run: |
if [ -f ./seanime-desktop/src-tauri/target/aarch64-apple-darwin/release/bundle/macos/Seanime\ Desktop.app.tar.gz ]; then
mv ./seanime-desktop/src-tauri/target/aarch64-apple-darwin/release/bundle/macos/Seanime\ Desktop.app.tar.gz ./seanime-desktop-darwin-arm.app.tar.gz
mv ./seanime-desktop/src-tauri/target/aarch64-apple-darwin/release/bundle/macos/Seanime\ Desktop.app.tar.gz.sig ./seanime-desktop-darwin-arm.app.tar.gz.sig
elif [ -f ./seanime-desktop/src-tauri/target/x86_64-apple-darwin/release/bundle/macos/Seanime\ Desktop.app.tar.gz ]; then
mv ./seanime-desktop/src-tauri/target/x86_64-apple-darwin/release/bundle/macos/Seanime\ Desktop.app.tar.gz ./seanime-desktop-darwin-intel.app.tar.gz
mv ./seanime-desktop/src-tauri/target/x86_64-apple-darwin/release/bundle/macos/Seanime\ Desktop.app.tar.gz.sig ./seanime-desktop-darwin-intel.app.tar.gz.sig
elif [ -f ./seanime-desktop/src-tauri/target/release/bundle/appimage/Seanime\ Desktop_${{ steps.tauri-action.outputs.appVersion }}_amd64.AppImage ]; then
mv ./seanime-desktop/src-tauri/target/release/bundle/appimage/Seanime\ Desktop_${{ steps.tauri-action.outputs.appVersion }}_amd64.AppImage ./seanime-desktop-linux.AppImage
mv ./seanime-desktop/src-tauri/target/release/bundle/appimage/Seanime\ Desktop_${{ steps.tauri-action.outputs.appVersion }}_amd64.AppImage.sig ./seanime-desktop-linux.AppImage.sig
fi
- name: Rename Tauri artifacts (Windows) 📝
if: matrix.os == 'windows-latest'
run: |
powershell -Command "Move-Item -Path './seanime-desktop/src-tauri/target/release/bundle/nsis/Seanime Desktop_${{ steps.tauri-action.outputs.appVersion }}_x64-setup.exe' -Destination './seanime-desktop-windows-setup.exe'"
powershell -Command "Move-Item -Path './seanime-desktop/src-tauri/target/release/bundle/nsis/Seanime Desktop_${{ steps.tauri-action.outputs.appVersion }}_x64-setup.exe.sig' -Destination './seanime-desktop-windows-setup.exe.sig'"
- name: Tar the Tauri artifacts (Linux) 🗃️
if: matrix.os == 'ubuntu-22.04'
# Note: The macOS artifacts are already packaged, so we don't need to compress them
# Compress the Linux AppImage, not the signature
run: |
if [ -f ./seanime-desktop-linux.AppImage ]; then
tar -czf seanime-desktop-linux.AppImage.tar.gz seanime-desktop-linux.AppImage
fi
- name: Zip the Tauri artifacts (Windows) 🗃️
if: matrix.os == 'windows-latest'
# Compress the Windows setup, not the signature
run: |
7z a seanime-desktop-windows-setup.exe.zip seanime-desktop-windows-setup.exe
# ----------------------------------------------------------------- delete
- name: Print all
if: matrix.os == 'macos-latest' || matrix.os == 'ubuntu-22.04'
run: ls -la .
- name: Print downloaded binaries
if: matrix.os == 'windows-latest'
run: dir .
# ----------------------------------------------------------------- delete
# Upload the Tauri artifacts to be used in the next job
- name: Upload tauri artifacts 📤
uses: actions/upload-artifact@v4
with:
# Artifact IDs:
# tauri-seanime-desktop-darwin-arm
# tauri-seanime-desktop-darwin-intel
# tauri-seanime-desktop-linux
# tauri-seanime-desktop-windows
name: tauri-${{ matrix.id }}
path: |
./seanime-desktop-darwin-arm.app.tar.gz
./seanime-desktop-darwin-arm.app.tar.gz.sig
./seanime-desktop-darwin-intel.app.tar.gz
./seanime-desktop-darwin-intel.app.tar.gz.sig
./seanime-desktop-linux.AppImage
./seanime-desktop-linux.AppImage.tar.gz
./seanime-desktop-linux.AppImage.sig
./seanime-desktop-windows-setup.exe
./seanime-desktop-windows-setup.exe.zip
./seanime-desktop-windows-setup.exe.sig
build-electron:
needs: build-server
uses: ./.github/workflows/electron-build.yml.new
release:
runs-on: ubuntu-latest
needs: [ build-server, build-tauri, build-electron ]
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Download tauri artifacts 📥
uses: actions/download-artifact@v4
with:
pattern: tauri-*
path: ./artifacts
merge-multiple: true
- name: Download electron artifacts 📥
uses: actions/download-artifact@v4
with:
pattern: electron-*
path: ./artifacts
merge-multiple: true
- name: Determine version from tag name 🔎
run: |
if [[ "$GITHUB_REF" == refs/tags/v* ]]; then
VERSION=${GITHUB_REF/refs\/tags\/v/}
echo "Version extracted from tag: $VERSION"
elif [[ "$GITHUB_REF" == refs/tags/* ]]; then
VERSION=${GITHUB_REF/refs\/tags\//}
echo "Version extracted from tag: $VERSION"
else
echo "Warning: No tag associated with this run. Defaulting to version 0.1.0."
VERSION="0.1.0"
fi
echo "VERSION=$VERSION" >> $GITHUB_ENV
- name: Print version
run: echo "Version is ${{ env.VERSION }}"
- name: Download server binaries 📥
uses: actions/download-artifact@v4
with:
pattern: go-*
path: ./artifacts
# ./artifacts
# |- binaries-seanime-server-darwin.tar (contains 2)
# |- binaries-seanime-server-linux.tar (contains 2)
# |- binaries-seanime-server-systray-windows.tar (contains 1)
merge-multiple: true
- name: Print all artifacts
run: ls -la ./artifacts
- name: Extract - Rename - Archive server binaries 📂
# ./artifacts
# |- ...
# \/ /binaries-seanime-server-darwin.tar
# |- seanime-server-darwin-amd64 -> ../seanime -> ../seanime-${{ env.VERSION }}_MacOS_x86_64.tar.gz
# |- seanime-server-darwin-arm64 -> ../seanime -> ../seanime-${{ env.VERSION }}_MacOS_arm64.tar.gz
# \/ /binaries-seanime-server-linux.tar
# |- seanime-server-linux-amd64 -> ../seanime -> ../seanime-${{ env.VERSION }}_Linux_x86_64.tar.gz
# |- seanime-server-linux-arm64 -> ../seanime -> ../seanime-${{ env.VERSION }}_Linux_arm64.tar.gz
# \/ /binaries-seanime-server-systray-windows.tar
# |- seanime-server-systray-windows.exe -> ../seanime.exe -> ../seanime-${{ env.VERSION }}_Windows_x86_64.zip
run: |
if [ -f ./artifacts/binaries-seanime-server-darwin.tar ]; then
# Extract binaries
tar -xf ./artifacts/binaries-seanime-server-darwin.tar -C ./artifacts
# Rename & compress binaries
mv ./artifacts/seanime-server-darwin-amd64 ./seanime
tar czf ./seanime-${{ env.VERSION }}_MacOS_x86_64.tar.gz ./seanime
rm -rf ./seanime
mv ./artifacts/seanime-server-darwin-arm64 ./seanime
tar czf ./seanime-${{ env.VERSION }}_MacOS_arm64.tar.gz ./seanime
rm -rf ./seanime
fi
if [ -f ./artifacts/binaries-seanime-server-linux.tar ]; then
# Extract binaries
tar -xf ./artifacts/binaries-seanime-server-linux.tar -C ./artifacts
# Rename & compress binaries
mv ./artifacts/seanime-server-linux-amd64 ./seanime
tar czf ./seanime-${{ env.VERSION }}_Linux_x86_64.tar.gz ./seanime
rm -rf ./seanime
mv ./artifacts/seanime-server-linux-arm64 ./seanime
tar czf ./seanime-${{ env.VERSION }}_Linux_arm64.tar.gz ./seanime
rm -rf ./seanime
fi
if [ -f ./artifacts/binaries-seanime-server-systray-windows.tar ]; then
# Extract binaries
tar -xf ./artifacts/binaries-seanime-server-systray-windows.tar -C ./artifacts
# Rename & compress binaries
mv ./artifacts/seanime-server-systray-windows.exe ./seanime.exe
7z a ./seanime-${{ env.VERSION }}_Windows_x86_64.zip ./seanime.exe
rm -rf ./seanime.exe
fi
shell: bash
- name: Print all artifacts
run: ls -la ./artifacts
- name: Move & Rename Tauri assets 📝🗃️
# Move Tauri assets to the root directory and rename them
# ./artifacts
# |- seanime-desktop-darwin-arm.app.tar.gz -> ../seanime-desktop-${{ env.VERSION }}_MacOS_arm64.app.tar.gz
# |- seanime-desktop-darwin-arm.app.tar.gz.sig -> ../seanime-desktop-${{ env.VERSION }}_MacOS_arm64.app.tar.gz.sig
# |- seanime-desktop-darwin-intel.app.tar.gz -> ../seanime-desktop-${{ env.VERSION }}_MacOS_x86_64.app.tar.gz
# |- seanime-desktop-darwin-intel.app.tar.gz.sig -> ../seanime-desktop-${{ env.VERSION }}_MacOS_x86_64.app.tar.gz.sig
# |- seanime-desktop-linux.AppImage -> ../seanime-desktop-${{ env.VERSION }}_Linux_x86_64.AppImage
# |- seanime-desktop-linux.AppImage.tar.gz -> ../seanime-desktop-${{ env.VERSION }}_Linux_x86_64.AppImage.tar.gz
# |- seanime-desktop-linux.AppImage.sig -> ../seanime-desktop-${{ env.VERSION }}_Linux_x86_64.AppImage.sig
# |- seanime-desktop-windows-setup.exe -> ../seanime-desktop-${{ env.VERSION }}_Windows_x86_64.exe
# |- seanime-desktop-windows-setup.exe.zip -> ../seanime-desktop-${{ env.VERSION }}_Windows_x86_64.exe.zip
# |- seanime-desktop-windows-setup.exe.sig -> ../seanime-desktop-${{ env.VERSION }}_Windows_x86_64.sig
run: |
if [ -f ./artifacts/seanime-desktop-darwin-arm.app.tar.gz ]; then
mv ./artifacts/seanime-desktop-darwin-arm.app.tar.gz ./seanime-desktop-${{ env.VERSION }}_MacOS_arm64.app.tar.gz
mv ./artifacts/seanime-desktop-darwin-arm.app.tar.gz.sig ./seanime-desktop-${{ env.VERSION }}_MacOS_arm64.app.tar.gz.sig
fi
if [ -f ./artifacts/seanime-desktop-darwin-intel.app.tar.gz ]; then
mv ./artifacts/seanime-desktop-darwin-intel.app.tar.gz ./seanime-desktop-${{ env.VERSION }}_MacOS_x86_64.app.tar.gz
mv ./artifacts/seanime-desktop-darwin-intel.app.tar.gz.sig ./seanime-desktop-${{ env.VERSION }}_MacOS_x86_64.app.tar.gz.sig
fi
if [ -f ./artifacts/seanime-desktop-linux.AppImage.tar.gz ]; then
mv ./artifacts/seanime-desktop-linux.AppImage ./seanime-desktop-${{ env.VERSION }}_Linux_x86_64.AppImage
mv ./artifacts/seanime-desktop-linux.AppImage.tar.gz ./seanime-desktop-${{ env.VERSION }}_Linux_x86_64.AppImage.tar.gz
mv ./artifacts/seanime-desktop-linux.AppImage.sig ./seanime-desktop-${{ env.VERSION }}_Linux_x86_64.AppImage.sig
fi
if [ -f ./artifacts/seanime-desktop-windows-setup.exe.zip ]; then
mv ./artifacts/seanime-desktop-windows-setup.exe ./seanime-desktop-${{ env.VERSION }}_Windows_x86_64.exe
mv ./artifacts/seanime-desktop-windows-setup.exe.zip ./seanime-desktop-${{ env.VERSION }}_Windows_x86_64.exe.zip
mv ./artifacts/seanime-desktop-windows-setup.exe.sig ./seanime-desktop-${{ env.VERSION }}_Windows_x86_64.sig
fi
- name: Move & Rename Electron assets 📝🗃️
# Move Electron assets to the root directory and rename them
run: |
if [ -f ./artifacts/seanime-denshi-darwin-arm64.dmg ]; then
mv ./artifacts/seanime-denshi-darwin-arm64.dmg ./seanime-denshi-${{ env.VERSION }}_MacOS_arm64.dmg
fi
if [ -f ./artifacts/seanime-denshi-darwin-x64.dmg ]; then
mv ./artifacts/seanime-denshi-darwin-x64.dmg ./seanime-denshi-${{ env.VERSION }}_MacOS_x64.dmg
fi
if [ -f ./artifacts/seanime-denshi-linux-x64.AppImage ]; then
mv ./artifacts/seanime-denshi-linux-x64.AppImage ./seanime-denshi-${{ env.VERSION }}_Linux_x64.AppImage
fi
if [ -f ./artifacts/seanime-denshi-windows-x64.exe ]; then
mv ./artifacts/seanime-denshi-windows-x64.exe ./seanime-denshi-${{ env.VERSION }}_Windows_x64.exe
fi
# Copy electron-builder YML files if they exist
find ./artifacts -name "*.yml" -exec cp {} ./ \;
- name: Print all
run: ls -la .
# Go
- name: Set up Go ⬇️
uses: actions/setup-go@v5
with:
go-version: '1.24.3'
# Build the Go script
- name: Build Go scripts 🛠️
run: |
go build -o generate_updater_latest ./.github/scripts/generate_updater_latest.go
go build -o generate_release_notes ./.github/scripts/generate_release_notes.go
# Run the Go scripts
- name: Generate latest.json 📦️
env:
APP_VERSION: ${{ env.VERSION }}
run: ./generate_updater_latest
- name: Generate release notes 📦️
env:
APP_VERSION: ${{ env.VERSION }}
run: ./generate_release_notes
- name: Read release notes 🔍
id: read_release_notes
run: |
BODY=$(cat whats-new.md)
echo "RELEASE_BODY<<EOF" >> $GITHUB_ENV
echo "$BODY" >> $GITHUB_ENV
echo "EOF" >> $GITHUB_ENV
- name: Create release draft 🚀🚀🚀
id: create_release
uses: softprops/action-gh-release@v2
with:
fail_on_unmatched_files: false
files: |
latest.json
latest.yml
latest-linux.yml
latest-mac.yml
latest-mac-arm64.yml
# Tauri Desktop builds
seanime-desktop-${{ env.VERSION }}_MacOS_arm64.app.tar.gz
seanime-desktop-${{ env.VERSION }}_MacOS_arm64.app.tar.gz.sig
seanime-desktop-${{ env.VERSION }}_MacOS_x86_64.app.tar.gz
seanime-desktop-${{ env.VERSION }}_MacOS_x86_64.app.tar.gz.sig
seanime-desktop-${{ env.VERSION }}_Linux_x86_64.AppImage
seanime-desktop-${{ env.VERSION }}_Linux_x86_64.AppImage.tar.gz
seanime-desktop-${{ env.VERSION }}_Linux_x86_64.AppImage.sig
seanime-desktop-${{ env.VERSION }}_Windows_x86_64.exe
seanime-desktop-${{ env.VERSION }}_Windows_x86_64.exe.zip
seanime-desktop-${{ env.VERSION }}_Windows_x86_64.sig
# Electron Desktop builds
seanime-denshi-${{ env.VERSION }}_MacOS_arm64.dmg
seanime-denshi-${{ env.VERSION }}_MacOS_x64.dmg
seanime-denshi-${{ env.VERSION }}_Linux_x64.AppImage
seanime-denshi-${{ env.VERSION }}_Windows_x64.exe
# Server builds
seanime-${{ env.VERSION }}_MacOS_x86_64.tar.gz
seanime-${{ env.VERSION }}_MacOS_arm64.tar.gz
seanime-${{ env.VERSION }}_Linux_x86_64.tar.gz
seanime-${{ env.VERSION }}_Linux_arm64.tar.gz
seanime-${{ env.VERSION }}_Windows_x86_64.zip
token: ${{ secrets.GITHUB_TOKEN }}
tag_name: v${{ env.VERSION }}
name: v${{ env.VERSION }}
draft: true
prerelease: false
body: |
## What's new?
${{ env.RELEASE_BODY }}
---
[Open an issue](https://github.com/5rahim/seanime/issues/new/choose)

View File

@@ -0,0 +1,653 @@
name: Release Draft
on:
workflow_dispatch:
push:
tags:
- 'v*'
permissions:
contents: write
jobs:
build-webapp: # TODO Uncomment if building web
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
# Web
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: '20'
# outputs in "seanime-web/out/" and "seanime-web/out-desktop/"
- name: Install dependencies and build Next.js app
run: |
cd seanime-web/
npm install
npm run build
npm run build:desktop
cd ..
# Upload the output to be used in the next job
- name: Upload web folder
uses: actions/upload-artifact@v4
with:
name: web
path: seanime-web/out # output dir of build
- name: Upload web folder (desktop)
uses: actions/upload-artifact@v4
with:
name: web-desktop
path: seanime-web/out-desktop # output dir of build:desktop
build-server:
needs: build-webapp # TODO Uncomment if building web
runs-on: ${{ matrix.os }}
strategy:
matrix:
# 6 binaries: 2 for Windows, 2 for Linux, 2 for macOS
include:
# This is the systray version of the Windows binary used for the server build
- os: macos-latest # seanime-server-systray-windows.exe
id: seanime-server-systray-windows
go_flags: -trimpath -buildmode=exe -ldflags="-s -w -H=windowsgui -extldflags '-static'"
# This is the non-systray version of the Windows binary used for the Tauri Windows build
- os: windows-latest # seanime-server-windows.exe
id: seanime-server-windows
go_flags: -trimpath -ldflags="-s -w" -tags=nosystray
# These are the Linux binaries used for the server build and the Tauri Linux build
- os: ubuntu-latest # seanime-server-linux-arm64, seanime-server-linux-amd64
id: seanime-server-linux
go_flags: -trimpath -ldflags="-s -w"
# These are the macOS binaries used for the server build and the Tauri macOS build
- os: macos-latest # seanime-server-darwin-arm64, seanime-server-darwin-amd64
id: seanime-server-darwin
go_env: CGO_ENABLED=0
go_flags: -trimpath -ldflags="-s -w"
steps:
- name: Checkout code ⬇️
uses: actions/checkout@v4
with:
fetch-depth: 0 # Fetch all history
fetch-tags: true # Fetch all tags
set-safe-directory: true # Add repo path as safe.directory
- name: Fetch all tags # Fetch all tags (again? can't hurt)
run: git fetch --force --tags
# Go
- name: Set up Go ⬇️
uses: actions/setup-go@v5
with:
go-version: '1.24.1'
# Download the web folders
# TODO Uncomment if building web
- name: Download web folder artifact
uses: actions/download-artifact@v4
with:
name: web
path: web
# Create the binary destination folder
# ./binaries
# |--- ...
- name: Create binary destination folder (UNIX) 🗃️
if: matrix.os == 'macos-latest' || matrix.os == 'ubuntu-latest'
run: mkdir -p binaries
- name: Create binary destination folder (Windows) 🗃️
if: matrix.os == 'windows-latest'
run: mkdir -p binaries
shell: bash
#---
# ONLY for Windows systray build (seanime-server-systray-windows)
# For the Windows systray build (built on macOS runner), we need to install the necessary dependencies
- name: Install C dependencies ⬇️ # macos windows systray build
if: matrix.id == 'seanime-server-systray-windows'
run: |
brew install filosottile/musl-cross/musl-cross
brew install llvm
brew install mingw-w64
# Build the Windows systray binary
# ./binaries/seanime-server-systray-windows.exe
- name: Build Windows Systray 📦️
if: matrix.id == 'seanime-server-systray-windows'
env:
GOARCH: amd64
GOOS: windows
CGO_ENABLED: 1
CC: x86_64-w64-mingw32-gcc
CXX: x86_64-w64-mingw32-g++
run: |
go build -o seanime-server-systray-windows.exe ${{ matrix.go_flags }} .
# Build the Windows non-systray binary
# ./seanime-server-windows.exe
- name: Build Windows Non-Systray 📦️
if: matrix.id == 'seanime-server-windows'
env:
GOARCH: amd64
GOOS: windows
CGO_ENABLED: 0
run: |
go build -o seanime-server-windows.exe ${{ matrix.go_flags }} .
shell: bash
# Build the Linux binaries
# ./seanime-server-linux-amd64
# ./seanime-server-linux-arm64
- name: Build Linux 📦️
if: matrix.id == 'seanime-server-linux'
run: |
CGO_ENABLED=0 GOARCH=amd64 go build -o seanime-server-linux-amd64 ${{ matrix.go_flags }} .
CGO_ENABLED=0 GOARCH=arm64 go build -o seanime-server-linux-arm64 ${{ matrix.go_flags }} .
# Build the macOS binaries
# ./seanime-server-darwin-amd64
# ./seanime-server-darwin-arm64
- name: Build macOS 📦️
if: matrix.id == 'seanime-server-darwin'
run: |
CGO_ENABLED=0 GOARCH=amd64 go build -o seanime-server-darwin-amd64 ${{ matrix.go_flags }} .
CGO_ENABLED=0 GOARCH=arm64 go build -o seanime-server-darwin-arm64 ${{ matrix.go_flags }} .
# Tar the binaries
- name: Tar the binaries (UNIX) 🗃️
if: matrix.os == 'macos-latest' || matrix.os == 'ubuntu-latest'
# binaries-seanime-server-darwin.tar
# binaries-seanime-server-linux.tar
# binaries-seanime-server-systray-windows.tar
run: |
tar -cf binaries-${{ matrix.id }}.tar seanime-server-*
# Zip the binaries
- name: Zip the binaries (Windows) 🗃️
if: matrix.os == 'windows-latest'
# binaries-seanime-server-windows.zip
run: |
7z a "binaries-${{ matrix.id }}.zip" seanime-server-*
# Upload the binaries to be used in the next job
- name: Upload binary folder (UNIX) 📤
uses: actions/upload-artifact@v4
if: matrix.os == 'macos-latest' || matrix.os == 'ubuntu-latest'
with:
# go-seanime-server-linux
# go-seanime-server-darwin
# go-seanime-server-systray-windows
name: go-${{ matrix.id }}
path: binaries-${{ matrix.id }}.tar
- name: Upload binary folder (Windows) 📤
uses: actions/upload-artifact@v4
if: matrix.os == 'windows-latest'
with:
# go-seanime-server-windows
name: go-${{ matrix.id }}
path: binaries-${{ matrix.id }}.zip
build-tauri:
needs: build-server
strategy:
fail-fast: false
matrix:
# IDs:
# - seanime-desktop-darwin-arm
# - seanime-desktop-darwin-intel
# - seanime-desktop-linux
# - seanime-desktop-windows
include:
# For Arm-based macs (M1 and above).
- os: 'macos-latest'
id: 'seanime-desktop-darwin-arm'
go_binary_id: 'seanime-server-darwin' # Artifact: go-seanime-server-darwin (contains both arm64 and x86_64)
args: '--target aarch64-apple-darwin'
# For Intel-based macs.
- os: 'macos-latest'
id: 'seanime-desktop-darwin-intel'
go_binary_id: 'seanime-server-darwin' # Artifact: go-seanime-server-darwin (contains both arm64 and x86_64)
args: '--target x86_64-apple-darwin'
# For Linux
- os: 'ubuntu-22.04' # for Linux
id: 'seanime-desktop-linux' # Artifact: go-seanime-server-linux (contains both arm64 and x86_64)
go_binary_id: 'seanime-server-linux'
args: ''
# For Windows
- os: 'windows-latest' # for Windows
id: 'seanime-desktop-windows' # Artifact: go-seanime-server-windows (contains x86_64)
go_binary_id: 'seanime-server-windows'
args: ''
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v4
- name: Install dependencies (Ubuntu) ⬇️
if: matrix.os == 'ubuntu-22.04'
run: |
sudo apt-get update
sudo apt-get install -y libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf
- name: Install Rust stable ⬇️
uses: dtolnay/rust-toolchain@stable
with:
# Those targets are only used on macOS runners so it's in an `if` to slightly speed up windows and linux builds.
targets: ${{ matrix.id == 'seanime-desktop-darwin-intel' && 'x86_64-apple-darwin' || matrix.id == 'seanime-desktop-darwin-arm' && 'aarch64-apple-darwin' || '' }}
- name: Setup node ⬇️
uses: actions/setup-node@v4
with:
node-version: '20'
- name: Install Tauri CLI ⬇️
run: |
cd seanime-desktop
npm install
- name: Rust cache ⬇️
uses: swatinem/rust-cache@v2
with:
workspaces: './seanime-desktop/src-tauri -> target'
# Download the web folder
# TODO Uncomment if building web
- name: Download web folder artifact
uses: actions/download-artifact@v4
with:
name: web-desktop
path: web-desktop
# Download the server binaries depending on matrix.go_binary_id
- name: Download server binaries 📥
uses: actions/download-artifact@v4
with:
# go-seanime-server-windows or
# go-seanime-server-linux or
# go-seanime-server-darwin
name: go-${{ matrix.go_binary_id }}
path: ./seanime-desktop/src-tauri/binaries
- name: Extract server binaries (UNIX) 📂
if: matrix.os == 'macos-latest' || matrix.os == 'ubuntu-22.04'
run: tar -xf ./seanime-desktop/src-tauri/binaries/binaries-${{ matrix.go_binary_id }}.tar -C ./seanime-desktop/src-tauri/binaries
- name: Extract server binaries (Windows) 📂
if: matrix.os == 'windows-latest'
run: 7z x ".\seanime-desktop\src-tauri\binaries\binaries-${{ matrix.go_binary_id }}.zip" "-o./seanime-desktop/src-tauri/binaries/"
# ----------------------------------------------------------------- delete
- name: Print downloaded binaries (UNIX)
if: matrix.os == 'macos-latest' || matrix.os == 'ubuntu-22.04'
run: ls -la ./seanime-desktop/src-tauri/binaries
- name: Print downloaded binaries (Windows)
if: matrix.os == 'windows-latest'
run: dir ./seanime-desktop/src-tauri/binaries
# ----------------------------------------------------------------- delete
- name: Determine target triple (UNIX) 🎯
# id: target_triple
if: matrix.os == 'macos-latest' || matrix.os == 'ubuntu-22.04'
run: |
TARGET_TRIPLE=$(rustc -Vv | grep host | cut -f2 -d' ')
echo "TARGET_TRIPLE=${TARGET_TRIPLE}" >> $GITHUB_ENV
- name: Determine target triple (Windows) 🎯
# id: target_triple
if: matrix.os == 'windows-latest'
run: |
$TARGET_TRIPLE = rustc -Vv | Select-String "host:" | ForEach-Object {$_.Line.split(" ")[1]}
echo "TARGET_TRIPLE=$TARGET_TRIPLE" >> $env:GITHUB_ENV
shell: pwsh
# seanime-server-windows.exe -> seanime-x86_64-pc-windows-msvc.exe
- name: Rename sidecar binary (Windows) 📝
if: matrix.id == 'seanime-desktop-windows'
run: |
powershell -Command "Rename-Item -Path ./seanime-desktop/src-tauri/binaries/seanime-server-windows.exe -NewName seanime-${{ env.TARGET_TRIPLE }}.exe"
# seanime-server-linux-amd64 -> seanime-unknown-linux-musl
- name: Rename sidecar binaries (Linux) 📝
if: matrix.id == 'seanime-desktop-linux'
run: |
mv ./seanime-desktop/src-tauri/binaries/seanime-server-linux-amd64 ./seanime-desktop/src-tauri/binaries/seanime-${{ env.TARGET_TRIPLE }}
# seanime-server-darwin-amd64 -> seanime-x86_64-apple-darwin
- name: Rename sidecar binaries (MacOS Intel) 📝
if: matrix.id == 'seanime-desktop-darwin-intel'
# Here we hardcode the target triple because the macOS runner is ARM based
run: |
mv ./seanime-desktop/src-tauri/binaries/seanime-server-darwin-amd64 ./seanime-desktop/src-tauri/binaries/seanime-x86_64-apple-darwin
# seanime-server-darwin-arm64 -> seanime-aarch64-apple-darwin
- name: Rename sidecar binaries (MacOS Arm) 📝
if: matrix.id == 'seanime-desktop-darwin-arm'
run: |
mv ./seanime-desktop/src-tauri/binaries/seanime-server-darwin-arm64 ./seanime-desktop/src-tauri/binaries/seanime-${{ env.TARGET_TRIPLE }}
# ----------------------------------------------------------------- delete
- name: Print downloaded binaries
if: matrix.os == 'macos-latest' || matrix.os == 'ubuntu-22.04'
run: ls -la ./seanime-desktop/src-tauri/binaries
- name: Print downloaded binaries
if: matrix.os == 'windows-latest'
run: dir ./seanime-desktop/src-tauri/binaries
# ----------------------------------------------------------------- delete
# Build Tauri
- name: Run Tauri action 🚀
id: tauri-action
uses: tauri-apps/tauri-action@v0
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
# APPLE_CERTIFICATE: ${{ secrets.APPLE_CERTIFICATE }}
# APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
# APPLE_SIGNING_IDENTITY: ${{ secrets.APPLE_SIGNING_IDENTITY }}
# APPLE_ID: ${{ secrets.APPLE_ID }}
# APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }}
# APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
with:
projectPath: './seanime-desktop'
args: ${{ matrix.args }}
updaterJsonPreferNsis: true
- name: Rename Tauri artifacts (UNIX) 📝
# ./
# |- seanime-desktop-darwin-arm.app.tar.gz
# |- seanime-desktop-darwin-arm.app.tar.gz.sig <- Signature
# |- seanime-desktop-darwin-intel.app.tar.gz
# |- seanime-desktop-darwin-intel.app.tar.gz.sig <- Signature
# |- seanime-desktop-linux.AppImage <- UNCOMPRESSED
# |- seanime-desktop-linux.AppImage.sig <- Signature UNCOMPRESSED
# |- seanime-desktop-windows-setup.exe <- UNCOMPRESSED
# |- seanime-desktop-windows-setup.exe.sig <- Signature UNCOMPRESSED
if: matrix.os == 'macos-latest' || matrix.os == 'ubuntu-22.04'
# We hardcode the macOS target triple because the macOS runner is ARM based and builds both arm64 and x86_64
run: |
if [ -f ./seanime-desktop/src-tauri/target/aarch64-apple-darwin/release/bundle/macos/Seanime\ Desktop.app.tar.gz ]; then
mv ./seanime-desktop/src-tauri/target/aarch64-apple-darwin/release/bundle/macos/Seanime\ Desktop.app.tar.gz ./seanime-desktop-darwin-arm.app.tar.gz
mv ./seanime-desktop/src-tauri/target/aarch64-apple-darwin/release/bundle/macos/Seanime\ Desktop.app.tar.gz.sig ./seanime-desktop-darwin-arm.app.tar.gz.sig
elif [ -f ./seanime-desktop/src-tauri/target/x86_64-apple-darwin/release/bundle/macos/Seanime\ Desktop.app.tar.gz ]; then
mv ./seanime-desktop/src-tauri/target/x86_64-apple-darwin/release/bundle/macos/Seanime\ Desktop.app.tar.gz ./seanime-desktop-darwin-intel.app.tar.gz
mv ./seanime-desktop/src-tauri/target/x86_64-apple-darwin/release/bundle/macos/Seanime\ Desktop.app.tar.gz.sig ./seanime-desktop-darwin-intel.app.tar.gz.sig
elif [ -f ./seanime-desktop/src-tauri/target/release/bundle/appimage/Seanime\ Desktop_${{ steps.tauri-action.outputs.appVersion }}_amd64.AppImage ]; then
mv ./seanime-desktop/src-tauri/target/release/bundle/appimage/Seanime\ Desktop_${{ steps.tauri-action.outputs.appVersion }}_amd64.AppImage ./seanime-desktop-linux.AppImage
mv ./seanime-desktop/src-tauri/target/release/bundle/appimage/Seanime\ Desktop_${{ steps.tauri-action.outputs.appVersion }}_amd64.AppImage.sig ./seanime-desktop-linux.AppImage.sig
fi
- name: Rename Tauri artifacts (Windows) 📝
if: matrix.os == 'windows-latest'
run: |
powershell -Command "Move-Item -Path './seanime-desktop/src-tauri/target/release/bundle/nsis/Seanime Desktop_${{ steps.tauri-action.outputs.appVersion }}_x64-setup.exe' -Destination './seanime-desktop-windows-setup.exe'"
powershell -Command "Move-Item -Path './seanime-desktop/src-tauri/target/release/bundle/nsis/Seanime Desktop_${{ steps.tauri-action.outputs.appVersion }}_x64-setup.exe.sig' -Destination './seanime-desktop-windows-setup.exe.sig'"
- name: Tar the Tauri artifacts (Linux) 🗃️
if: matrix.os == 'ubuntu-22.04'
# Note: The macOS artifacts are already packaged, so we don't need to compress them
# Compress the Linux AppImage, not the signature
run: |
if [ -f ./seanime-desktop-linux.AppImage ]; then
tar -czf seanime-desktop-linux.AppImage.tar.gz seanime-desktop-linux.AppImage
fi
- name: Zip the Tauri artifacts (Windows) 🗃️
if: matrix.os == 'windows-latest'
# Compress the Windows setup, not the signature
run: |
7z a seanime-desktop-windows-setup.exe.zip seanime-desktop-windows-setup.exe
# ----------------------------------------------------------------- delete
- name: Print all
if: matrix.os == 'macos-latest' || matrix.os == 'ubuntu-22.04'
run: ls -la .
- name: Print downloaded binaries
if: matrix.os == 'windows-latest'
run: dir .
# ----------------------------------------------------------------- delete
# Upload the Tauri artifacts to be used in the next job
- name: Upload tauri artifacts 📤
uses: actions/upload-artifact@v4
with:
# Artifact IDs:
# tauri-seanime-desktop-darwin-arm
# tauri-seanime-desktop-darwin-intel
# tauri-seanime-desktop-linux
# tauri-seanime-desktop-windows
name: tauri-${{ matrix.id }}
path: |
./seanime-desktop-darwin-arm.app.tar.gz
./seanime-desktop-darwin-arm.app.tar.gz.sig
./seanime-desktop-darwin-intel.app.tar.gz
./seanime-desktop-darwin-intel.app.tar.gz.sig
./seanime-desktop-linux.AppImage
./seanime-desktop-linux.AppImage.tar.gz
./seanime-desktop-linux.AppImage.sig
./seanime-desktop-windows-setup.exe
./seanime-desktop-windows-setup.exe.zip
./seanime-desktop-windows-setup.exe.sig
release:
runs-on: ubuntu-latest
needs: [ build-server, build-tauri ]
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Download tauri artifacts 📥
uses: actions/download-artifact@v4
with:
pattern: tauri-*
path: ./artifacts
merge-multiple: true
- name: Determine version from tag name 🔎
run: |
if [[ "$GITHUB_REF" == refs/tags/v* ]]; then
VERSION=${GITHUB_REF/refs\/tags\/v/}
echo "Version extracted from tag: $VERSION"
elif [[ "$GITHUB_REF" == refs/tags/* ]]; then
VERSION=${GITHUB_REF/refs\/tags\//}
echo "Version extracted from tag: $VERSION"
else
echo "Warning: No tag associated with this run. Defaulting to version 0.1.0."
VERSION="0.1.0"
fi
echo "VERSION=$VERSION" >> $GITHUB_ENV
- name: Print version
run: echo "Version is ${{ env.VERSION }}"
- name: Download server binaries 📥
uses: actions/download-artifact@v4
with:
pattern: go-*
path: ./artifacts
# ./artifacts
# |- binaries-seanime-server-darwin.tar (contains 2)
# |- binaries-seanime-server-linux.tar (contains 2)
# |- binaries-seanime-server-systray-windows.tar (contains 1)
merge-multiple: true
- name: Print all artifacts
run: ls -la ./artifacts
- name: Extract - Rename - Archive server binaries 📂
# ./artifacts
# |- ...
# \/ /binaries-seanime-server-darwin.tar
# |- seanime-server-darwin-amd64 -> ../seanime -> ../seanime-${{ env.VERSION }}_MacOS_x86_64.tar.gz
# |- seanime-server-darwin-arm64 -> ../seanime -> ../seanime-${{ env.VERSION }}_MacOS_arm64.tar.gz
# \/ /binaries-seanime-server-linux.tar
# |- seanime-server-linux-amd64 -> ../seanime -> ../seanime-${{ env.VERSION }}_Linux_x86_64.tar.gz
# |- seanime-server-linux-arm64 -> ../seanime -> ../seanime-${{ env.VERSION }}_Linux_arm64.tar.gz
# \/ /binaries-seanime-server-systray-windows.tar
# |- seanime-server-systray-windows.exe -> ../seanime.exe -> ../seanime-${{ env.VERSION }}_Windows_x86_64.zip
run: |
if [ -f ./artifacts/binaries-seanime-server-darwin.tar ]; then
# Extract binaries
tar -xf ./artifacts/binaries-seanime-server-darwin.tar -C ./artifacts
# Rename & compress binaries
mv ./artifacts/seanime-server-darwin-amd64 ./seanime
tar czf ./seanime-${{ env.VERSION }}_MacOS_x86_64.tar.gz ./seanime
rm -rf ./seanime
mv ./artifacts/seanime-server-darwin-arm64 ./seanime
tar czf ./seanime-${{ env.VERSION }}_MacOS_arm64.tar.gz ./seanime
rm -rf ./seanime
fi
if [ -f ./artifacts/binaries-seanime-server-linux.tar ]; then
# Extract binaries
tar -xf ./artifacts/binaries-seanime-server-linux.tar -C ./artifacts
# Rename & compress binaries
mv ./artifacts/seanime-server-linux-amd64 ./seanime
tar czf ./seanime-${{ env.VERSION }}_Linux_x86_64.tar.gz ./seanime
rm -rf ./seanime
mv ./artifacts/seanime-server-linux-arm64 ./seanime
tar czf ./seanime-${{ env.VERSION }}_Linux_arm64.tar.gz ./seanime
rm -rf ./seanime
fi
if [ -f ./artifacts/binaries-seanime-server-systray-windows.tar ]; then
# Extract binaries
tar -xf ./artifacts/binaries-seanime-server-systray-windows.tar -C ./artifacts
# Rename & compress binaries
mv ./artifacts/seanime-server-systray-windows.exe ./seanime.exe
7z a ./seanime-${{ env.VERSION }}_Windows_x86_64.zip ./seanime.exe
rm -rf ./seanime.exe
fi
shell: bash
- name: Print all artifacts
run: ls -la ./artifacts
- name: Move & Rename Tauri assets 📝🗃️
# Move Tauri assets to the root directory and rename them
# ./artifacts
# |- seanime-desktop-darwin-arm.app.tar.gz -> ../seanime-desktop-${{ env.VERSION }}_MacOS_arm64.app.tar.gz
# |- seanime-desktop-darwin-arm.app.tar.gz.sig -> ../seanime-desktop-${{ env.VERSION }}_MacOS_arm64.app.tar.gz.sig
# |- seanime-desktop-darwin-intel.app.tar.gz -> ../seanime-desktop-${{ env.VERSION }}_MacOS_x86_64.app.tar.gz
# |- seanime-desktop-darwin-intel.app.tar.gz.sig -> ../seanime-desktop-${{ env.VERSION }}_MacOS_x86_64.app.tar.gz.sig
# |- seanime-desktop-linux.AppImage -> ../seanime-desktop-${{ env.VERSION }}_Linux_x86_64.AppImage
# |- seanime-desktop-linux.AppImage.tar.gz -> ../seanime-desktop-${{ env.VERSION }}_Linux_x86_64.AppImage.tar.gz
# |- seanime-desktop-linux.AppImage.sig -> ../seanime-desktop-${{ env.VERSION }}_Linux_x86_64.AppImage.sig
# |- seanime-desktop-windows-setup.exe -> ../seanime-desktop-${{ env.VERSION }}_Windows_x86_64.exe
# |- seanime-desktop-windows-setup.exe.zip -> ../seanime-desktop-${{ env.VERSION }}_Windows_x86_64.exe.zip
# |- seanime-desktop-windows-setup.exe.sig -> ../seanime-desktop-${{ env.VERSION }}_Windows_x86_64.sig
run: |
if [ -f ./artifacts/seanime-desktop-darwin-arm.app.tar.gz ]; then
mv ./artifacts/seanime-desktop-darwin-arm.app.tar.gz ./seanime-desktop-${{ env.VERSION }}_MacOS_arm64.app.tar.gz
mv ./artifacts/seanime-desktop-darwin-arm.app.tar.gz.sig ./seanime-desktop-${{ env.VERSION }}_MacOS_arm64.app.tar.gz.sig
fi
if [ -f ./artifacts/seanime-desktop-darwin-intel.app.tar.gz ]; then
mv ./artifacts/seanime-desktop-darwin-intel.app.tar.gz ./seanime-desktop-${{ env.VERSION }}_MacOS_x86_64.app.tar.gz
mv ./artifacts/seanime-desktop-darwin-intel.app.tar.gz.sig ./seanime-desktop-${{ env.VERSION }}_MacOS_x86_64.app.tar.gz.sig
fi
if [ -f ./artifacts/seanime-desktop-linux.AppImage.tar.gz ]; then
mv ./artifacts/seanime-desktop-linux.AppImage ./seanime-desktop-${{ env.VERSION }}_Linux_x86_64.AppImage
mv ./artifacts/seanime-desktop-linux.AppImage.tar.gz ./seanime-desktop-${{ env.VERSION }}_Linux_x86_64.AppImage.tar.gz
mv ./artifacts/seanime-desktop-linux.AppImage.sig ./seanime-desktop-${{ env.VERSION }}_Linux_x86_64.AppImage.sig
fi
if [ -f ./artifacts/seanime-desktop-windows-setup.exe.zip ]; then
mv ./artifacts/seanime-desktop-windows-setup.exe ./seanime-desktop-${{ env.VERSION }}_Windows_x86_64.exe
mv ./artifacts/seanime-desktop-windows-setup.exe.zip ./seanime-desktop-${{ env.VERSION }}_Windows_x86_64.exe.zip
mv ./artifacts/seanime-desktop-windows-setup.exe.sig ./seanime-desktop-${{ env.VERSION }}_Windows_x86_64.sig
fi
- name: Print all
run: ls -la .
# Go
- name: Set up Go ⬇️
uses: actions/setup-go@v5
with:
go-version: '1.24.1'
# Build the Go script
- name: Build Go scripts 🛠️
run: |
go build -o generate_updater_latest ./.github/scripts/generate_updater_latest.go
go build -o generate_release_notes ./.github/scripts/generate_release_notes.go
# Run the Go scripts
- name: Generate latest.json 📦️
env:
APP_VERSION: ${{ env.VERSION }}
run: ./generate_updater_latest
- name: Generate release notes 📦️
env:
APP_VERSION: ${{ env.VERSION }}
run: ./generate_release_notes
- name: Read release notes 🔍
id: read_release_notes
run: |
BODY=$(cat whats-new.md)
echo "RELEASE_BODY<<EOF" >> $GITHUB_ENV
echo "$BODY" >> $GITHUB_ENV
echo "EOF" >> $GITHUB_ENV
- name: Create release draft 🚀🚀🚀
id: create_release
uses: softprops/action-gh-release@v2
with:
fail_on_unmatched_files: false
files: |
latest.json
seanime-desktop-${{ env.VERSION }}_MacOS_arm64.app.tar.gz
seanime-desktop-${{ env.VERSION }}_MacOS_arm64.app.tar.gz.sig
seanime-desktop-${{ env.VERSION }}_MacOS_x86_64.app.tar.gz
seanime-desktop-${{ env.VERSION }}_MacOS_x86_64.app.tar.gz.sig
seanime-desktop-${{ env.VERSION }}_Linux_x86_64.AppImage
seanime-desktop-${{ env.VERSION }}_Linux_x86_64.AppImage.tar.gz
seanime-desktop-${{ env.VERSION }}_Linux_x86_64.AppImage.sig
seanime-desktop-${{ env.VERSION }}_Windows_x86_64.exe
seanime-desktop-${{ env.VERSION }}_Windows_x86_64.exe.zip
seanime-desktop-${{ env.VERSION }}_Windows_x86_64.sig
seanime-${{ env.VERSION }}_MacOS_x86_64.tar.gz
seanime-${{ env.VERSION }}_MacOS_arm64.tar.gz
seanime-${{ env.VERSION }}_Linux_x86_64.tar.gz
seanime-${{ env.VERSION }}_Linux_arm64.tar.gz
seanime-${{ env.VERSION }}_Windows_x86_64.zip
token: ${{ secrets.GITHUB_TOKEN }}
tag_name: v${{ env.VERSION }}
name: v${{ env.VERSION }}
draft: true
prerelease: false
body: |
## What's new?
${{ env.RELEASE_BODY }}
---
[Open an issue](https://github.com/5rahim/seanime/issues/new/choose)

91
seanime-2.9.10/.gitignore vendored Normal file
View File

@@ -0,0 +1,91 @@
.idea
.junie
logs/
*.db
.run/
testdata/
.vscode
.cursor
.DS_Store
*/.DS_Store
Dockerfile
Dockerfile.dev
dev.dockerfile
.dockerignore
go.work
_docker-compose.yml
logs
web/
web-desktop/
web-denshi/
out/
./assets/
test/testdata/**/*.json
test/sample
test/providers.json
test/db.json
test/config.json
test/config.toml
whats-new.md
*/mock_data.json
TODO-priv.md
##
## Parser
##
seanime-parser/.git
##
## Web
##
seanime-web/node_modules
seanime-web/out
seanime-web/out-desktop
seanime-web/out-denshi
seanime-web/web
seanime-web/.next
seanime-web/.idea
# dependencies
seanime-web/.pnp
seanime-web/.pnp.js
seanime-web/.yarn/install-state.gz
# testing
seanime-web/coverage
# next.js
# production
seanime-web/build
# misc
seanime-web/.DS_Store
seanime-web/*.pem
# debug
seanime-web/npm-debug.log*
seanime-web/yarn-debug.log*
seanime-web/yarn-error.log*
# local env files
seanime-web/.env*.local
# vercel
seanime-web/.vercel
# typescript
seanime-web/*.tsbuildinfo
seanime-web/next-env.d.ts
seanime-web/snapshot
seanime-web/logs
seanime-web/analyze
seanime-web/TODO-priv.md
seanime-web/CHANGELOG-priv.md
codegen/generated/hooks.mdx
internal/extension_repo/goja_onlinestream_test/test1.ts
internal/extension_repo/goja_plugin_test
*.sh

View File

@@ -0,0 +1,11 @@
run:
concurrency: 4
timeout: 1m
issues-exit-code: 1
tests: true
linters:
disable-all: true
enable:
- exhaustruct

1223
seanime-2.9.10/CHANGELOG.md Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,74 @@
# Contribution Guide
All contributions are welcome _if_ they are in the scope of the project. If you're not sure about something, feel free to ask.
## Guidelines
- Make sure you are familiar with Go and React.
- Your contributions must be small and focused. If you want to add a new feature that requires substantial changes or additions to the codebase, please contact the dev first.
- Make sure your changes are in line with the project's goals (Create a feature request if you're unsure).
- Make sure your changes are well tested and do not introduce any new issues or regressions.
- You should try and make your changes against the **most active branch**, which is usually the `main` branch but
may be different when a new version is being developed.
## How to contribute
1. Create an issue before starting work on a feature or a bug fix.
2. Fork the repository, clone it, and create a new branch.
```shell
# Clone your fork of the repo
git clone https://github.com/<your-username>/seanime.git
# Navigate to the directory
cd seanime
# Assign to a remote called "upstream"
git remote add upstream https://github.com/5rahim/seanime.git
```
3. Get the latest changes from the original repository.
```shell
git fetch --all
git rebase upstream/main
```
4. Create a new branch for your feature or bug fix off of the `main` branch.
```shell
git checkout -b <feature-branch> main
```
5. Make your changes, test and commit them.
6. Locally rebase your changes on top of the latest changes from the original repository.
```shell
git pull --rebase upstream main
```
7. Push your changes to your fork.
```shell
git push -u origin <feature-branch>
```
8. Create a pull request to the `main` branch of the original repository.
9. Wait for the maintainers to review your pull request.
10. Make changes if requested.
11. Once your pull request is approved, it will be merged.
12. Keep your fork in sync with the original repository.
```shell
git fetch --all
git checkout main
git rebase upstream/main
git push -u origin main
```
## Areas
[Issues](https://github.com/5rahim/seanime/issues?q=is%3Aissue+is%3Aopen+label%3A%22open+to+contribution%22)

View File

@@ -0,0 +1,192 @@
# Seanime Development and Build Guide
## Prerequisites
- Go 1.23+
- Node.js 18+ and npm
## Build Process
### 1. Building the Web Interface
1. Navigate to `seanime-web`, install the dependencies, and build the web interface:
```bash
cd seanime-web
npm install
npm run build
```
2. After the build completes, a new `out` directory will be created inside `seanime-web`.
3. Move the contents of the `out` directory to a new `web` directory at the root of the project (see the sketch below).
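A minimal sketch of step 3, assuming it is run from the repository root after the web build has completed:
```bash
# from the repository root: place the built web assets in ./web
mv seanime-web/out ./web
```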
### 2. Building the Server
Choose the appropriate command based on your target platform:
1. **Windows (System Tray)**:
```bash
set CGO_ENABLED=1
go build -o seanime.exe -trimpath -ldflags="-s -w -H=windowsgui -extldflags '-static'"
```
2. **Windows (No System Tray)** - Used by the desktop app:
```bash
go build -o seanime.exe -trimpath -ldflags="-s -w" -tags=nosystray
```
3. **Linux/macOS**:
```bash
go build -o seanime -trimpath -ldflags="-s -w"
```
**Important**: The web interface must be built first before building the server.
---
## Development Guide
### Getting Started
The project is built with:
- Backend: Go server with REST API endpoints
- Frontend: React/Next.js web interface
For development, you should be familiar with both Go and React.
### Setting Up the Development Environment
#### Server Development
1. **Development environment**:
- Create a dummy directory that will be used as the data directory during development.
- Create a dummy `web` folder at the root containing at least one file, or simply do the _Building the Web Interface_ step of the build process. (This is required for the server to start.)
2. **Run the server**:
```bash
go run main.go --datadir="path/to/datadir"
```
- This will generate all the files needed in the `path/to/datadir` directory.
3. **Configure the development server**:
- Change the port in the `config.toml` located in the development data directory to `43000`. The web interface will connect to this port during development. Change the host to `0.0.0.0` to allow connections from other devices.
- Re-run the server with the updated configuration.
The server will be available at `http://127.0.0.1:43000`.
#### Web Interface Development
1. **Navigate to the web directory**:
```bash
cd seanime-web
```
2. **Install dependencies**:
```bash
npm install
```
3. **Start the development server**:
```bash
npm run dev
```
The development web interface will be accessible at `http://127.0.0.1:43210`.
**Note**: During development, the web interface is served by the Next.js development server on port `43210`.
The Next.js development environment is configured such that all requests are made to the Go server running on port `43000`.
### Understanding the Codebase Architecture
#### API and Route Handlers
The backend follows a well-defined structure:
1. **Routes Declaration**:
- All routes are registered in `internal/handlers/routes.go`
- Each route is associated with a specific handler method
2. **Handler Implementation**:
- Handler methods are defined in `internal/handlers/` directory
- Handlers are documented with comments above each declaration (similar to OpenAPI); a sketch of the format follows this list
3. **Automated Type Generation**:
- The comments above route handlers serve as documentation for automatic type generation
- Types for the frontend are generated in:
- `seanime-web/api/generated/types.ts`
- `seanime-web/api/generated/endpoint.types.ts`
- `seanime-web/api/generated/hooks_template.ts`
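For illustration, here is a minimal, hypothetical handler showing the annotation format parsed by the codegen package included in this commit (`@summary`, `@desc`, `@route`, `@param`, `@returns`). The function name, endpoint, parameter and signature are made up and do not reflect the real handlers in `internal/handlers`:
```go
package handlers

// HandleGetExampleEntry
//
// @summary returns an example entry for the given ID.
// @desc Hypothetical handler, shown only to illustrate the comment annotations.
// @route /api/v1/example/{id} [GET]
// @param id - int - true - "The media ID"
// @returns string
func HandleGetExampleEntry() string {
	// The generator only reads the comments above; the body is irrelevant to type generation.
	return "example"
}
```
Request body fields are picked up separately, from a local struct named `body` declared inside the handler, as the generator source further down in this commit shows.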
#### Updating API Types
After modifying route handlers or structs used by the frontend, you must regenerate the TypeScript types:
```bash
# Run the code generator
go generate ./codegen/main.go
```
#### AniList GraphQL API Integration
The project integrates with the AniList GraphQL API:
1. **GraphQL Queries**:
- Queries are defined in `internal/api/anilist/queries/*.graphql`
- Generated using `gqlgenc`
2. **Updating GraphQL Schema**:
If you modify the GraphQL schema, run these commands:
```bash
go get github.com/Yamashou/gqlgenc@v0.25.4
cd internal/api/anilist
go run github.com/Yamashou/gqlgenc
cd ../../..
go mod tidy
```
3. **Client Implementation**:
- Generated queries and types are in `internal/api/anilist/client_gen.go`
- A wrapper implementation in `internal/api/anilist/client.go` provides a cleaner interface
- The wrapper also includes a mock client for testing
### Running Tests
**Important**: Run tests individually rather than all at once.
#### Test Configuration
1. Create a dummy AniList account for testing
2. Obtain an access token (from browser)
3. Create/edit `test/config.toml` using `config.example.toml` as a template
#### Writing Tests
Tests use the `test_utils` package which provides:
- `InitTestProvider` method to initialize the test configuration
- Flags to enable/disable specific test categories
Example:
```go
func TestSomething(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.Anilist())
// Test code here
}
```
#### Testing with Third-Party Apps
Some tests interact with applications like Transmission and qBittorrent:
- Ensure these applications are installed and running
- Configure `test/config.toml` with appropriate connection details
## Notes and Warnings
- hls.js versions 1.6.0 and above may cause appendBuffer fatal errors

674
seanime-2.9.10/LICENSE Normal file
View File

@@ -0,0 +1,674 @@
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
<program> Copyright (C) <year> <name of author>
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<https://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<https://www.gnu.org/licenses/why-not-lgpl.html>.

147
seanime-2.9.10/README.md Normal file
View File

@@ -0,0 +1,147 @@
<p align="center">
<a href="https://seanime.rahim.app/">
<img src="docs/images/logo_2.png" alt="preview" width="100px"/>
</a>
</p>
<h1 align="center"><b>Seanime</b></h1>
<p align="center">
<img src="docs/images/4/github-banner-sq.png" alt="preview" width="100%"/>
</p>
<p align="center">
<a href="https://seanime.rahim.app/docs">Documentation</a> |
<a href="https://github.com/5rahim/seanime/releases">Latest release</a> |
<a href="#screenshots">Screenshots</a> |
<a href="https://www.youtube.com/playlist?list=PLgQO-Ih6JClhFFdEVuNQJejyX_8iH82gl">Tutorials</a> |
<a href="https://discord.gg/Sbr7Phzt6m">Discord</a>
</p>
<div align="center">
<a href="https://github.com/5rahim/seanime/releases">
<img src="https://img.shields.io/github/v/release/5rahim/seanime?style=flat-square&color=blue" alt="" />
</a>
<a href="https://github.com/5rahim/seanime/releases">
<img src="https://img.shields.io/github/downloads/5rahim/seanime/total?style=flat-square&color=blue" alt="" />
</a>
<a href="https://github.com/sponsors/5rahim">
<img src="https://img.shields.io/static/v1?label=Sponsor&style=flat-square&message=%E2%9D%A4&logo=GitHub&color=%23fe8e86" alt="" />
</a>
</div>
<h5 align="center">
Leave a star if you like the project! ⭐️
</h5>
## About
Seanime is a **media server** with a **web interface** and **desktop app** for watching anime, managing your local library, and reading manga.
## Features
- Cross-platform web interface and desktop app
- Complete AniList integration (browse, manage, score, discover, etc.)
- Offline mode for both anime and manga
- Scan your local library in seconds, no renaming needed
- Integrated torrent search engine
- Stream torrents directly to your media player without downloading them first, using BitTorrent, Torbox and Real-Debrid
- Support for qBittorrent, Transmission, Torbox and Real-Debrid for downloading
- Auto-downloading for new episodes with custom filters
- MPV, VLC, MPC-HC, and mobile player app support for watching
- Transcoding and direct play for streaming to any device's web browser
- Online streaming with support for multiple web sources & extensions
- Read and download manga chapters with support for multiple sources & extensions
- Extension system for adding new sources
- Schedule for tracking upcoming or missed episodes
- Customizable UI
- And more
## Get started
Read the installation guide to set up Seanime on your device.
<p align="center">
<a href="https://seanime.rahim.app/docs/getting-started" style="font-size:18px;" align="center">
How to install Seanime
</a>
</p>
## Goal
This is a one-person project and may not meet every use case. If it doesn't fully fit your needs, other tools might be a better match.
### Not planned
- Support for other providers such as Trakt, SIMKL, etc.
- Support for other media players
- Dedicated clients (TV, mobile, etc.)
- Support for other languages (translations)
Consider sponsoring or sharing the project if you want to see more features implemented.
## Sponsors
The maintenance of this project is made possible by the sponsors.
<p align="center">
<!-- real-sponsors --><a href="https://github.com/tobenaii"><img src="https:&#x2F;&#x2F;github.com&#x2F;tobenaii.png" width="60px" alt="User avatar: tobenaii" /></a><!-- real-sponsors -->
<!-- real-sponsors --><a href="https://github.com/TorBox-App"><img src="https:&#x2F;&#x2F;github.com&#x2F;TorBox-App.png" width="60px" alt="User avatar: TorBox-App" /></a><!-- real-sponsors -->
</p>
## Development and Build
Building from source is straightforward; you'll need Node.js and Go installed on your system.
Development and testing might require additional configuration.
[Read more here](https://github.com/5rahim/seanime/blob/main/DEVELOPMENT_AND_BUILD.md)
## Screenshots
### Scanning
<img src="docs/images/4/rec-scanning.gif" alt="preview" width="100%"/>
### Watching
<img src="docs/images/4/rec-anime-watching_02.gif" alt="preview" width="100%"/>
### Downloading
<img src="https://i.postimg.cc/RVpFYqYb/img-2025-02-02-17-31-41.gif" alt="preview" width="100%"/>
### Manga
<img src="docs/images/4/manga-reader_01--sq.jpg" alt="preview" width="100%"/>
### Torrent streaming
<img src="docs/images/4/rec-torrentstream.gif" alt="preview" width="100%"/>
### Debrid streaming
<img src="docs/images/4/rec-debridstream.gif" alt="preview" width="100%"/>
<details>
<summary><b>View more</b></summary>
### Online streaming
<img src="docs/images/4/online-streaming--sq.jpg" alt="preview" width="100%"/>
### Discover
<img src="docs/images/4/discover_02--sq.jpg" alt="preview" width="100%"/>
### AniList integration
<img src="docs/images/4/anilist_01--sq.jpg" alt="preview" width="100%"/>
</details>
## Disclaimer
Seanime and its developer do not host, store, or distribute any content found within the application. All content metadata, including images, are sourced from publicly available APIs such as AniList, AniDB and TheTVDB.
Furthermore, Seanime does not endorse or promote piracy in any form. It is the user's responsibility to ensure that they are in compliance with their local laws and regulations.

View File

@@ -0,0 +1,10 @@
# Codegen
Run after adding/removing/updating:
- A struct returned by a route handler.
- A route handler.
- A route endpoint.
Code is generated in the `./codegen` directory and in `../seanime-web/src/api/generated`.
Make sure the web codebase is up-to-date after running this script.
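To run the generator, use the command referenced in `DEVELOPMENT_AND_BUILD.md` from the repository root:
```bash
go generate ./codegen/main.go
```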

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,26 @@
package codegen
import (
"seanime/internal/api/anilist"
hibiketorrent "seanime/internal/extension/hibike/torrent"
)
//type Struct1 struct {
// Struct2
//}
//
//type Struct2 struct {
// Text string `json:"text"`
//}
//type Struct3 []string
type Struct4 struct {
Torrents []hibiketorrent.AnimeTorrent `json:"torrents"`
Destination string `json:"destination"`
SmartSelect struct {
Enabled bool `json:"enabled"`
MissingEpisodeNumbers []int `json:"missingEpisodeNumbers"`
} `json:"smartSelect"`
Media *anilist.BaseAnime `json:"media"`
}

View File

@@ -0,0 +1,308 @@
package codegen
import (
"encoding/json"
"fmt"
"go/ast"
"go/parser"
"go/token"
"os"
"path/filepath"
"strings"
)
type (
RouteHandler struct {
Name string `json:"name"`
TrimmedName string `json:"trimmedName"`
Comments []string `json:"comments"`
Filepath string `json:"filepath"`
Filename string `json:"filename"`
Api *RouteHandlerApi `json:"api"`
}
RouteHandlerApi struct {
Summary string `json:"summary"`
Descriptions []string `json:"descriptions"`
Endpoint string `json:"endpoint"`
Methods []string `json:"methods"`
Params []*RouteHandlerParam `json:"params"`
BodyFields []*RouteHandlerParam `json:"bodyFields"`
Returns string `json:"returns"`
ReturnGoType string `json:"returnGoType"`
ReturnTypescriptType string `json:"returnTypescriptType"`
}
RouteHandlerParam struct {
Name string `json:"name"`
JsonName string `json:"jsonName"`
GoType string `json:"goType"` // e.g., []models.User
InlineStructType string `json:"inlineStructType,omitempty"` // e.g., struct{Test string `json:"test"`}
UsedStructType string `json:"usedStructType"` // e.g., models.User
TypescriptType string `json:"typescriptType"` // e.g., Array<User>
Required bool `json:"required"`
Descriptions []string `json:"descriptions"`
}
)
func GenerateHandlers(dir string, outDir string) {
handlers := make([]*RouteHandler, 0)
err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if info.IsDir() {
return nil
}
if !strings.HasSuffix(info.Name(), ".go") || strings.HasPrefix(info.Name(), "_") {
return nil
}
// Parse the file
file, err := parser.ParseFile(token.NewFileSet(), path, nil, parser.ParseComments)
if err != nil {
return err
}
for _, decl := range file.Decls {
// Check if the declaration is a function
fn, ok := decl.(*ast.FuncDecl)
if !ok {
continue
}
// Check if the function has comments
if fn.Doc == nil {
continue
}
// Get the comments
comments := strings.Split(fn.Doc.Text(), "\n")
if len(comments) == 0 {
continue
}
// Get the function name
name := fn.Name.Name
trimmedName := strings.TrimPrefix(name, "Handle")
// Get the filename
filep := strings.ReplaceAll(strings.ReplaceAll(path, "\\", "/"), "../", "")
filename := filepath.Base(path)
// Get the endpoint
endpoint := ""
var methods []string
params := make([]*RouteHandlerParam, 0)
summary := ""
descriptions := make([]string, 0)
returns := "bool"
for _, comment := range comments {
cmt := strings.TrimSpace(strings.TrimPrefix(comment, "//"))
if strings.HasPrefix(cmt, "@summary") {
summary = strings.TrimSpace(strings.TrimPrefix(cmt, "@summary"))
}
if strings.HasPrefix(cmt, "@desc") {
descriptions = append(descriptions, strings.TrimSpace(strings.TrimPrefix(cmt, "@desc")))
}
if strings.HasPrefix(cmt, "@route") {
endpointParts := strings.Split(strings.TrimSpace(strings.TrimPrefix(cmt, "@route")), " ")
if len(endpointParts) == 2 {
endpoint = endpointParts[0]
methods = strings.Split(endpointParts[1][1:len(endpointParts[1])-1], ",")
}
}
if strings.HasPrefix(cmt, "@param") {
paramParts := strings.Split(strings.TrimSpace(strings.TrimPrefix(cmt, "@param")), " - ")
if len(paramParts) == 4 {
required := paramParts[2] == "true"
params = append(params, &RouteHandlerParam{
Name: paramParts[0],
JsonName: paramParts[0],
GoType: paramParts[1],
TypescriptType: goTypeToTypescriptType(paramParts[1]),
Required: required,
Descriptions: []string{strings.ReplaceAll(paramParts[3], "\"", "")},
})
}
}
if strings.HasPrefix(cmt, "@returns") {
returns = strings.TrimSpace(strings.TrimPrefix(cmt, "@returns"))
}
}
bodyFields := make([]*RouteHandlerParam, 0)
// To get the request body fields, we need to look at the function body for a struct called "body"
// Get the function body
body := fn.Body
if body != nil {
for _, stmt := range body.List {
// Check if the statement is a declaration
declStmt, ok := stmt.(*ast.DeclStmt)
if !ok {
continue
}
// Check if the declaration is a gen decl
genDecl, ok := declStmt.Decl.(*ast.GenDecl)
if !ok {
continue
}
// Check if the declaration is a type
if genDecl.Tok != token.TYPE {
continue
}
// Check if the type is a struct
if len(genDecl.Specs) != 1 {
continue
}
typeSpec, ok := genDecl.Specs[0].(*ast.TypeSpec)
if !ok {
continue
}
structType, ok := typeSpec.Type.(*ast.StructType)
if !ok {
continue
}
// Check if the struct is called "body"
if typeSpec.Name.Name != "body" {
continue
}
// Get the fields
for _, field := range structType.Fields.List {
// Get the field name
fieldName := field.Names[0].Name
// Get the field type
fieldType := field.Type
jsonName := fieldName
// Get the field tag
required := !jsonFieldOmitEmpty(field)
jsonField := jsonFieldName(field)
if jsonField != "" {
jsonName = jsonField
}
// Get field comments
fieldComments := make([]string, 0)
cmtsTxt := field.Doc.Text()
if cmtsTxt != "" {
fieldComments = strings.Split(cmtsTxt, "\n")
}
// Clean the comments: trim the "//" prefix and drop empty lines
cleanedComments := make([]string, 0, len(fieldComments))
for _, cmt := range fieldComments {
cmt = strings.TrimSpace(strings.TrimPrefix(cmt, "//"))
if cmt != "" {
cleanedComments = append(cleanedComments, cmt)
}
}
fieldComments = cleanedComments
switch fieldType.(type) {
case *ast.StarExpr:
required = false
}
goType := fieldTypeString(fieldType)
goTypeUnformatted := fieldTypeUnformattedString(fieldType)
packageName := "handlers"
if strings.Contains(goTypeUnformatted, ".") {
parts := strings.Split(goTypeUnformatted, ".")
packageName = parts[0]
}
tsType := fieldTypeToTypescriptType(fieldType, packageName)
usedStructType := goTypeUnformatted
switch goTypeUnformatted {
case "string", "int", "int64", "float64", "float32", "bool", "nil", "uint", "uint64", "uint32", "uint16", "uint8", "byte", "rune", "[]byte", "interface{}", "error":
usedStructType = ""
}
// Add the request body field
bodyFields = append(bodyFields, &RouteHandlerParam{
Name: fieldName,
JsonName: jsonName,
GoType: goType,
UsedStructType: usedStructType,
TypescriptType: tsType,
Required: required,
Descriptions: fieldComments,
})
// Check if it's an inline struct and capture its definition
if structType, ok := fieldType.(*ast.StructType); ok {
bodyFields[len(bodyFields)-1].InlineStructType = formatInlineStruct(structType)
} else {
// Check if it's a slice of inline structs
if arrayType, ok := fieldType.(*ast.ArrayType); ok {
if structType, ok := arrayType.Elt.(*ast.StructType); ok {
bodyFields[len(bodyFields)-1].InlineStructType = "[]" + formatInlineStruct(structType)
}
}
// Check if it's a map with inline struct values
if mapType, ok := fieldType.(*ast.MapType); ok {
if structType, ok := mapType.Value.(*ast.StructType); ok {
bodyFields[len(bodyFields)-1].InlineStructType = "map[" + fieldTypeString(mapType.Key) + "]" + formatInlineStruct(structType)
}
}
}
}
}
}
// Add the route handler
routeHandler := &RouteHandler{
Name: name,
TrimmedName: trimmedName,
Comments: comments,
Filepath: filep,
Filename: filename,
Api: &RouteHandlerApi{
Summary: summary,
Descriptions: descriptions,
Endpoint: endpoint,
Methods: methods,
Params: params,
BodyFields: bodyFields,
Returns: returns,
ReturnGoType: getUnformattedGoType(returns),
ReturnTypescriptType: stringGoTypeToTypescriptType(returns),
},
}
handlers = append(handlers, routeHandler)
}
return nil
})
if err != nil {
panic(err)
}
// Write structs to file
_ = os.MkdirAll(outDir, os.ModePerm)
file, err := os.Create(outDir + "/handlers.json")
if err != nil {
fmt.Println("Error:", err)
return
}
defer file.Close()
encoder := json.NewEncoder(file)
encoder.SetIndent("", " ")
if err := encoder.Encode(handlers); err != nil {
fmt.Println("Error:", err)
return
}
return
}
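The entry point that drives these generators is not part of this excerpt, so the following is only a sketch of how `GenerateHandlers` might be wired together with the other generators defined in the files that follow (`ExtractStructs`, `GenerateHandlerHookEvents`, etc.). The import path and directory arguments are assumptions and may differ from the real driver:

```go
package main

import "seanime/codegen"

// Hypothetical driver: the real codegen entry point is not shown here; the
// import path and the directories below are assumptions used for illustration.
func main() {
	// Parse the route handlers and write generated/handlers.json.
	codegen.GenerateHandlers("../internal/handlers", "./generated")
	// Walk the internal packages and write generated/public_structs.json.
	codegen.ExtractStructs("../internal", "./generated")
	// Turn handlers.json into hook_events.go inside the handlers package.
	codegen.GenerateHandlerHookEvents("./generated/handlers.json", "../internal/handlers")
}
```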

View File

@@ -0,0 +1,146 @@
package codegen
import (
"encoding/json"
"fmt"
"os"
"os/exec"
"path/filepath"
"strings"
)
// GenerateHandlerHookEvents generates hook_events.go file for handlers
func GenerateHandlerHookEvents(handlersJsonPath string, outputDir string) {
// Create output directory if it doesn't exist
err := os.MkdirAll(outputDir, os.ModePerm)
if err != nil {
panic(err)
}
// Read handlers.json
handlersJson, err := os.ReadFile(handlersJsonPath)
if err != nil {
panic(err)
}
// Parse handlers.json
var handlers []RouteHandler
err = json.Unmarshal(handlersJson, &handlers)
if err != nil {
panic(err)
}
// Create hook_events.go file
outFilePath := filepath.Join(outputDir, "hook_events.go")
f, err := os.Create(outFilePath)
if err != nil {
panic(err)
}
defer f.Close()
// Write package declaration and imports
f.WriteString("package handlers\n\n")
f.WriteString("import (\n")
//f.WriteString("\t\"seanime/internal/hook_resolver\"\n")
imports := []string{
"\"seanime/internal/api/anilist\"",
"\"seanime/internal/api/tvdb\"",
"\"seanime/internal/continuity\"",
"\"seanime/internal/database/models\"",
"\"seanime/internal/debrid/client\"",
"\"seanime/internal/debrid/debrid\"",
"\"seanime/internal/extension\"",
"hibikemanga \"seanime/internal/extension/hibike/manga\"",
"hibikeonlinestream \"seanime/internal/extension/hibike/onlinestream\"",
"hibiketorrent \"seanime/internal/extension/hibike/torrent\"",
"\"seanime/internal/extension_playground\"",
"\"seanime/internal/extension_repo\"",
"\"seanime/internal/hook_resolver\"",
"\"seanime/internal/library/anime\"",
"\"seanime/internal/library/summary\"",
"\"seanime/internal/manga\"",
"\"seanime/internal/manga/downloader\"",
"\"seanime/internal/mediastream\"",
"\"seanime/internal/onlinestream\"",
"\"seanime/internal/report\"",
"\"seanime/internal/sync\"",
"\"seanime/internal/torrent_clients/torrent_client\"",
"\"seanime/internal/torrents/torrent\"",
"\"seanime/internal/torrentstream\"",
"\"seanime/internal/updater\"",
}
for _, imp := range imports {
f.WriteString("\t" + imp + "\n")
}
f.WriteString(")\n\n")
// Generate events for each handler
for _, handler := range handlers {
// Skip if handler name is empty or doesn't start with 'Handle'
if handler.Name == "" || !strings.HasPrefix(handler.Name, "Handle") {
continue
}
// Generate the "Requested" event
f.WriteString(fmt.Sprintf("// %sRequestedEvent is triggered when %s is requested.\n", handler.Name, handler.TrimmedName))
f.WriteString("// Prevent default to skip the default behavior and return your own data.\n")
f.WriteString(fmt.Sprintf("type %sRequestedEvent struct {\n", handler.Name))
f.WriteString("\thook_resolver.Event\n")
// Add path parameters
for _, param := range handler.Api.Params {
f.WriteString(fmt.Sprintf("\t%s %s `json:\"%s\"`\n", pascalCase(param.Name), param.GoType, param.JsonName))
}
// Add body fields
for _, field := range handler.Api.BodyFields {
goType := field.GoType
if goType == "__STRUCT__" || goType == "[]__STRUCT__" || (strings.HasPrefix(goType, "map[") && strings.Contains(goType, "__STRUCT__")) {
goType = field.InlineStructType
}
goType = strings.Replace(goType, "handlers.", "", 1)
addPointer := isCustomStruct(goType)
if addPointer {
goType = "*" + goType
}
f.WriteString(fmt.Sprintf("\t%s %s `json:\"%s\"`\n", pascalCase(field.Name), goType, field.JsonName))
}
// If handler returns something other than bool or true, add a Data field to store the result
if handler.Api.ReturnGoType != "" && handler.Api.ReturnGoType != "true" && handler.Api.ReturnGoType != "bool" {
returnGoType := strings.Replace(handler.Api.ReturnGoType, "handlers.", "", 1)
addPointer := isCustomStruct(returnGoType)
if addPointer {
returnGoType = "*" + returnGoType
}
f.WriteString(fmt.Sprintf("\t// Empty data object, will be used if the hook prevents the default behavior\n"))
f.WriteString(fmt.Sprintf("\tData %s `json:\"data\"`\n", returnGoType))
}
f.WriteString("}\n\n")
// Generate the response event if handler returns something other than bool or true
if handler.Api.ReturnGoType != "" && handler.Api.ReturnGoType != "true" && handler.Api.ReturnGoType != "bool" {
returnGoType := strings.Replace(handler.Api.ReturnGoType, "handlers.", "", 1)
addPointer := isCustomStruct(returnGoType)
if addPointer {
returnGoType = "*" + returnGoType
}
f.WriteString(fmt.Sprintf("// %sEvent is triggered after processing %s.\n", handler.Name, handler.TrimmedName))
f.WriteString(fmt.Sprintf("type %sEvent struct {\n", handler.Name))
f.WriteString("\thook_resolver.Event\n")
f.WriteString(fmt.Sprintf("\tData %s `json:\"data\"`\n", returnGoType))
f.WriteString("}\n\n")
}
}
cmd := exec.Command("gofmt", "-w", outFilePath)
cmd.Run()
}
func pascalCase(s string) string {
return strings.ReplaceAll(strings.Title(strings.ReplaceAll(s, "_", " ")), " ", "")
}
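Applied to a hypothetical `HandleGetExampleEntry` handler (one path parameter `id`, one body field `detailed`, returning `*anilist.BaseAnime`, as sketched earlier), the generator above would emit hook event structs roughly like the ones below. This output is reconstructed by hand from the template strings and may differ in detail from the real generated file:

```go
package handlers

import (
	"seanime/internal/api/anilist"
	"seanime/internal/hook_resolver"
)

// HandleGetExampleEntryRequestedEvent is triggered when GetExampleEntry is requested.
// Prevent default to skip the default behavior and return your own data.
type HandleGetExampleEntryRequestedEvent struct {
	hook_resolver.Event
	Id       int  `json:"id"`
	Detailed bool `json:"detailed"`
	// Empty data object, will be used if the hook prevents the default behavior
	Data *anilist.BaseAnime `json:"data"`
}

// HandleGetExampleEntryEvent is triggered after processing GetExampleEntry.
type HandleGetExampleEntryEvent struct {
	hook_resolver.Event
	Data *anilist.BaseAnime `json:"data"`
}
```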

View File

@@ -0,0 +1,797 @@
package codegen
import (
"cmp"
"encoding/json"
"fmt"
"go/ast"
"go/parser"
"go/token"
"os"
"path/filepath"
"slices"
"strings"
"golang.org/x/text/cases"
"golang.org/x/text/language"
)
var (
additionalStructNamesForHooks = []string{
"discordrpc_presence.MangaActivity",
"discordrpc_presence.AnimeActivity",
"discordrpc_presence.LegacyAnimeActivity",
"anilist.ListAnime",
"anilist.ListManga",
"anilist.MediaSort",
"anilist.ListRecentAnime",
"anilist.AnimeCollectionWithRelations",
"onlinestream.Episode",
"continuity.WatchHistoryItem",
"continuity.WatchHistoryItemResponse",
"continuity.UpdateWatchHistoryItemOptions",
"continuity.WatchHistory",
"torrent_client.Torrent",
}
)
func GeneratePluginEventFile(inFilePath string, outDir string) {
// Parse the input file
file, err := parser.ParseFile(token.NewFileSet(), inFilePath, nil, parser.ParseComments)
if err != nil {
panic(err)
}
// Create output directory if it doesn't exist
_ = os.MkdirAll(outDir, os.ModePerm)
const OutFileName = "plugin-events.ts"
// Create output file
f, err := os.Create(filepath.Join(outDir, OutFileName))
if err != nil {
panic(err)
}
defer f.Close()
// Write imports
f.WriteString(`// This file is auto-generated. Do not edit.
import { useWebsocketPluginMessageListener, useWebsocketSender } from "@/app/(main)/_hooks/handle-websockets"
import { useCallback } from "react"
`)
// Extract client and server event types
clientEvents := make([]string, 0)
serverEvents := make([]string, 0)
clientPayloads := make(map[string]string)
serverPayloads := make(map[string]string)
clientEventValues := make(map[string]string)
serverEventValues := make(map[string]string)
for _, decl := range file.Decls {
genDecl, ok := decl.(*ast.GenDecl)
if !ok {
continue
}
// Find const declarations
if genDecl.Tok == token.CONST {
for _, spec := range genDecl.Specs {
valueSpec, ok := spec.(*ast.ValueSpec)
if !ok {
continue
}
if len(valueSpec.Names) == 1 && len(valueSpec.Values) == 1 {
name := valueSpec.Names[0].Name
if strings.HasPrefix(name, "Client") && strings.HasSuffix(name, "Event") {
eventName := name[len("Client") : len(name)-len("Event")]
// Get the string literal value for the enum
if basicLit, ok := valueSpec.Values[0].(*ast.BasicLit); ok {
eventValue := strings.Trim(basicLit.Value, "\"")
clientEvents = append(clientEvents, eventName)
// Get payload type name
payloadType := name + "Payload"
clientPayloads[eventName] = payloadType
// Store the original string value
clientEventValues[eventName] = eventValue
}
} else if strings.HasPrefix(name, "Server") && strings.HasSuffix(name, "Event") {
eventName := name[len("Server") : len(name)-len("Event")]
// Get the string literal value for the enum
if basicLit, ok := valueSpec.Values[0].(*ast.BasicLit); ok {
eventValue := strings.Trim(basicLit.Value, "\"")
serverEvents = append(serverEvents, eventName)
// Get payload type name
payloadType := name + "Payload"
serverPayloads[eventName] = payloadType
// Store the original string value
serverEventValues[eventName] = eventValue
}
}
}
}
}
}
// Write enums
f.WriteString("export enum PluginClientEvents {\n")
for _, event := range clientEvents {
enumName := toPascalCase(event)
f.WriteString(fmt.Sprintf(" %s = \"%s\",\n", enumName, clientEventValues[event]))
}
f.WriteString("}\n\n")
f.WriteString("export enum PluginServerEvents {\n")
for _, event := range serverEvents {
enumName := toPascalCase(event)
f.WriteString(fmt.Sprintf(" %s = \"%s\",\n", enumName, serverEventValues[event]))
}
f.WriteString("}\n\n")
// Write client to server section
f.WriteString("/////////////////////////////////////////////////////////////////////////////////////\n")
f.WriteString("// Client to server\n")
f.WriteString("/////////////////////////////////////////////////////////////////////////////////////\n\n")
// Write client event types and hooks
for _, event := range clientEvents {
// Get the payload type
payloadType := clientPayloads[event]
payloadFound := false
// Find the payload type in the AST
for _, decl := range file.Decls {
genDecl, ok := decl.(*ast.GenDecl)
if !ok {
continue
}
if genDecl.Tok == token.TYPE {
for _, spec := range genDecl.Specs {
typeSpec, ok := spec.(*ast.TypeSpec)
if !ok {
continue
}
if typeSpec.Name.Name == payloadType {
payloadFound = true
// Write the payload type
f.WriteString(fmt.Sprintf("export type Plugin_Client_%sEventPayload = {\n", toPascalCase(event)))
if structType, ok := typeSpec.Type.(*ast.StructType); ok {
for _, field := range structType.Fields.List {
if len(field.Names) > 0 {
fieldName := jsonFieldName(field)
fieldType := fieldTypeToTypescriptType(field.Type, "")
f.WriteString(fmt.Sprintf(" %s: %s\n", fieldName, fieldType))
}
}
}
f.WriteString("}\n\n")
// Write the hook
hookName := fmt.Sprintf("usePluginSend%sEvent", toPascalCase(event))
f.WriteString(fmt.Sprintf("export function %s() {\n", hookName))
f.WriteString(" const { sendPluginMessage } = useWebsocketSender()\n")
f.WriteString("\n")
f.WriteString(fmt.Sprintf(" const send%sEvent = useCallback((payload: Plugin_Client_%sEventPayload, extensionID?: string) => {\n",
toPascalCase(event), toPascalCase(event)))
f.WriteString(fmt.Sprintf(" sendPluginMessage(PluginClientEvents.%s, payload, extensionID)\n",
toPascalCase(event)))
f.WriteString(" }, [])\n")
f.WriteString("\n")
f.WriteString(" return {\n")
f.WriteString(fmt.Sprintf(" send%sEvent,\n", toPascalCase(event)))
f.WriteString(" }\n")
f.WriteString("}\n\n")
}
}
}
}
// If payload type not found, write empty object type
if !payloadFound {
f.WriteString(fmt.Sprintf("export type Plugin_Client_%sEventPayload = {}\n\n", toPascalCase(event)))
// Write the hook
hookName := fmt.Sprintf("usePluginSend%sEvent", toPascalCase(event))
f.WriteString(fmt.Sprintf("export function %s() {\n", hookName))
f.WriteString(" const { sendPluginMessage } = useWebsocketSender()\n")
f.WriteString("\n")
f.WriteString(fmt.Sprintf(" const sendPlugin%sEvent = useCallback((payload: Plugin_Client_%sEventPayload, extensionID?: string) => {\n",
toPascalCase(event), toPascalCase(event)))
f.WriteString(fmt.Sprintf(" sendPluginMessage(PluginClientEvents.%s, payload, extensionID)\n",
toPascalCase(event)))
f.WriteString(" }, [])\n")
f.WriteString("\n")
f.WriteString(" return {\n")
f.WriteString(fmt.Sprintf(" send%sEvent,\n", toPascalCase(event)))
f.WriteString(" }\n")
f.WriteString("}\n\n")
}
}
// Write server to client section
f.WriteString("/////////////////////////////////////////////////////////////////////////////////////\n")
f.WriteString("// Server to client\n")
f.WriteString("/////////////////////////////////////////////////////////////////////////////////////\n\n")
// Write server event types and hooks
for _, event := range serverEvents {
// Get the payload type
payloadType := serverPayloads[event]
payloadFound := false
// Find the payload type in the AST
for _, decl := range file.Decls {
genDecl, ok := decl.(*ast.GenDecl)
if !ok {
continue
}
if genDecl.Tok == token.TYPE {
for _, spec := range genDecl.Specs {
typeSpec, ok := spec.(*ast.TypeSpec)
if !ok {
continue
}
if typeSpec.Name.Name == payloadType {
payloadFound = true
// Write the payload type
f.WriteString(fmt.Sprintf("export type Plugin_Server_%sEventPayload = {\n", toPascalCase(event)))
if structType, ok := typeSpec.Type.(*ast.StructType); ok {
for _, field := range structType.Fields.List {
if len(field.Names) > 0 {
fieldName := jsonFieldName(field)
fieldType := fieldTypeToTypescriptType(field.Type, "")
f.WriteString(fmt.Sprintf(" %s: %s\n", fieldName, fieldType))
}
}
}
f.WriteString("}\n\n")
// Write the hook
hookName := fmt.Sprintf("usePluginListen%sEvent", toPascalCase(event))
f.WriteString(fmt.Sprintf("export function %s(cb: (payload: Plugin_Server_%sEventPayload, extensionId: string) => void, extensionID: string) {\n",
hookName, toPascalCase(event)))
f.WriteString(" return useWebsocketPluginMessageListener<Plugin_Server_" + toPascalCase(event) + "EventPayload>({\n")
f.WriteString(" extensionId: extensionID,\n")
f.WriteString(fmt.Sprintf(" type: PluginServerEvents.%s,\n", toPascalCase(event)))
f.WriteString(" onMessage: cb,\n")
f.WriteString(" })\n")
f.WriteString("}\n\n")
}
}
}
}
// If payload type not found, write empty object type
if !payloadFound {
f.WriteString(fmt.Sprintf("export type Plugin_Server_%sEventPayload = {}\n\n", toPascalCase(event)))
// Write the hook
hookName := fmt.Sprintf("usePluginListen%sEvent", toPascalCase(event))
f.WriteString(fmt.Sprintf("export function %s(cb: (payload: Plugin_Server_%sEventPayload, extensionId: string) => void, extensionID: string) {\n",
hookName, toPascalCase(event)))
f.WriteString(" return useWebsocketPluginMessageListener<Plugin_Server_" + toPascalCase(event) + "EventPayload>({\n")
f.WriteString(" extensionId: extensionID,\n")
f.WriteString(fmt.Sprintf(" type: PluginServerEvents.%s,\n", toPascalCase(event)))
f.WriteString(" onMessage: cb,\n")
f.WriteString(" })\n")
f.WriteString("}\n\n")
}
}
}
var exceptions = map[string]string{
"playbackmanager": "PlaybackManager ",
}
func toPascalCase(s string) string {
if exception, ok := exceptions[s]; ok {
return exception
}
s = strings.ReplaceAll(s, "-", " ")
s = strings.ReplaceAll(s, "_", " ")
s = cases.Title(language.English, cases.NoLower).String(s)
return strings.ReplaceAll(s, " ", "")
}
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
type HookEventDefinition struct {
Package string `json:"package"`
GoStruct *GoStruct `json:"goStruct"`
}
func GeneratePluginHooksDefinitionFile(outDir string, publicStructsFilePath string, genOutDir string) {
// Create output file
f, err := os.Create(filepath.Join(outDir, "app.d.ts"))
if err != nil {
panic(err)
}
defer f.Close()
mdFile, err := os.Create(filepath.Join(genOutDir, "hooks.mdx"))
if err != nil {
panic(err)
}
defer mdFile.Close()
goStructs := LoadPublicStructs(publicStructsFilePath)
// e.g. map["models.User"]*GoStruct
goStructsMap := make(map[string]*GoStruct)
for _, goStruct := range goStructs {
goStructsMap[goStruct.Package+"."+goStruct.Name] = goStruct
}
// Expand the structs with embedded structs
for _, goStruct := range goStructs {
for _, embeddedStructType := range goStruct.EmbeddedStructTypes {
if embeddedStructType != "" {
if usedStruct, ok := goStructsMap[embeddedStructType]; ok {
for _, usedField := range usedStruct.Fields {
goStruct.Fields = append(goStruct.Fields, usedField)
}
}
}
}
}
// Key = package
eventGoStructsMap := make(map[string][]*GoStruct)
for _, goStruct := range goStructs {
if goStruct.Filename == "hook_events.go" {
if _, ok := eventGoStructsMap[goStruct.Package]; !ok {
eventGoStructsMap[goStruct.Package] = make([]*GoStruct, 0)
}
eventGoStructsMap[goStruct.Package] = append(eventGoStructsMap[goStruct.Package], goStruct)
}
}
// Create `hooks.json`
hookEventDefinitions := make([]*HookEventDefinition, 0)
for _, goStruct := range goStructs {
if goStruct.Filename == "hook_events.go" {
hookEventDefinitions = append(hookEventDefinitions, &HookEventDefinition{
Package: goStruct.Package,
GoStruct: goStruct,
})
}
}
jsonFile, err := os.Create(filepath.Join(genOutDir, "hooks.json"))
if err != nil {
panic(err)
}
defer jsonFile.Close()
encoder := json.NewEncoder(jsonFile)
encoder.SetIndent("", " ")
if err := encoder.Encode(hookEventDefinitions); err != nil {
fmt.Println("Error:", err)
return
}
////////////////////////////////////////////////////
// Write `app.d.ts`
// Write namespace declaration
////////////////////////////////////////////////////
f.WriteString("declare namespace $app {\n")
packageNames := make([]string, 0)
for packageName := range eventGoStructsMap {
packageNames = append(packageNames, packageName)
}
slices.Sort(packageNames)
//////////////////////////////////////////////////////////
// Get referenced structs so we can write them at the end
//////////////////////////////////////////////////////////
sharedStructs := make([]*GoStruct, 0)
otherStructs := make([]*GoStruct, 0)
// Go through all the event structs' fields, and get the types that are structs
sharedStructsMap := make(map[string]*GoStruct)
for _, goStructs := range eventGoStructsMap {
for _, goStruct := range goStructs {
for _, field := range goStruct.Fields {
if isCustomStruct(field.GoType) {
if _, ok := sharedStructsMap[field.GoType]; !ok && goStructsMap[field.UsedStructType] != nil {
sharedStructsMap[field.UsedStructType] = goStructsMap[field.UsedStructType]
}
}
}
}
}
// Add additional structs to otherStructs
for _, structName := range additionalStructNamesForHooks {
if _, ok := sharedStructsMap[structName]; !ok {
sharedStructsMap[structName] = goStructsMap[structName]
}
}
for _, goStruct := range sharedStructsMap {
//fmt.Println(goStruct.FormattedName)
if goStruct.Package != "" {
sharedStructs = append(sharedStructs, goStruct)
}
}
referencedStructsMap, ok := getReferencedStructsRecursively(sharedStructs, otherStructs, goStructsMap)
if !ok {
panic("Failed to get referenced structs")
}
for _, packageName := range packageNames {
writePackageEventGoStructs(f, packageName, eventGoStructsMap[packageName], goStructsMap)
}
f.WriteString(" ///////////////////////////////////////////////////////////////////////////////////////////////////////////////\n")
f.WriteString(" ///////////////////////////////////////////////////////////////////////////////////////////////////////////////\n")
f.WriteString(" ///////////////////////////////////////////////////////////////////////////////////////////////////////////////\n\n")
referencedStructs := make([]*GoStruct, 0)
for _, goStruct := range referencedStructsMap {
//fmt.Println(goStruct.FormattedName)
referencedStructs = append(referencedStructs, goStruct)
}
slices.SortFunc(referencedStructs, func(a, b *GoStruct) int {
return strings.Compare(a.FormattedName, b.FormattedName)
})
// Write the shared structs at the end
for _, goStruct := range referencedStructs {
if goStruct.Package != "" {
writeEventTypescriptType(f, goStruct, make(map[string]*GoStruct))
}
}
f.WriteString("}\n")
// Generate markdown documentation
writeMarkdownFile(mdFile, hookEventDefinitions, referencedStructsMap, referencedStructs)
}
func writePackageEventGoStructs(f *os.File, packageName string, goStructs []*GoStruct, allGoStructs map[string]*GoStruct) {
// Header comment block
f.WriteString(fmt.Sprintf("\n /**\n * @package %s\n */\n\n", packageName))
// Declare the hook functions
for _, goStruct := range goStructs {
// Write comments
comments := ""
comments += fmt.Sprintf("\n * @event %s\n", goStruct.Name)
comments += fmt.Sprintf(" * @file %s\n", strings.TrimPrefix(goStruct.Filepath, "../"))
shouldAddPreventDefault := false
if len(goStruct.Comments) > 0 {
comments += fmt.Sprintf(" * @description\n")
}
for _, comment := range goStruct.Comments {
if strings.Contains(strings.ToLower(comment), "prevent default") {
shouldAddPreventDefault = true
}
comments += fmt.Sprintf(" * %s\n", strings.TrimSpace(comment))
}
f.WriteString(fmt.Sprintf(" /**%s */\n", comments))
//////// Write hook function
f.WriteString(fmt.Sprintf(" function on%s(cb: (event: %s) => void): void;\n\n", strings.TrimSuffix(goStruct.Name, "Event"), goStruct.Name))
/////// Write event interface
f.WriteString(fmt.Sprintf(" interface %s {\n", goStruct.Name))
f.WriteString(fmt.Sprintf(" next(): void;\n\n"))
if shouldAddPreventDefault {
f.WriteString(fmt.Sprintf(" preventDefault(): void;\n\n"))
}
// Write the fields
for _, field := range goStruct.Fields {
if field.Name == "next" || field.Name == "preventDefault" || field.Name == "DefaultPrevented" {
continue
}
if field.JsonName == "" {
continue
}
// Field type
fieldNameSuffix := ""
if !field.Required {
fieldNameSuffix = "?"
}
if len(field.Comments) > 0 {
f.WriteString(fmt.Sprintf(" /**\n"))
for _, cmt := range field.Comments {
f.WriteString(fmt.Sprintf(" * %s\n", strings.TrimSpace(cmt)))
}
f.WriteString(fmt.Sprintf(" */\n"))
}
typeText := field.TypescriptType
f.WriteString(fmt.Sprintf(" %s%s: %s;\n", field.JsonName, fieldNameSuffix, typeText))
}
f.WriteString(fmt.Sprintf(" }\n\n"))
}
}
func writeEventTypescriptType(f *os.File, goStruct *GoStruct, writtenTypes map[string]*GoStruct) {
f.WriteString(" /**\n")
f.WriteString(fmt.Sprintf(" * - Filepath: %s\n", strings.TrimPrefix(goStruct.Filepath, "../")))
if len(goStruct.Comments) > 0 {
f.WriteString(fmt.Sprintf(" * @description\n"))
for _, cmt := range goStruct.Comments {
f.WriteString(fmt.Sprintf(" * %s\n", strings.TrimSpace(cmt)))
}
}
f.WriteString(" */\n")
if len(goStruct.Fields) > 0 {
f.WriteString(fmt.Sprintf(" interface %s {\n", goStruct.FormattedName))
for _, field := range goStruct.Fields {
fieldNameSuffix := ""
if !field.Required {
fieldNameSuffix = "?"
}
if field.JsonName == "" {
continue
}
if len(field.Comments) > 0 {
f.WriteString(fmt.Sprintf(" /**\n"))
for _, cmt := range field.Comments {
f.WriteString(fmt.Sprintf(" * %s\n", strings.TrimSpace(cmt)))
}
f.WriteString(fmt.Sprintf(" */\n"))
}
typeText := field.TypescriptType
if typeText == "Habari_Metadata" {
typeText = "$habari.Metadata"
}
f.WriteString(fmt.Sprintf(" %s%s: %s;\n", field.JsonName, fieldNameSuffix, typeText))
}
f.WriteString(" }\n\n")
}
if goStruct.AliasOf != nil {
if goStruct.AliasOf.DeclaredValues != nil && len(goStruct.AliasOf.DeclaredValues) > 0 {
union := ""
if len(goStruct.AliasOf.DeclaredValues) > 5 {
union = strings.Join(goStruct.AliasOf.DeclaredValues, " |\n ")
} else {
union = strings.Join(goStruct.AliasOf.DeclaredValues, " | ")
}
f.WriteString(fmt.Sprintf(" export type %s = %s;\n\n", goStruct.FormattedName, union))
} else {
f.WriteString(fmt.Sprintf(" export type %s = %s;\n\n", goStruct.FormattedName, goStruct.AliasOf.TypescriptType))
}
}
// Add the struct to the written types
writtenTypes[goStruct.Package+"."+goStruct.Name] = goStruct
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// writeMarkdownFile generates a well-formatted Markdown documentation for hooks
func writeMarkdownFile(mdFile *os.File, hookEventDefinitions []*HookEventDefinition, referencedStructsMap map[string]*GoStruct, referencedStructs []*GoStruct) {
mdFile.WriteString("---\n")
mdFile.WriteString("title: Hooks\n")
mdFile.WriteString("description: How to use hooks\n")
mdFile.WriteString("---")
mdFile.WriteString("\n\n")
// Group hooks by package
packageHooks := make(map[string][]*HookEventDefinition)
for _, hook := range hookEventDefinitions {
packageHooks[hook.Package] = append(packageHooks[hook.Package], hook)
}
// Sort packages alphabetically
packageNames := make([]string, 0, len(packageHooks))
for pkg := range packageHooks {
packageNames = append(packageNames, pkg)
}
slices.Sort(packageNames)
// Write each package section
for _, pkg := range packageNames {
hooks := packageHooks[pkg]
mdFile.WriteString(fmt.Sprintf("<a id=\"%s\"></a>\n", pkg))
mdFile.WriteString(fmt.Sprintf("# %s\n\n", toPascalCase(pkg)))
// Write each hook in the package
for _, hook := range hooks {
goStruct := hook.GoStruct
eventName := goStruct.Name
hookName := fmt.Sprintf("on%s", strings.TrimSuffix(eventName, "Event"))
mdFile.WriteString(fmt.Sprintf("<a id=\"on%s\"></a>\n", strings.ToLower(strings.TrimSuffix(eventName, "Event"))))
mdFile.WriteString(fmt.Sprintf("## %s\n\n", hookName))
// Write description
if len(goStruct.Comments) > 0 {
for _, comment := range goStruct.Comments {
mdFile.WriteString(fmt.Sprintf("%s\n", strings.TrimSpace(comment)))
}
mdFile.WriteString("\n")
}
// Check if it has preventDefault
hasPreventDefault := false
for _, comment := range goStruct.Comments {
if strings.Contains(strings.ToLower(comment), "prevent default") {
hasPreventDefault = true
break
}
}
if hasPreventDefault {
mdFile.WriteString("**Can prevent default:** Yes\n\n")
} else {
mdFile.WriteString("**Can prevent default:** No\n\n")
}
// Write event interface
mdFile.WriteString("**Event Interface:**\n\n")
mdFile.WriteString("```typescript\n")
mdFile.WriteString(fmt.Sprintf("interface %s {\n", eventName))
mdFile.WriteString(" next();\n")
if hasPreventDefault {
mdFile.WriteString(" preventDefault();\n")
}
// Write fields
for _, field := range goStruct.Fields {
if field.Name == "next" || field.Name == "preventDefault" || field.Name == "DefaultPrevented" {
continue
}
if field.JsonName == "" {
continue
}
fieldNameSuffix := ""
if !field.Required {
fieldNameSuffix = "?"
}
// Add comments if available
if len(field.Comments) > 0 {
mdFile.WriteString("\n /**\n")
for _, comment := range field.Comments {
mdFile.WriteString(fmt.Sprintf(" * %s\n", strings.TrimSpace(comment)))
}
mdFile.WriteString(" */\n")
}
mdFile.WriteString(fmt.Sprintf(" %s%s: %s;\n", field.JsonName, fieldNameSuffix, field.TypescriptType))
}
mdFile.WriteString("}\n")
mdFile.WriteString("```\n\n")
referenced := make([]*GoStruct, 0)
for _, field := range goStruct.Fields {
if !isCustomStruct(field.GoType) {
continue
}
goStruct, ok := referencedStructsMap[field.UsedStructType]
if !ok {
continue
}
referenced = append(referenced, goStruct)
}
// Add a list of referenced structs links
if len(referenced) > 0 {
mdFile.WriteString("**Event types:**\n\n")
}
for _, goStruct := range referenced {
mdFile.WriteString(fmt.Sprintf("- [%s](#%s)\n", goStruct.FormattedName, goStruct.FormattedName))
}
mdFile.WriteString("\n")
// Add example usage
mdFile.WriteString("**Example:**\n\n")
mdFile.WriteString("```typescript\n")
mdFile.WriteString(fmt.Sprintf("$app.%s((e) => {\n", hookName))
// Generate example code based on fields
for _, field := range goStruct.Fields {
if field.Name == "next" || field.Name == "preventDefault" || field.Name == "DefaultPrevented" {
continue
}
mdFile.WriteString(fmt.Sprintf(" // console.log(e.%s);\n", field.JsonName))
}
if hasPreventDefault {
mdFile.WriteString("\n // Prevent default behavior if needed\n")
mdFile.WriteString(" // e.preventDefault();\n")
}
mdFile.WriteString(" \n e.next();\n")
mdFile.WriteString("});\n")
mdFile.WriteString("```\n\n")
// Add separator between hooks
mdFile.WriteString("---\n\n")
}
}
// Write the referenced structs
if len(referencedStructs) > 0 {
mdFile.WriteString("\n# Referenced Types\n\n")
}
for _, goStruct := range referencedStructs {
mdFile.WriteString(fmt.Sprintf("#### %s\n\n", goStruct.FormattedName))
mdFile.WriteString(fmt.Sprintf("<div id=\"%s\"></div>\n\n", goStruct.FormattedName))
mdFile.WriteString(fmt.Sprintf("**Filepath:** `%s`\n\n", strings.TrimPrefix(goStruct.Filepath, "../")))
if len(goStruct.Fields) > 0 {
mdFile.WriteString("**Fields:**\n\n")
mdFile.WriteString("<Table>\n")
mdFile.WriteString("<TableCaption>Fields</TableCaption>\n")
mdFile.WriteString("<TableHeader>\n")
mdFile.WriteString("<TableRow>\n")
mdFile.WriteString("<TableHead>Property</TableHead>\n")
mdFile.WriteString("<TableHead>Type</TableHead>\n")
mdFile.WriteString("<TableHead>Description</TableHead>\n")
mdFile.WriteString("</TableRow>\n")
mdFile.WriteString("</TableHeader>\n")
mdFile.WriteString("<TableBody>\n")
for _, field := range goStruct.Fields {
mdFile.WriteString(fmt.Sprintf("<TableRow>\n"))
mdFile.WriteString(fmt.Sprintf("<TableCell className=\"py-1 px-2 max-w-[200px] break-all\">%s</TableCell>\n", field.JsonName))
typeContainsReference := false
if field.UsedStructType != "" && isCustomStruct(field.UsedStructType) {
typeContainsReference = true
}
if typeContainsReference {
link := fmt.Sprintf("<a href=\"#%s\">`%s`</a>", field.UsedTypescriptType, field.TypescriptType)
mdFile.WriteString(fmt.Sprintf("<TableCell className=\"py-1 px-2 break-all\">%s</TableCell>\n", link))
} else {
mdFile.WriteString(fmt.Sprintf("<TableCell className=\"py-1 px-2 break-all\">`%s`</TableCell>\n", field.TypescriptType))
}
mdFile.WriteString(fmt.Sprintf("<TableCell className=\"py-1 px-2 max-w-[200px] break-all\">%s</TableCell>\n", cmp.Or(strings.Join(field.Comments, "\n"), "-")))
mdFile.WriteString("</TableRow>\n")
}
mdFile.WriteString("</TableBody>\n")
mdFile.WriteString("</Table>\n")
}
if goStruct.AliasOf != nil {
if goStruct.AliasOf.DeclaredValues != nil && len(goStruct.AliasOf.DeclaredValues) > 0 {
union := ""
if len(goStruct.AliasOf.DeclaredValues) > 5 {
union = strings.Join(goStruct.AliasOf.DeclaredValues, " |\n ")
} else {
union = strings.Join(goStruct.AliasOf.DeclaredValues, " | ")
}
mdFile.WriteString(fmt.Sprintf("`%s`\n\n", union))
} else {
mdFile.WriteString(fmt.Sprintf("`%s`\n\n", goStruct.AliasOf.TypescriptType))
}
}
mdFile.WriteString("\n")
}
}
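`GeneratePluginEventFile` expects its input file to declare event constants prefixed with `Client`/`Server` and suffixed with `Event`, each paired with a `<ConstName>Payload` struct. A hypothetical input in that shape is sketched below; the package name, event names and string values are made up for illustration:

```go
// Hypothetical plugin events source file in the shape GeneratePluginEventFile
// parses; the package name, event names and string values are illustrative.
package plugin

const (
	ClientScreenChangedEvent = "screen:changed"
	ServerToastShowEvent     = "toast:show"
)

// ClientScreenChangedEventPayload follows the "<ConstName>Payload" convention
// the generator looks for when emitting the Plugin_Client_* payload types and
// the usePluginSend...Event hooks.
type ClientScreenChangedEventPayload struct {
	Pathname string `json:"pathname"`
	Query    string `json:"query"`
}

// ServerToastShowEventPayload is emitted as a Plugin_Server_* payload type
// together with a usePluginListen...Event hook.
type ServerToastShowEventPayload struct {
	Message string `json:"message"`
}
```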

View File

@@ -0,0 +1,810 @@
package codegen
import (
"encoding/json"
"fmt"
"go/ast"
"go/parser"
"go/token"
"os"
"path/filepath"
"reflect"
"strings"
"unicode"
)
type GoStruct struct {
Filepath string `json:"filepath"`
Filename string `json:"filename"`
Name string `json:"name"`
FormattedName string `json:"formattedName"` // name with package prefix e.g. models.User => Models_User
Package string `json:"package"`
Fields []*GoStructField `json:"fields"`
AliasOf *GoAlias `json:"aliasOf,omitempty"`
Comments []string `json:"comments"`
EmbeddedStructTypes []string `json:"embeddedStructNames,omitempty"`
}
type GoAlias struct {
GoType string `json:"goType"`
TypescriptType string `json:"typescriptType"`
UsedTypescriptType string `json:"usedTypescriptType,omitempty"`
DeclaredValues []string `json:"declaredValues"`
UsedStructType string `json:"usedStructName,omitempty"`
}
type GoStructField struct {
Name string `json:"name"`
JsonName string `json:"jsonName"`
// e.g. map[string]models.User
GoType string `json:"goType"`
// e.g. []struct{Test string `json:"test"`, Test2 string `json:"test2"`}
InlineStructType string `json:"inlineStructType,omitempty"`
// e.g. User
TypescriptType string `json:"typescriptType"`
// e.g. TypescriptType = Array<Models_User> => UsedTypescriptType = Models_User
UsedTypescriptType string `json:"usedTypescriptType,omitempty"`
// e.g. GoType = map[string]models.User => TypescriptType = User => UsedStructType = models.User
UsedStructType string `json:"usedStructName,omitempty"`
// If no 'omitempty' and not a pointer
Required bool `json:"required"`
Public bool `json:"public"`
Comments []string `json:"comments"`
}
var typePrefixesByPackage = map[string]string{
"anilist": "AL_",
"auto_downloader": "AutoDownloader_",
"autodownloader": "AutoDownloader_",
"entities": "",
"db": "DB_",
"db_bridge": "DB_",
"models": "Models_",
"playbackmanager": "PlaybackManager_",
"torrent_client": "TorrentClient_",
"events": "Events_",
"torrent": "Torrent_",
"manga": "Manga_",
"autoscanner": "AutoScanner_",
"listsync": "ListSync_",
"util": "Util_",
"scanner": "Scanner_",
"offline": "Offline_",
"discordrpc": "DiscordRPC_",
"discordrpc_presence": "DiscordRPC_",
"anizip": "Anizip_",
"animap": "Animap_",
"onlinestream": "Onlinestream_",
"onlinestream_providers": "Onlinestream_",
"onlinestream_sources": "Onlinestream_",
"manga_providers": "Manga_",
"chapter_downloader": "ChapterDownloader_",
"manga_downloader": "MangaDownloader_",
"docs": "INTERNAL_",
"tvdb": "TVDB_",
"metadata": "Metadata_",
"mappings": "Mappings_",
"mal": "MAL_",
"handlers": "",
"animetosho": "AnimeTosho_",
"updater": "Updater_",
"anime": "Anime_",
"anime_types": "Anime_",
"summary": "Summary_",
"filesystem": "Filesystem_",
"filecache": "Filecache_",
"core": "INTERNAL_",
"comparison": "Comparison_",
"mediastream": "Mediastream_",
"torrentstream": "Torrentstream_",
"extension": "Extension_",
"extension_repo": "ExtensionRepo_",
//"vendor_hibike_manga": "HibikeManga_",
//"vendor_hibike_onlinestream": "HibikeOnlinestream_",
//"vendor_hibike_torrent": "HibikeTorrent_",
//"vendor_hibike_mediaplayer": "HibikeMediaPlayer_",
//"vendor_hibike_extension": "HibikeExtension_",
"hibikemanga": "HibikeManga_",
"hibikeonlinestream": "HibikeOnlinestream_",
"hibiketorrent": "HibikeTorrent_",
"hibikemediaplayer": "HibikeMediaPlayer_",
"hibikeextension": "HibikeExtension_",
"continuity": "Continuity_",
"local": "Local_",
"debrid": "Debrid_",
"debrid_client": "DebridClient_",
"report": "Report_",
"habari": "Habari_",
"vendor_habari": "Habari_",
"discordrpc_client": "DiscordRPC_",
"directstream": "Directstream_",
"nativeplayer": "NativePlayer_",
"mkvparser": "MKVParser_",
"nakama": "Nakama_",
}
func getTypePrefix(packageName string) string {
if prefix, ok := typePrefixesByPackage[packageName]; ok {
return prefix
}
return ""
}
func ExtractStructs(dir string, outDir string) {
structs := make([]*GoStruct, 0)
err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if !info.IsDir() && strings.HasSuffix(info.Name(), ".go") {
res, err := getGoStructsFromFile(path, info)
if err != nil {
return err
}
structs = append(structs, res...)
}
return nil
})
if err != nil {
fmt.Println("Error:", err)
return
}
// Write structs to file
_ = os.MkdirAll(outDir, os.ModePerm)
file, err := os.Create(outDir + "/public_structs.json")
if err != nil {
fmt.Println("Error:", err)
return
}
defer file.Close()
encoder := json.NewEncoder(file)
encoder.SetIndent("", " ")
if err := encoder.Encode(structs); err != nil {
fmt.Println("Error:", err)
return
}
fmt.Println("Public structs extracted and saved to public_structs.json")
}
func getGoStructsFromFile(path string, info os.FileInfo) (structs []*GoStruct, err error) {
// Parse the Go file
file, err := parser.ParseFile(token.NewFileSet(), path, nil, parser.ParseComments)
if err != nil {
return nil, err
}
packageName := file.Name.Name
// Extract public structs
for _, decl := range file.Decls {
genDecl, ok := decl.(*ast.GenDecl)
if !ok || genDecl.Tok != token.TYPE {
continue
}
//
// Go through each type declaration
//
for _, spec := range genDecl.Specs {
typeSpec, ok := spec.(*ast.TypeSpec)
if !ok {
continue
}
if !typeSpec.Name.IsExported() {
continue
}
//
// The type declaration is an alias
// e.g. alias.Name: string, typeSpec.Name.Name: MediaListStatus
//
alias, ok := typeSpec.Type.(*ast.Ident)
if ok {
if alias.Name == typeSpec.Name.Name {
continue
}
goStruct := goStructFromAlias(path, info, genDecl, typeSpec, packageName, alias, file)
structs = append(structs, goStruct)
continue
}
//
// The type declaration is a struct
//
structType, ok := typeSpec.Type.(*ast.StructType)
if ok {
subStructs := make([]*GoStruct, 0)
for _, field := range structType.Fields.List {
if field.Names != nil && len(field.Names) > 0 {
subStructType, ok := field.Type.(*ast.StructType)
if ok {
name := fmt.Sprintf("%s_%s", typeSpec.Name.Name, field.Names[0].Name)
subStruct := goStructFromStruct(path, info, genDecl, name, packageName, subStructType)
subStructs = append(subStructs, subStruct)
continue
}
}
}
goStruct := goStructFromStruct(path, info, genDecl, typeSpec.Name.Name, packageName, structType)
// Replace struct fields with sub structs
for _, field := range goStruct.Fields {
if field.GoType == "__STRUCT__" {
for _, subStruct := range subStructs {
if subStruct.Name == fmt.Sprintf("%s_%s", typeSpec.Name.Name, field.Name) {
field.GoType = subStruct.FormattedName
field.TypescriptType = subStruct.FormattedName
field.UsedStructType = fmt.Sprintf("%s.%s", subStruct.Package, subStruct.Name)
break
}
}
}
}
structs = append(structs, goStruct)
structs = append(structs, subStructs...)
continue
}
mapType, ok := typeSpec.Type.(*ast.MapType)
if ok {
goStruct := &GoStruct{
Filepath: path,
Filename: info.Name(),
Name: typeSpec.Name.Name,
FormattedName: getTypePrefix(packageName) + typeSpec.Name.Name,
Package: packageName,
Fields: make([]*GoStructField, 0),
}
usedStructType, usedStructPkgName := getUsedStructType(mapType, packageName)
goStruct.AliasOf = &GoAlias{
GoType: fieldTypeString(mapType),
TypescriptType: fieldTypeToTypescriptType(mapType, usedStructPkgName),
UsedStructType: usedStructType,
}
structs = append(structs, goStruct)
continue
}
sliceType, ok := typeSpec.Type.(*ast.ArrayType)
if ok {
goStruct := &GoStruct{
Filepath: path,
Filename: info.Name(),
Name: typeSpec.Name.Name,
FormattedName: getTypePrefix(packageName) + typeSpec.Name.Name,
Package: packageName,
Fields: make([]*GoStructField, 0),
}
usedStructType, usedStructPkgName := getUsedStructType(sliceType, packageName)
goStruct.AliasOf = &GoAlias{
GoType: fieldTypeString(sliceType),
TypescriptType: fieldTypeToTypescriptType(sliceType, usedStructPkgName),
UsedStructType: usedStructType,
}
structs = append(structs, goStruct)
continue
}
}
}
return structs, nil
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Example:
//
// type User struct {
// ID int `json:"id"`
// Name string `json:"name"`
// }
func goStructFromStruct(path string, info os.FileInfo, genDecl *ast.GenDecl, name string, packageName string, structType *ast.StructType) *GoStruct {
// Get comments
comments := make([]string, 0)
if genDecl.Doc != nil && genDecl.Doc.List != nil && len(genDecl.Doc.List) > 0 {
for _, comment := range genDecl.Doc.List {
comments = append(comments, strings.TrimPrefix(comment.Text, "//"))
}
}
goStruct := &GoStruct{
Filepath: filepath.ToSlash(path),
Filename: info.Name(),
Name: name,
FormattedName: getTypePrefix(packageName) + name,
Package: packageName,
Fields: make([]*GoStructField, 0),
EmbeddedStructTypes: make([]string, 0),
Comments: comments,
}
// Get fields
for _, field := range structType.Fields.List {
// Embedded (anonymous) field: record its type so it can be expanded later
if len(field.Names) == 0 {
switch field.Type.(type) {
case *ast.Ident, *ast.StarExpr, *ast.SelectorExpr:
usedStructType, _ := getUsedStructType(field.Type, packageName)
goStruct.EmbeddedStructTypes = append(goStruct.EmbeddedStructTypes, usedStructType)
}
continue
}
// Get fields comments
comments := make([]string, 0)
if field.Comment != nil && field.Comment.List != nil && len(field.Comment.List) > 0 {
for _, comment := range field.Comment.List {
comments = append(comments, strings.TrimPrefix(comment.Text, "//"))
}
}
required := true
if field.Tag != nil {
tag := reflect.StructTag(field.Tag.Value[1 : len(field.Tag.Value)-1])
jsonTag := tag.Get("json")
if jsonTag != "" {
jsonParts := strings.Split(jsonTag, ",")
if len(jsonParts) > 1 && jsonParts[1] == "omitempty" {
required = false
}
}
}
switch field.Type.(type) {
case *ast.StarExpr, *ast.ArrayType, *ast.MapType, *ast.SelectorExpr:
required = false
}
fieldName := field.Names[0].Name
usedStructType, usedStructPkgName := getUsedStructType(field.Type, packageName)
tsType := fieldTypeToTypescriptType(field.Type, usedStructPkgName)
goStructField := &GoStructField{
Name: fieldName,
JsonName: jsonFieldName(field),
GoType: fieldTypeString(field.Type),
TypescriptType: tsType,
UsedTypescriptType: fieldTypeToUsedTypescriptType(tsType),
Required: required,
Public: field.Names[0].IsExported(),
UsedStructType: usedStructType,
Comments: comments,
}
// If it's an inline struct, capture the full definition as a string
if goStructField.GoType == "__STRUCT__" {
if structType, ok := field.Type.(*ast.StructType); ok {
goStructField.InlineStructType = formatInlineStruct(structType)
}
} else {
// Check if it's a slice of inline structs
if arrayType, ok := field.Type.(*ast.ArrayType); ok {
if structType, ok := arrayType.Elt.(*ast.StructType); ok {
goStructField.InlineStructType = "[]" + formatInlineStruct(structType)
}
}
// Check if it's a map with inline struct values
if mapType, ok := field.Type.(*ast.MapType); ok {
if structType, ok := mapType.Value.(*ast.StructType); ok {
goStructField.InlineStructType = "map[" + fieldTypeString(mapType.Key) + "]" + formatInlineStruct(structType)
}
}
}
goStruct.Fields = append(goStruct.Fields, goStructField)
}
return goStruct
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
func goStructFromAlias(path string, info os.FileInfo, genDecl *ast.GenDecl, typeSpec *ast.TypeSpec, packageName string, alias *ast.Ident, file *ast.File) *GoStruct {
// Get comments
comments := make([]string, 0)
if genDecl.Doc != nil && genDecl.Doc.List != nil && len(genDecl.Doc.List) > 0 {
for _, comment := range genDecl.Doc.List {
comments = append(comments, strings.TrimPrefix(comment.Text, "//"))
}
}
usedStructType, usedStructPkgName := getUsedStructType(typeSpec.Type, packageName)
tsType := fieldTypeToTypescriptType(typeSpec.Type, usedStructPkgName)
goStruct := &GoStruct{
Filepath: filepath.ToSlash(path),
Filename: info.Name(),
Name: typeSpec.Name.Name,
Package: packageName,
FormattedName: getTypePrefix(packageName) + typeSpec.Name.Name,
Fields: make([]*GoStructField, 0),
Comments: comments,
AliasOf: &GoAlias{
GoType: alias.Name,
TypescriptType: tsType,
UsedTypescriptType: fieldTypeToUsedTypescriptType(tsType),
UsedStructType: usedStructType,
},
}
// Get declared values - useful for building enums or union types
// e.g. const Something AliasType = "something"
goStruct.AliasOf.DeclaredValues = make([]string, 0)
for _, decl := range file.Decls {
genDecl, ok := decl.(*ast.GenDecl)
if !ok || genDecl.Tok != token.CONST {
continue
}
for _, spec := range genDecl.Specs {
valueSpec, ok := spec.(*ast.ValueSpec)
if !ok {
continue
}
valueSpecType := fieldTypeString(valueSpec.Type)
if len(valueSpec.Names) == 1 && valueSpec.Names[0].IsExported() && valueSpecType == typeSpec.Name.Name {
for _, value := range valueSpec.Values {
name, ok := value.(*ast.BasicLit)
if !ok {
continue
}
goStruct.AliasOf.DeclaredValues = append(goStruct.AliasOf.DeclaredValues, name.Value)
}
}
}
}
return goStruct
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// getUsedStructType returns the used struct type for a given type declaration.
// For example, if the type declaration is `map[string]models.User`, the used struct type is `models.User`.
// If the type declaration is `[]User`, the used struct type is `{packageName}.User`.
func getUsedStructType(expr ast.Expr, packageName string) (string, string) {
usedStructType := fieldTypeToUsedStructType(expr)
switch usedStructType {
case "string", "bool", "byte", "uint", "uint8", "uint16", "uint32", "uint64", "int", "int8", "int16", "int32", "int64", "float", "float32", "float64":
return "", ""
case "__STRUCT__":
return "", ""
}
if usedStructType != "__STRUCT__" && usedStructType != "" && !strings.Contains(usedStructType, ".") {
usedStructType = packageName + "." + usedStructType
}
pkgName := strings.Split(usedStructType, ".")[0]
return usedStructType, pkgName
}
// fieldTypeString returns the field type as a string.
// For example, if the field type is `[]*models.User`, the return value is `[]models.User`.
// If the field type is `[]InternalStruct`, the return value is `[]InternalStruct`.
func fieldTypeString(fieldType ast.Expr) string {
switch t := fieldType.(type) {
case *ast.Ident:
return t.Name
case *ast.StarExpr:
//return "*" + fieldTypeString(t.X)
return fieldTypeString(t.X)
case *ast.ArrayType:
if fieldTypeString(t.Elt) == "byte" {
return "string"
}
return "[]" + fieldTypeString(t.Elt)
case *ast.MapType:
return "map[" + fieldTypeString(t.Key) + "]" + fieldTypeString(t.Value)
case *ast.SelectorExpr:
return fieldTypeString(t.X) + "." + t.Sel.Name
case *ast.StructType:
return "__STRUCT__"
default:
return ""
}
}
// fieldTypeToTypescriptType returns the field type as a string in TypeScript format.
// For example, if the field type is `[]*models.User`, the return value is `Array<Models_User>`.
func fieldTypeToTypescriptType(fieldType ast.Expr, usedStructPkgName string) string {
switch t := fieldType.(type) {
case *ast.Ident:
switch t.Name {
case "string":
return "string"
case "uint", "uint8", "uint16", "uint32", "uint64", "int", "int8", "int16", "int32", "int64", "float", "float32", "float64":
return "number"
case "bool":
return "boolean"
case "byte":
return "string"
case "time.Time":
return "string"
case "nil":
return "null"
default:
return getTypePrefix(usedStructPkgName) + t.Name
}
case *ast.StarExpr:
return fieldTypeToTypescriptType(t.X, usedStructPkgName)
case *ast.ArrayType:
if fieldTypeString(t.Elt) == "byte" {
return "string"
}
return "Array<" + fieldTypeToTypescriptType(t.Elt, usedStructPkgName) + ">"
case *ast.MapType:
return "Record<" + fieldTypeToTypescriptType(t.Key, usedStructPkgName) + ", " + fieldTypeToTypescriptType(t.Value, usedStructPkgName) + ">"
case *ast.SelectorExpr:
if t.Sel.Name == "Time" {
return "string"
}
return getTypePrefix(usedStructPkgName) + t.Sel.Name
case *ast.StructType:
s := "{ "
for _, field := range t.Fields.List {
s += jsonFieldName(field) + ": " + fieldTypeToTypescriptType(field.Type, usedStructPkgName) + "; "
}
s += "}"
return s
default:
return "any"
}
}
func stringGoTypeToTypescriptType(goType string) string {
switch goType {
case "string":
return "string"
case "uint", "uint8", "uint16", "uint32", "uint64", "int", "int8", "int16", "int32", "int64", "float", "float32", "float64":
return "number"
case "nil":
return "null"
case "bool":
return "boolean"
case "time.Time":
return "string"
}
if strings.HasPrefix(goType, "[]") {
return "Array<" + stringGoTypeToTypescriptType(goType[2:]) + ">"
}
if strings.HasPrefix(goType, "*") {
return stringGoTypeToTypescriptType(goType[1:])
}
if strings.HasPrefix(goType, "map[") {
s := strings.TrimPrefix(goType, "map[")
key := ""
value := ""
for i, c := range s {
if c == ']' {
key = s[:i]
value = s[i+1:]
break
}
}
return "Record<" + stringGoTypeToTypescriptType(key) + ", " + stringGoTypeToTypescriptType(value) + ">"
}
if strings.Contains(goType, ".") {
parts := strings.Split(goType, ".")
return getTypePrefix(parts[0]) + parts[1]
}
return goType
}
func goTypeToTypescriptType(goType string) string {
switch goType {
case "string":
return "string"
case "uint", "uint8", "uint16", "uint32", "uint64", "int", "int8", "int16", "int32", "int64", "float", "float32", "float64":
return "number"
case "bool":
return "boolean"
case "nil":
return "null"
case "time.Time":
return "string"
default:
return "unknown"
}
}
// fieldTypeUnformattedString returns the field type as a string without formatting.
// For example, if the field type is `[]*models.User`, the return value is `models.User`.
// /!\ Caveat: this assumes that the map key is always a string.
func fieldTypeUnformattedString(fieldType ast.Expr) string {
switch t := fieldType.(type) {
case *ast.Ident:
return t.Name
case *ast.StarExpr:
//return "*" + fieldTypeString(t.X)
return fieldTypeUnformattedString(t.X)
case *ast.ArrayType:
return fieldTypeUnformattedString(t.Elt)
case *ast.MapType:
return fieldTypeUnformattedString(t.Value)
case *ast.SelectorExpr:
return fieldTypeString(t.X) + "." + t.Sel.Name
default:
return ""
}
}
// fieldTypeToUsedStructType returns the used struct type for a given field type.
// For example, if the field type is `[]*models.User`, the return value is `models.User`.
func fieldTypeToUsedStructType(fieldType ast.Expr) string {
switch t := fieldType.(type) {
case *ast.StarExpr:
return fieldTypeString(t.X)
case *ast.ArrayType:
return fieldTypeString(t.Elt)
case *ast.MapType:
return fieldTypeUnformattedString(t.Value)
case *ast.SelectorExpr:
return fieldTypeString(t)
case *ast.Ident:
return t.Name
case *ast.StructType:
return "__STRUCT__"
default:
return ""
}
}
func jsonFieldName(field *ast.Field) string {
if field.Tag != nil {
tag := reflect.StructTag(strings.ReplaceAll(field.Tag.Value[1:len(field.Tag.Value)-1], "\\\"", "\""))
jsonTag := tag.Get("json")
if jsonTag != "" {
jsonParts := strings.Split(jsonTag, ",")
if jsonParts[0] == "-" {
return ""
}
if jsonParts[0] != "" {
return jsonParts[0]
}
}
}
return field.Names[0].Name
}
func jsonFieldOmitEmpty(field *ast.Field) bool {
if field.Tag != nil {
tag := reflect.StructTag(strings.ReplaceAll(field.Tag.Value[1:len(field.Tag.Value)-1], "\\\"", "\""))
jsonTag := tag.Get("json")
if jsonTag != "" {
jsonParts := strings.Split(jsonTag, ",")
return len(jsonParts) > 1 && jsonParts[1] == "omitempty"
}
}
return false
}
func isCustomStruct(goType string) bool {
return goTypeToTypescriptType(goType) == "unknown"
}
var nameExceptions = map[string]string{"OAuth2": "oauth2"}
func convertGoToJSName(name string) string {
if v, ok := nameExceptions[name]; ok {
return v
}
startUppercase := make([]rune, 0, len(name))
for _, c := range name {
if c != '_' && !unicode.IsUpper(c) && !unicode.IsDigit(c) {
break
}
startUppercase = append(startUppercase, c)
}
totalStartUppercase := len(startUppercase)
// all uppercase eg. "JSON" -> "json"
if len(name) == totalStartUppercase {
return strings.ToLower(name)
}
// eg. "JSONField" -> "jsonField"
if totalStartUppercase > 1 {
return strings.ToLower(name[0:totalStartUppercase-1]) + name[totalStartUppercase-1:]
}
// eg. "GetField" -> "getField"
if totalStartUppercase == 1 {
return strings.ToLower(name[0:1]) + name[1:]
}
return name
}
// fieldTypeToUsedTypescriptType extracts the core TypeScript type from complex type expressions
// For example, if the type is Array<Models_User>, it returns Models_User
// If the type is Record<string, Models_User>, it returns Models_User
func fieldTypeToUsedTypescriptType(tsType string) string {
// Handle arrays: Array<Type> -> Type
if strings.HasPrefix(tsType, "Array<") && strings.HasSuffix(tsType, ">") {
innerType := strings.TrimPrefix(strings.TrimSuffix(tsType, ">"), "Array<")
return fieldTypeToUsedTypescriptType(innerType)
}
// Handle records: Record<Key, Value> -> Value
if strings.HasPrefix(tsType, "Record<") && strings.HasSuffix(tsType, ">") {
innerType := strings.TrimPrefix(strings.TrimSuffix(tsType, ">"), "Record<")
// Find the comma that separates key and value
commaIndex := -1
bracketCount := 0
for i, char := range innerType {
if char == '<' {
bracketCount++
} else if char == '>' {
bracketCount--
} else if char == ',' && bracketCount == 0 {
commaIndex = i
break
}
}
if commaIndex != -1 {
valueType := strings.TrimSpace(innerType[commaIndex+1:])
return fieldTypeToUsedTypescriptType(valueType)
}
}
// Handle primitive types
switch tsType {
case "string", "number", "boolean", "any", "null", "undefined":
return ""
}
return tsType
}
// formatInlineStruct formats an inline struct definition as a string
// e.g. struct{Test string `json:"test"`, Test2 string `json:"test2"`}
func formatInlineStruct(structType *ast.StructType) string {
result := "struct{\n"
for i, field := range structType.Fields.List {
if i > 0 {
result += "\n"
}
if field.Names != nil && len(field.Names) > 0 {
result += field.Names[0].Name + " " + fieldTypeString(field.Type)
if field.Tag != nil {
result += " " + field.Tag.Value
}
} else {
result += fieldTypeString(field.Type)
}
}
result += "}"
return result
}
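To make the mapping rules above concrete, here is a hypothetical struct together with, in a comment, the TypeScript interface that `ExtractStructs` and `fieldTypeToTypescriptType` would derive for it (the `models` package maps to the `Models_` prefix via `typePrefixesByPackage`). The struct is not part of the real codebase and the TypeScript shown is hand-derived from the conversion rules, not generator output:

```go
package models

import "time"

// ExampleUser is a hypothetical struct used only to illustrate the extraction
// rules. It would be registered as "Models_ExampleUser" and map roughly to:
//
//	interface Models_ExampleUser {
//	    id: number;                       // int                -> number
//	    name: string;                     // string             -> string
//	    tags?: Array<string>;             // []string           -> Array<string> (slices are optional)
//	    scores?: Record<string, number>;  // map[string]float64 -> Record<string, number> (maps are optional)
//	    createdAt?: string;               // time.Time          -> string (selector types are optional)
//	}
type ExampleUser struct {
	ID        int                `json:"id"`
	Name      string             `json:"name"`
	Tags      []string           `json:"tags"`
	Scores    map[string]float64 `json:"scores"`
	CreatedAt time.Time          `json:"createdAt"`
}
```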

View File

@@ -0,0 +1,23 @@
package codegen
import (
"github.com/davecgh/go-spew/spew"
"github.com/stretchr/testify/require"
"os"
"path/filepath"
"testing"
)
func TestGetGoStructsFromFile(t *testing.T) {
testPath := filepath.Join(".", "examples", "structs1.go")
info, err := os.Stat(testPath)
require.NoError(t, err)
goStructs, err := getGoStructsFromFile(testPath, info)
require.NoError(t, err)
spew.Dump(goStructs)
}

View File

@@ -0,0 +1,465 @@
package codegen
import (
"fmt"
"os"
"os/exec"
"path/filepath"
"slices"
"strconv"
"strings"
"github.com/samber/lo"
)
const (
typescriptEndpointsFileName = "endpoints.ts"
typescriptEndpointTypesFileName = "endpoint.types.ts"
typescriptHooksFileName = "hooks_template.ts"
goEndpointsFileName = "endpoints.go"
space = " "
)
var additionalStructNamesForEndpoints = []string{}
func GenerateTypescriptEndpointsFile(handlersJsonPath string, structsJsonPath string, outDir string, eventDir string) []string {
handlers := LoadHandlers(handlersJsonPath)
structs := LoadPublicStructs(structsJsonPath)
_ = os.MkdirAll(outDir, os.ModePerm)
f, err := os.Create(filepath.Join(outDir, typescriptEndpointsFileName))
if err != nil {
panic(err)
}
defer f.Close()
typeF, err := os.Create(filepath.Join(outDir, typescriptEndpointTypesFileName))
if err != nil {
panic(err)
}
defer typeF.Close()
hooksF, err := os.Create(filepath.Join(outDir, typescriptHooksFileName))
if err != nil {
panic(err)
}
defer hooksF.Close()
f.WriteString("// This code was generated by codegen/main.go. DO NOT EDIT.\n\n")
f.WriteString(`export type ApiEndpoints = Record<string, Record<string, {
key: string,
methods: ("POST" | "GET" | "PATCH" | "PUT" | "DELETE")[],
endpoint: string
}>>
`)
f.WriteString("export const API_ENDPOINTS = {\n")
groupedByFile := make(map[string][]*RouteHandler)
for _, handler := range handlers {
if _, ok := groupedByFile[handler.Filename]; !ok {
groupedByFile[handler.Filename] = make([]*RouteHandler, 0)
}
groupedByFile[handler.Filename] = append(groupedByFile[handler.Filename], handler)
}
filenames := make([]string, 0)
for k := range groupedByFile {
filenames = append(filenames, k)
}
slices.SortStableFunc(filenames, func(i, j string) int {
return strings.Compare(i, j)
})
// Store the endpoints
endpointsMap := make(map[string]string)
for _, filename := range filenames {
routes := groupedByFile[filename]
if len(routes) == 0 {
continue
}
if lo.EveryBy(routes, func(route *RouteHandler) bool {
return route.Api == nil || len(route.Api.Methods) == 0
}) {
continue
}
groupName := strings.ToUpper(strings.TrimSuffix(filename, ".go"))
writeLine(f, fmt.Sprintf("\t%s: {", groupName)) // USERS: {
for _, route := range groupedByFile[filename] {
if route.Api == nil || len(route.Api.Methods) == 0 {
continue
}
if len(route.Api.Descriptions) > 0 {
writeLine(f, " /**")
f.WriteString(fmt.Sprintf(" * @description\n"))
f.WriteString(fmt.Sprintf(" * Route %s\n", route.Api.Summary))
for _, cmt := range route.Api.Descriptions {
writeLine(f, fmt.Sprintf(" * %s", strings.TrimSpace(cmt)))
}
writeLine(f, " */")
}
writeLine(f, fmt.Sprintf("\t\t%s: {", strings.TrimPrefix(route.Name, "Handle"))) // GetAnimeCollection: {
methodStr := ""
if len(route.Api.Methods) > 1 {
methodStr = fmt.Sprintf("\"%s\"", strings.Join(route.Api.Methods, "\", \""))
} else {
methodStr = fmt.Sprintf("\"%s\"", route.Api.Methods[0])
}
endpointsMap[strings.TrimPrefix(route.Name, "Handle")] = getEndpointKey(route.Name, groupName)
writeLine(f, fmt.Sprintf("\t\t\tkey: \"%s\",", getEndpointKey(route.Name, groupName)))
writeLine(f, fmt.Sprintf("\t\t\tmethods: [%s],", methodStr)) // methods: ['GET'],
writeLine(f, fmt.Sprintf("\t\t\tendpoint: \"%s\",", route.Api.Endpoint)) // endpoint: '/api/v1/anilist/collection',
writeLine(f, "\t\t},") // },
}
writeLine(f, "\t},") // },
}
f.WriteString("} satisfies ApiEndpoints\n\n")
referenceGoStructs := make([]string, 0)
for _, filename := range filenames {
routes := groupedByFile[filename]
if len(routes) == 0 {
continue
}
for _, route := range groupedByFile[filename] {
if route.Api == nil || len(route.Api.Methods) == 0 {
continue
}
if len(route.Api.Params) == 0 && len(route.Api.BodyFields) == 0 {
continue
}
for _, param := range route.Api.BodyFields {
if param.UsedStructType != "" {
referenceGoStructs = append(referenceGoStructs, param.UsedStructType)
}
}
for _, param := range route.Api.Params {
if param.UsedStructType != "" {
referenceGoStructs = append(referenceGoStructs, param.UsedStructType)
}
}
}
}
referenceGoStructs = lo.Uniq(referenceGoStructs)
typeF.WriteString("// This code was generated by codegen/main.go. DO NOT EDIT.\n\n")
//
// Imports
//
importedTypes := make([]string, 0)
//
for _, structName := range referenceGoStructs {
parts := strings.Split(structName, ".")
if len(parts) != 2 {
continue
}
var goStruct *GoStruct
for _, s := range structs {
if s.Name == parts[1] && s.Package == parts[0] {
goStruct = s
break
}
}
if goStruct == nil {
continue
}
importedTypes = append(importedTypes, goStruct.FormattedName)
}
for _, otherStructName := range additionalStructNamesForEndpoints {
importedTypes = append(importedTypes, stringGoTypeToTypescriptType(otherStructName))
}
//
slices.SortStableFunc(importedTypes, func(i, j string) int {
return strings.Compare(i, j)
})
typeF.WriteString("import type {\n")
for _, typeName := range importedTypes {
typeF.WriteString(fmt.Sprintf(" %s,\n", typeName))
}
typeF.WriteString("} from \"@/api/generated/types.ts\"\n\n")
//
// Types
//
for _, filename := range filenames {
routes := groupedByFile[filename]
if len(routes) == 0 {
continue
}
typeF.WriteString("//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////\n")
typeF.WriteString(fmt.Sprintf("// %s\n", strings.TrimSuffix(filename, ".go")))
typeF.WriteString("//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////\n\n")
for _, route := range groupedByFile[filename] {
if route.Api == nil || len(route.Api.Methods) == 0 {
continue
}
if len(route.Api.Params) == 0 && len(route.Api.BodyFields) == 0 {
continue
}
typeF.WriteString("/**\n")
typeF.WriteString(fmt.Sprintf(" * - Filepath: %s\n", filepath.ToSlash(strings.TrimPrefix(route.Filepath, "..\\"))))
typeF.WriteString(fmt.Sprintf(" * - Filename: %s\n", route.Filename))
typeF.WriteString(fmt.Sprintf(" * - Endpoint: %s\n", route.Api.Endpoint))
if len(route.Api.Summary) > 0 {
typeF.WriteString(fmt.Sprintf(" * @description\n"))
typeF.WriteString(fmt.Sprintf(" * Route %s\n", strings.TrimSpace(route.Api.Summary)))
}
typeF.WriteString(" */\n")
typeF.WriteString(fmt.Sprintf("export type %s_Variables = {\n", strings.TrimPrefix(route.Name, "Handle"))) // export type EditAnimeEntry_Variables = {
addedBodyFields := false
for _, param := range route.Api.BodyFields {
writeParamField(typeF, route, param) // mediaId: number;
if param.UsedStructType != "" {
referenceGoStructs = append(referenceGoStructs, param.UsedStructType)
}
addedBodyFields = true
}
if !addedBodyFields {
for _, param := range route.Api.Params {
writeParamField(typeF, route, param) // mediaId: number;
if param.UsedStructType != "" {
referenceGoStructs = append(referenceGoStructs, param.UsedStructType)
}
}
}
writeLine(typeF, "}\n")
}
}
generateHooksFile(hooksF, groupedByFile, filenames)
generateEventFile(eventDir, endpointsMap)
return referenceGoStructs
}
func generateHooksFile(f *os.File, groupedHandlers map[string][]*RouteHandler, filenames []string) {
queryTemplate := `// export function use{handlerName}({props}) {
// return useServerQuery{<}{TData}{TVar}{>}({
// endpoint: API_ENDPOINTS.{groupName}.{handlerName}.endpoint{endpointSuffix},
// method: API_ENDPOINTS.{groupName}.{handlerName}.methods[%d],
// queryKey: [API_ENDPOINTS.{groupName}.{handlerName}.key],
// enabled: true,
// })
// }
`
mutationTemplate := `// export function use{handlerName}({props}) {
// return useServerMutation{<}{TData}{TVar}{>}({
// endpoint: API_ENDPOINTS.{groupName}.{handlerName}.endpoint{endpointSuffix},
// method: API_ENDPOINTS.{groupName}.{handlerName}.methods[%d],
// mutationKey: [API_ENDPOINTS.{groupName}.{handlerName}.key],
// onSuccess: async () => {
//
// },
// })
// }
`
tmpGroupTmpls := make(map[string][]string)
for _, filename := range filenames {
routes := groupedHandlers[filename]
if len(routes) == 0 {
continue
}
if lo.EveryBy(routes, func(route *RouteHandler) bool {
return route.Api == nil || len(route.Api.Methods) == 0
}) {
continue
}
f.WriteString("//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////\n")
f.WriteString(fmt.Sprintf("// %s\n", strings.TrimSuffix(filename, ".go")))
f.WriteString("//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////\n\n")
tmpls := make([]string, 0)
for _, route := range groupedHandlers[filename] {
if route.Api == nil || len(route.Api.Methods) == 0 {
continue
}
for i, method := range route.Api.Methods {
tmpl := ""
if method == "GET" {
getTemplate := strings.ReplaceAll(queryTemplate, "{handlerName}", strings.TrimPrefix(route.Name, "Handle"))
getTemplate = strings.ReplaceAll(getTemplate, "{groupName}", strings.ToUpper(strings.TrimSuffix(filename, ".go")))
getTemplate = strings.ReplaceAll(getTemplate, "{method}", "GET")
tmpl = getTemplate
}
if method == "POST" || method == "PATCH" || method == "PUT" || method == "DELETE" {
mutTemplate := strings.ReplaceAll(mutationTemplate, "{handlerName}", strings.TrimPrefix(route.Name, "Handle"))
mutTemplate = strings.ReplaceAll(mutTemplate, "{groupName}", strings.ToUpper(strings.TrimSuffix(filename, ".go")))
mutTemplate = strings.ReplaceAll(mutTemplate, "{method}", method)
tmpl = mutTemplate
}
tmpl = strings.ReplaceAll(tmpl, "%d", strconv.Itoa(i))
if len(route.Api.ReturnTypescriptType) == 0 {
tmpl = strings.ReplaceAll(tmpl, "{<}", "")
tmpl = strings.ReplaceAll(tmpl, "{TData}", "")
tmpl = strings.ReplaceAll(tmpl, "{TVar}", "")
tmpl = strings.ReplaceAll(tmpl, "{>}", "")
} else {
tmpl = strings.ReplaceAll(tmpl, "{<}", "<")
tmpl = strings.ReplaceAll(tmpl, "{TData}", route.Api.ReturnTypescriptType)
tmpl = strings.ReplaceAll(tmpl, "{>}", ">")
}
if len(route.Api.Params) == 0 {
tmpl = strings.ReplaceAll(tmpl, "{endpointSuffix}", "")
tmpl = strings.ReplaceAll(tmpl, "{props}", "")
} else {
props := ""
for _, param := range route.Api.Params {
props += fmt.Sprintf(`%s: %s, `, param.JsonName, param.TypescriptType)
}
tmpl = strings.ReplaceAll(tmpl, "{props}", props[:len(props)-2])
endpointSuffix := ""
for _, param := range route.Api.Params {
endpointSuffix += fmt.Sprintf(`.replace("{%s}", String(%s))`, param.JsonName, param.JsonName)
}
tmpl = strings.ReplaceAll(tmpl, "{endpointSuffix}", endpointSuffix)
}
if len(route.Api.BodyFields) == 0 {
tmpl = strings.ReplaceAll(tmpl, "{TVar}", "")
} else {
tmpl = strings.ReplaceAll(tmpl, "{TVar}", fmt.Sprintf(", %s", strings.TrimPrefix(route.Name, "Handle")+"_Variables"))
}
tmpls = append(tmpls, tmpl)
f.WriteString(tmpl)
}
}
tmpGroupTmpls[strings.TrimSuffix(filename, ".go")] = tmpls
}
//for filename, tmpls := range tmpGroupTmpls {
// hooksF, err := os.Create(filepath.Join("../seanime-web/src/api/hooks", filename+".hooks.ts"))
// if err != nil {
// panic(err)
// }
// defer hooksF.Close()
//
// for _, tmpl := range tmpls {
// hooksF.WriteString(tmpl)
// }
//}
}
func generateEventFile(eventDir string, endpointsMap map[string]string) {
fp := filepath.Join(eventDir, goEndpointsFileName)
file, err := os.Create(fp)
if err != nil {
panic(err)
}
defer file.Close()
file.WriteString("// This code was generated by codegen/main.go. DO NOT EDIT.\n")
file.WriteString("package events\n\n")
// file.WriteString(fmt"var Endpoint = map[string]string{\n")
endpoints := []string{}
for endpoint := range endpointsMap {
endpoints = append(endpoints, endpoint)
}
slices.SortStableFunc(endpoints, func(i, j string) int {
return strings.Compare(i, j)
})
goFmtSpacing := ""
file.WriteString("const (\n")
for _, endpoint := range endpoints {
file.WriteString(fmt.Sprintf(" %sEndpoint%s= \"%s\"\n", endpoint, goFmtSpacing, endpointsMap[endpoint]))
}
file.WriteString(")\n")
cmd := exec.Command("gofmt", "-w", fp)
cmd.Run()
}
func writeParamField(f *os.File, handler *RouteHandler, param *RouteHandlerParam) {
if len(param.Descriptions) > 0 {
writeLine(f, "\t/**")
for _, cmt := range param.Descriptions {
writeLine(f, fmt.Sprintf("\t * %s", strings.TrimSpace(cmt)))
}
writeLine(f, "\t */")
}
fieldSuffix := ""
if !param.Required {
fieldSuffix = "?"
}
writeLine(f, fmt.Sprintf("\t%s%s: %s", param.JsonName, fieldSuffix, param.TypescriptType))
}
func getEndpointKey(s string, groupName string) string {
s = strings.TrimPrefix(s, "Handle")
var result string
for i, v := range s {
if i > 0 && v >= 'A' && v <= 'Z' {
result += "-"
}
result += string(v)
}
result = strings.ToLower(result)
if strings.Contains(result, "t-v-d-b") {
result = strings.Replace(result, "t-v-d-b", "tvdb", 1)
}
if strings.Contains(result, "m-a-l") {
result = strings.Replace(result, "m-a-l", "mal", 1)
}
return strings.ReplaceAll(groupName, "_", "-") + "-" + result
}
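// Illustrative sketch (added for clarity, not part of the generator): key
// derivation for hypothetical handler and group names.
func exampleGetEndpointKey() {
	_ = getEndpointKey("HandleGetAnimeCollection", "ANILIST") // "ANILIST-get-anime-collection"
	_ = getEndpointKey("HandleGetTorrents", "TORRENT_CLIENT") // "TORRENT-CLIENT-get-torrents"
}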
func writeLine(file *os.File, template string) {
template = strings.ReplaceAll(template, "\t", space)
file.WriteString(template + "\n") // write directly; template is not a format string and may contain '%'
}
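// Illustrative sketch (added for clarity, not part of the generator): for a
// hypothetical HandleGetAnimeCollection handler in anilist.go, the emitted
// endpoints.ts entry would look roughly like:
//
//	export const API_ENDPOINTS = {
//	    ANILIST: {
//	        GetAnimeCollection: {
//	            key: "ANILIST-get-anime-collection",
//	            methods: ["GET"],
//	            endpoint: "/api/v1/anilist/collection",
//	        },
//	    },
//	} satisfies ApiEndpoints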

View File

@@ -0,0 +1,334 @@
package codegen
import (
"cmp"
"fmt"
"os"
"path/filepath"
"slices"
"strings"
"golang.org/x/text/cases"
"golang.org/x/text/language"
)
const (
typescriptFileName = "types.ts"
)
// Structs that are not directly referenced by the API routes but are needed for the Typescript file.
var additionalStructNames = []string{
"torrentstream.TorrentLoadingStatus",
"torrentstream.TorrentStatus",
"debrid_client.StreamState",
"extension_repo.TrayPluginExtensionItem",
"vendor_habari.Metadata",
"nativeplayer.PlaybackInfo",
"nativeplayer.ServerEvent",
"nativeplayer.ClientEvent",
"mkvparser.SubtitleEvent",
"nakama.NakamaStatus",
}
// GenerateTypescriptFile generates a Typescript file containing the types for the API routes parameters and responses based on the Docs struct.
func GenerateTypescriptFile(docsFilePath string, publicStructsFilePath string, outDir string, goStructStrs []string) {
handlers := LoadHandlers(docsFilePath)
goStructs := LoadPublicStructs(publicStructsFilePath)
// e.g. map["models.User"]*GoStruct
goStructsMap := make(map[string]*GoStruct)
for _, goStruct := range goStructs {
goStructsMap[goStruct.Package+"."+goStruct.Name] = goStruct
}
// Expand the structs with embedded structs
for _, goStruct := range goStructs {
for _, embeddedStructType := range goStruct.EmbeddedStructTypes {
if embeddedStructType != "" {
if usedStruct, ok := goStructsMap[embeddedStructType]; ok {
for _, usedField := range usedStruct.Fields {
goStruct.Fields = append(goStruct.Fields, usedField)
}
}
}
}
}
// Create the typescript file
_ = os.MkdirAll(outDir, os.ModePerm)
file, err := os.Create(filepath.Join(outDir, typescriptFileName))
if err != nil {
panic(err)
}
defer file.Close()
// Write the typescript file
file.WriteString("// This code was generated by codegen/main.go. DO NOT EDIT.\n\n")
// Get all the returned structs from the routes
// e.g. @returns models.User
structStrMap := make(map[string]int)
for _, str := range goStructStrs {
if _, ok := structStrMap[str]; ok {
structStrMap[str]++
} else {
structStrMap[str] = 1
}
}
for _, handler := range handlers {
if handler.Api != nil {
switch handler.Api.ReturnTypescriptType {
case "null", "string", "number", "boolean":
continue
}
if _, ok := structStrMap[handler.Api.ReturnGoType]; ok {
structStrMap[handler.Api.ReturnGoType]++
} else {
structStrMap[handler.Api.ReturnGoType] = 1
}
}
}
// Isolate the structs that are returned more than once
sharedStructStrs := make([]string, 0)
otherStructStrs := make([]string, 0)
for k, v := range structStrMap {
if v > 1 {
sharedStructStrs = append(sharedStructStrs, k)
} else {
otherStructStrs = append(otherStructStrs, k)
}
}
// Now that we have the returned structs, store them in slices
sharedStructs := make([]*GoStruct, 0)
otherStructs := make([]*GoStruct, 0)
for _, structStr := range sharedStructStrs {
// e.g. "models.User"
structStrParts := strings.Split(structStr, ".")
if len(structStrParts) != 2 {
continue
}
// Find the struct
goStruct, ok := goStructsMap[structStr]
if ok {
sharedStructs = append(sharedStructs, goStruct)
}
}
for _, structStr := range otherStructStrs {
// e.g. "models.User"
structStrParts := strings.Split(structStr, ".")
if len(structStrParts) != 2 {
continue
}
// Find the struct
goStruct, ok := goStructsMap[structStr]
if ok {
otherStructs = append(otherStructs, goStruct)
}
}
// Add additional structs to otherStructs
for _, structName := range additionalStructNames {
if goStruct, ok := goStructsMap[structName]; ok {
otherStructs = append(otherStructs, goStruct)
}
}
//-------------------------
referencedStructs, ok := getReferencedStructsRecursively(sharedStructs, otherStructs, goStructsMap)
if !ok {
panic("Failed to get referenced structs")
}
// Keep track of written Typescript types
// This is to avoid name collisions
writtenTypes := make(map[string]*GoStruct)
// Group the structs by package
structsByPackage := make(map[string][]*GoStruct)
for _, goStruct := range referencedStructs {
if _, ok := structsByPackage[goStruct.Package]; !ok {
structsByPackage[goStruct.Package] = make([]*GoStruct, 0)
}
structsByPackage[goStruct.Package] = append(structsByPackage[goStruct.Package], goStruct)
}
packages := make([]string, 0)
for k := range structsByPackage {
packages = append(packages, k)
}
slices.SortStableFunc(packages, func(i, j string) int {
return cmp.Compare(i, j)
})
file.WriteString("export type Nullish<T> = T | null | undefined\n\n")
for _, pkg := range packages {
file.WriteString("//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////\n")
file.WriteString(fmt.Sprintf("// %s\n", strings.ReplaceAll(cases.Title(language.English, cases.Compact).String(strings.ReplaceAll(pkg, "_", " ")), " ", "")))
file.WriteString("//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////\n\n")
structs := structsByPackage[pkg]
slices.SortStableFunc(structs, func(i, j *GoStruct) int {
return cmp.Compare(i.FormattedName, j.FormattedName)
})
// Write the shared structs first
for _, goStruct := range structs {
writeTypescriptType(file, goStruct, writtenTypes)
}
}
//for _, goStruct := range referencedStructs {
//
// writeTypescriptType(file, goStruct, writtenTypes)
//
//}
}
// getReferencedStructsRecursively returns a map of GoStructs that are referenced by the fields of sharedStructs and otherStructs.
func getReferencedStructsRecursively(sharedStructs, otherStructs []*GoStruct, goStructsMap map[string]*GoStruct) (map[string]*GoStruct, bool) {
allStructs := make(map[string]*GoStruct)
for _, sharedStruct := range sharedStructs {
allStructs[sharedStruct.Package+"."+sharedStruct.Name] = sharedStruct
}
for _, otherStruct := range otherStructs {
allStructs[otherStruct.Package+"."+otherStruct.Name] = otherStruct
}
// Keep track of the structs that have been visited
referencedStructs := make(map[string]*GoStruct)
for _, strct := range allStructs {
getReferencedStructs(strct, referencedStructs, goStructsMap)
}
return referencedStructs, true
}
func getReferencedStructs(goStruct *GoStruct, referencedStructs map[string]*GoStruct, goStructsMap map[string]*GoStruct) {
if _, ok := referencedStructs[goStruct.Package+"."+goStruct.Name]; ok {
return
}
referencedStructs[goStruct.Package+"."+goStruct.Name] = goStruct
for _, field := range goStruct.Fields {
if field.UsedStructType != "" {
if usedStruct, ok := goStructsMap[field.UsedStructType]; ok {
getReferencedStructs(usedStruct, referencedStructs, goStructsMap)
}
}
}
if goStruct.AliasOf != nil {
if usedStruct, ok := goStructsMap[goStruct.AliasOf.UsedStructType]; ok {
getReferencedStructs(usedStruct, referencedStructs, goStructsMap)
}
}
}
func writeTypescriptType(f *os.File, goStruct *GoStruct, writtenTypes map[string]*GoStruct) {
f.WriteString("/**\n")
f.WriteString(fmt.Sprintf(" * - Filepath: %s\n", strings.TrimPrefix(goStruct.Filepath, "../")))
f.WriteString(fmt.Sprintf(" * - Filename: %s\n", goStruct.Filename))
f.WriteString(fmt.Sprintf(" * - Package: %s\n", goStruct.Package))
if len(goStruct.Comments) > 0 {
f.WriteString(fmt.Sprintf(" * @description\n"))
for _, cmt := range goStruct.Comments {
f.WriteString(fmt.Sprintf(" * %s\n", strings.TrimSpace(cmt)))
}
}
f.WriteString(" */\n")
if len(goStruct.Fields) > 0 {
f.WriteString(fmt.Sprintf("export type %s = {\n", goStruct.FormattedName))
for _, field := range goStruct.Fields {
if field.JsonName == "" {
continue
}
fieldNameSuffix := ""
if !field.Required {
fieldNameSuffix = "?"
}
if len(field.Comments) > 0 {
f.WriteString(fmt.Sprintf(" /**\n"))
for _, cmt := range field.Comments {
f.WriteString(fmt.Sprintf(" * %s\n", strings.TrimSpace(cmt)))
}
f.WriteString(fmt.Sprintf(" */\n"))
}
typeText := field.TypescriptType
//if !field.Required {
// switch typeText {
// case "string", "number", "boolean":
// default:
// typeText = "Nullish<" + typeText + ">"
// }
//}
f.WriteString(fmt.Sprintf(" %s%s: %s\n", field.JsonName, fieldNameSuffix, typeText))
}
f.WriteString("}\n\n")
}
if goStruct.AliasOf != nil {
if goStruct.AliasOf.DeclaredValues != nil && len(goStruct.AliasOf.DeclaredValues) > 0 {
union := ""
if len(goStruct.AliasOf.DeclaredValues) > 5 {
union = strings.Join(goStruct.AliasOf.DeclaredValues, " |\n ")
} else {
union = strings.Join(goStruct.AliasOf.DeclaredValues, " | ")
}
f.WriteString(fmt.Sprintf("export type %s = %s\n\n", goStruct.FormattedName, union))
} else {
f.WriteString(fmt.Sprintf("export type %s = %s\n\n", goStruct.FormattedName, goStruct.AliasOf.TypescriptType))
}
}
// Add the struct to the written types
writtenTypes[goStruct.Package+"."+goStruct.Name] = goStruct
}
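// Illustrative sketch (added for clarity, not part of the generator): for a
// hypothetical struct anime.Entry with fields MediaId int `json:"mediaId"` and
// ListData *EntryListData `json:"listData,omitempty"`, the emitted block would
// look roughly like:
//
//	/**
//	 * - Filepath: internal/library/anime/entry.go
//	 * - Filename: entry.go
//	 * - Package: anime
//	 */
//	export type Anime_Entry = {
//	    mediaId: number
//	    listData?: Anime_EntryListData
//	}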
func getUnformattedGoType(goType string) string {
if strings.HasPrefix(goType, "[]") {
return getUnformattedGoType(goType[2:])
}
if strings.HasPrefix(goType, "*") {
return getUnformattedGoType(goType[1:])
}
if strings.HasPrefix(goType, "map[") {
s := strings.TrimPrefix(goType, "map[")
value := ""
for i, c := range s {
if c == ']' {
value = s[i+1:]
break
}
}
return getUnformattedGoType(value)
}
return goType
}
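// Illustrative sketch (added for clarity, not part of the generator): wrapper
// stripping performed by getUnformattedGoType; the type names are hypothetical.
func exampleGetUnformattedGoType() {
	_ = getUnformattedGoType("[]*models.User")          // "models.User"
	_ = getUnformattedGoType("map[string]*anime.Entry") // "anime.Entry"
	_ = getUnformattedGoType("*string")                 // "string"
}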
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

View File

@@ -0,0 +1,34 @@
package codegen
import (
"encoding/json"
"os"
)
func LoadHandlers(path string) []*RouteHandler {
var handlers []*RouteHandler
docsContent, err := os.ReadFile(path)
if err != nil {
panic(err)
}
err = json.Unmarshal(docsContent, &handlers)
if err != nil {
panic(err)
}
return handlers
}
func LoadPublicStructs(path string) []*GoStruct {
var goStructs []*GoStruct
structsContent, err := os.ReadFile(path)
if err != nil {
panic(err)
}
err = json.Unmarshal(structsContent, &goStructs)
if err != nil {
panic(err)
}
return goStructs
}

View File

@@ -0,0 +1,56 @@
//go:generate go run main.go --skipHandlers=false --skipStructs=false --skipTypes=false --skipPluginEvents=false --skipHookEvents=false --skipHandlerHookEvents=false
package main
import (
"flag"
codegen "seanime/codegen/internal"
)
func main() {
var skipHandlers bool
flag.BoolVar(&skipHandlers, "skipHandlers", false, "Skip generating docs")
var skipStructs bool
flag.BoolVar(&skipStructs, "skipStructs", false, "Skip generating structs")
var skipTypes bool
flag.BoolVar(&skipTypes, "skipTypes", false, "Skip generating types")
var skipPluginEvents bool
flag.BoolVar(&skipPluginEvents, "skipPluginEvents", false, "Skip generating plugin events")
var skipHookEvents bool
flag.BoolVar(&skipHookEvents, "skipHookEvents", false, "Skip generating hook events")
var skipHandlerHookEvents bool
flag.BoolVar(&skipHandlerHookEvents, "skipHandlerHookEvents", false, "Skip generating handler hook events")
flag.Parse()
if !skipHandlers {
codegen.GenerateHandlers("../internal/handlers", "./generated")
}
if !skipStructs {
codegen.ExtractStructs("../internal", "./generated")
}
if !skipTypes {
goStructStrs := codegen.GenerateTypescriptEndpointsFile("./generated/handlers.json", "./generated/public_structs.json", "../seanime-web/src/api/generated", "../internal/events")
codegen.GenerateTypescriptFile("./generated/handlers.json", "./generated/public_structs.json", "../seanime-web/src/api/generated", goStructStrs)
}
// if !skipHandlerHookEvents {
// codegen.GenerateHandlerHookEvents("./generated/handlers.json", "../internal/handlers")
// }
if !skipPluginEvents {
codegen.GeneratePluginEventFile("../internal/plugin/ui/events.go", "../seanime-web/src/app/(main)/_features/plugin/generated")
}
if !skipHookEvents {
codegen.GeneratePluginHooksDefinitionFile("../internal/extension_repo/goja_plugin_types", "./generated/public_structs.json", "./generated")
}
}
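// The generator is presumably run from the codegen directory (the "../internal"
// and "../seanime-web" paths above are relative to it), either directly with
// "go run main.go" or via "go generate" using the directive at the top of this file.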

Binary files not shown (27 image files added; sizes range from 89 KiB to 16 MiB).

213
seanime-2.9.10/go.mod Normal file
View File

@@ -0,0 +1,213 @@
module seanime
go 1.24.3
require (
fyne.io/systray v1.11.0
github.com/5rahim/go-astisub v0.2.1
github.com/5rahim/gomkv v0.2.1
github.com/5rahim/habari v0.1.7
github.com/Masterminds/semver/v3 v3.4.0
github.com/Microsoft/go-winio v0.6.2
github.com/PuerkitoBio/goquery v1.10.3
github.com/Yamashou/gqlgenc v0.25.4
github.com/adrg/strutil v0.3.1
github.com/anacrolix/log v0.16.0
github.com/anacrolix/torrent v1.58.1
github.com/bmatcuk/doublestar/v4 v4.9.1
github.com/charmbracelet/lipgloss v1.1.0
github.com/cli/browser v1.3.0
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc
github.com/dop251/goja v0.0.0-20250531102226-cb187b08699c
github.com/dop251/goja_nodejs v0.0.0-20250409162600-f7acab6894b0
github.com/dustin/go-humanize v1.0.1
github.com/evanw/esbuild v0.25.8
github.com/fsnotify/fsnotify v1.9.0
github.com/gen2brain/beeep v0.0.0-20240516210008-9c006672e7f4
github.com/glebarez/sqlite v1.11.0
github.com/go-toast/toast v0.0.0-20190211030409-01e6764cf0a4
github.com/goccy/go-json v0.10.5
github.com/gocolly/colly v1.2.0
github.com/gonutz/w32/v2 v2.12.1
github.com/google/go-querystring v1.1.0
github.com/google/uuid v1.6.0
github.com/gorilla/websocket v1.5.3
github.com/hekmon/transmissionrpc/v3 v3.0.0
github.com/huin/goupnp v1.3.0
github.com/imroc/req/v3 v3.54.0
github.com/kr/pretty v0.3.1
github.com/labstack/echo/v4 v4.13.4
github.com/mileusna/useragent v1.3.5
github.com/mmcdole/gofeed v1.3.0
github.com/ncruces/go-dns v1.2.7
github.com/neilotoole/streamcache v0.3.5
github.com/nwaples/rardecode/v2 v2.1.1
github.com/pkg/errors v0.9.1
github.com/rs/zerolog v1.34.0
github.com/samber/lo v1.51.0
github.com/samber/mo v1.15.0
github.com/sourcegraph/conc v0.3.0
github.com/spf13/viper v1.20.1
github.com/stretchr/testify v1.10.0
github.com/xfrr/goffmpeg v1.0.0
github.com/ziflex/lecho/v3 v3.8.0
golang.org/x/crypto v0.41.0
golang.org/x/image v0.30.0
golang.org/x/net v0.43.0
golang.org/x/term v0.34.0
golang.org/x/text v0.28.0
golang.org/x/time v0.12.0
gopkg.in/vansante/go-ffprobe.v2 v2.2.1
gorm.io/gorm v1.30.1
)
require github.com/Eyevinn/hls-m3u8 v0.6.0
require (
github.com/99designs/gqlgen v0.17.54 // indirect
github.com/RoaringBitmap/roaring v1.2.3 // indirect
github.com/ajwerner/btree v0.0.0-20211221152037-f427b3e689c0 // indirect
github.com/alecthomas/atomic v0.1.0-alpha2 // indirect
github.com/anacrolix/chansync v0.4.1-0.20240627045151-1aa1ac392fe8 // indirect
github.com/anacrolix/dht/v2 v2.19.2-0.20221121215055-066ad8494444 // indirect
github.com/anacrolix/envpprof v1.3.0 // indirect
github.com/anacrolix/generics v0.0.3-0.20240902042256-7fb2702ef0ca // indirect
github.com/anacrolix/go-libutp v1.3.2 // indirect
github.com/anacrolix/missinggo v1.3.0 // indirect
github.com/anacrolix/missinggo/perf v1.0.0 // indirect
github.com/anacrolix/missinggo/v2 v2.7.4 // indirect
github.com/anacrolix/mmsg v1.0.1 // indirect
github.com/anacrolix/multiless v0.4.0 // indirect
github.com/anacrolix/stm v0.4.0 // indirect
github.com/anacrolix/sync v0.5.1 // indirect
github.com/anacrolix/upnp v0.1.4 // indirect
github.com/anacrolix/utp v0.1.0 // indirect
github.com/andybalholm/brotli v1.2.0 // indirect
github.com/andybalholm/cascadia v1.3.3 // indirect
github.com/antchfx/htmlquery v1.3.2 // indirect
github.com/antchfx/xmlquery v1.4.1 // indirect
github.com/antchfx/xpath v1.3.1 // indirect
github.com/asticode/go-astikit v0.20.0 // indirect
github.com/asticode/go-astits v1.8.0 // indirect
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
github.com/bahlo/generic-list-go v0.2.0 // indirect
github.com/benbjohnson/immutable v0.3.0 // indirect
github.com/bits-and-blooms/bitset v1.2.2 // indirect
github.com/bradfitz/iter v0.0.0-20191230175014-e8f45d346db8 // indirect
github.com/cespare/xxhash v1.1.0 // indirect
github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc // indirect
github.com/charmbracelet/x/ansi v0.8.0 // indirect
github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd // indirect
github.com/charmbracelet/x/term v0.2.1 // indirect
github.com/cloudflare/circl v1.6.1 // indirect
github.com/dlclark/regexp2 v1.11.4 // indirect
github.com/dop251/base64dec v0.0.0-20231022112746-c6c9f9a96217 // indirect
github.com/edsrzf/mmap-go v1.1.0 // indirect
github.com/glebarez/go-sqlite v1.21.2 // indirect
github.com/go-llsqlite/adapter v0.0.0-20230927005056-7f5ce7f0c916 // indirect
github.com/go-llsqlite/crawshaw v0.5.2-0.20240425034140-f30eb7704568 // indirect
github.com/go-logr/logr v1.4.2 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/go-sourcemap/sourcemap v2.1.4+incompatible // indirect
github.com/go-viper/mapstructure/v2 v2.2.1 // indirect
github.com/gobwas/glob v0.2.3 // indirect
github.com/godbus/dbus/v5 v5.1.0 // indirect
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
github.com/golang/protobuf v1.5.4 // indirect
github.com/google/btree v1.1.2 // indirect
github.com/google/pprof v0.0.0-20250423184734-337e5dd93bb4 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
github.com/hashicorp/go-multierror v1.1.1 // indirect
github.com/hekmon/cunits/v2 v2.1.0 // indirect
github.com/huandu/xstrings v1.3.2 // indirect
github.com/icholy/digest v1.1.0 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect
github.com/jinzhu/now v1.1.5 // indirect
github.com/json-iterator/go v1.1.12 // indirect
github.com/kennygrant/sanitize v1.2.4 // indirect
github.com/klauspost/compress v1.18.0 // indirect
github.com/klauspost/cpuid/v2 v2.2.3 // indirect
github.com/kr/text v0.2.0 // indirect
github.com/labstack/gommon v0.4.2 // indirect
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
github.com/mattn/go-colorable v0.1.14 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mattn/go-runewidth v0.0.16 // indirect
github.com/minio/sha256-simd v1.0.0 // indirect
github.com/mmcdole/goxpp v1.1.1-0.20240225020742-a0c311522b23 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/mr-tron/base58 v1.2.0 // indirect
github.com/mschoch/smat v0.2.0 // indirect
github.com/muesli/termenv v0.16.0 // indirect
github.com/multiformats/go-multihash v0.2.3 // indirect
github.com/multiformats/go-varint v0.0.6 // indirect
github.com/ncruces/go-strftime v0.1.9 // indirect
github.com/neilotoole/fifomu v0.1.2 // indirect
github.com/nu7hatch/gouuid v0.0.0-20131221200532-179d4d0c4d8d // indirect
github.com/pelletier/go-toml/v2 v2.2.3 // indirect
github.com/pion/datachannel v1.5.9 // indirect
github.com/pion/dtls/v3 v3.0.3 // indirect
github.com/pion/ice/v4 v4.0.2 // indirect
github.com/pion/interceptor v0.1.37 // indirect
github.com/pion/logging v0.2.2 // indirect
github.com/pion/mdns/v2 v2.0.7 // indirect
github.com/pion/randutil v0.1.0 // indirect
github.com/pion/rtcp v1.2.14 // indirect
github.com/pion/rtp v1.8.9 // indirect
github.com/pion/sctp v1.8.33 // indirect
github.com/pion/sdp/v3 v3.0.9 // indirect
github.com/pion/srtp/v3 v3.0.4 // indirect
github.com/pion/stun/v3 v3.0.0 // indirect
github.com/pion/transport/v3 v3.0.7 // indirect
github.com/pion/turn/v4 v4.0.0 // indirect
github.com/pion/webrtc/v4 v4.0.0 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/protolambda/ctxlock v0.1.0 // indirect
github.com/quic-go/qpack v0.5.1 // indirect
github.com/quic-go/quic-go v0.53.0 // indirect
github.com/refraction-networking/utls v1.7.3 // indirect
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
github.com/rivo/uniseg v0.4.7 // indirect
github.com/rogpeppe/go-internal v1.12.0 // indirect
github.com/rs/dnscache v0.0.0-20211102005908-e0241e321417 // indirect
github.com/sagikazarmark/locafero v0.7.0 // indirect
github.com/saintfish/chardet v0.0.0-20230101081208-5e3ef4b5456d // indirect
github.com/sosodev/duration v1.3.1 // indirect
github.com/spaolacci/murmur3 v1.1.0 // indirect
github.com/spf13/afero v1.12.0 // indirect
github.com/spf13/cast v1.7.1
github.com/spf13/pflag v1.0.6 // indirect
github.com/subosito/gotenv v1.6.0 // indirect
github.com/tadvi/systray v0.0.0-20190226123456-11a2b8fa57af // indirect
github.com/temoto/robotstxt v1.1.2 // indirect
github.com/tidwall/btree v1.6.0 // indirect
github.com/valyala/bytebufferpool v1.0.0 // indirect
github.com/valyala/fasttemplate v1.2.2 // indirect
github.com/vektah/gqlparser/v2 v2.5.16 // indirect
github.com/wlynxg/anet v0.0.3 // indirect
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
go.etcd.io/bbolt v1.3.6 // indirect
go.opentelemetry.io/otel v1.29.0 // indirect
go.opentelemetry.io/otel/metric v1.29.0 // indirect
go.opentelemetry.io/otel/trace v1.29.0 // indirect
go.uber.org/atomic v1.11.0 // indirect
go.uber.org/mock v0.5.2 // indirect
go.uber.org/multierr v1.9.0 // indirect
golang.org/x/exp v0.0.0-20250106191152-7588d65b2ba8 // indirect
golang.org/x/mod v0.26.0 // indirect
golang.org/x/sync v0.16.0
golang.org/x/sys v0.35.0 // indirect
golang.org/x/tools v0.35.0 // indirect
google.golang.org/appengine v1.6.8 // indirect
google.golang.org/protobuf v1.36.6 // indirect
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
lukechampine.com/blake3 v1.1.6 // indirect
modernc.org/libc v1.41.0 // indirect
modernc.org/mathutil v1.6.0 // indirect
modernc.org/memory v1.7.2 // indirect
modernc.org/sqlite v1.23.1 // indirect
zombiezen.com/go/sqlite v0.13.1 // indirect
)

800
seanime-2.9.10/go.sum Normal file
View File

@@ -0,0 +1,800 @@
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
crawshaw.io/iox v0.0.0-20181124134642-c51c3df30797/go.mod h1:sXBiorCo8c46JlQV3oXPKINnZ8mcqnye1EkVkqsectk=
crawshaw.io/sqlite v0.3.2/go.mod h1:igAO5JulrQ1DbdZdtVq48mnZUBAPOeFzer7VhDWNtW4=
filippo.io/edwards25519 v1.0.0-rc.1 h1:m0VOOB23frXZvAOK44usCgLWvtsxIoMCTBGJZlpmGfU=
filippo.io/edwards25519 v1.0.0-rc.1/go.mod h1:N1IkdkCkiLB6tki+MYJoSx2JTY9NUlxZE7eHn5EwJns=
fyne.io/systray v1.11.0 h1:D9HISlxSkx+jHSniMBR6fCFOUjk1x/OOOJLa9lJYAKg=
fyne.io/systray v1.11.0/go.mod h1:RVwqP9nYMo7h5zViCBHri2FgjXF7H2cub7MAq4NSoLs=
github.com/5rahim/go-astisub v0.2.1 h1:DVPOJmrIIY5i5yyQgJzFqfakuZpbiWzioRKPacNW4BY=
github.com/5rahim/go-astisub v0.2.1/go.mod h1:UgSAUWCMt+ifn227w/g1n7XSlqDYFHMYPYAl/84jljU=
github.com/5rahim/gomkv v0.2.1 h1:Xl1H64vke40XLg4QjdNfnDYWmwV9pJNGSlYi3b4jrSY=
github.com/5rahim/gomkv v0.2.1/go.mod h1:yRpTeQRAG46ozjeaydjJWB7FiMtlqavTkM1vfqtv7j8=
github.com/5rahim/habari v0.1.7 h1:MBcsneiZPEL+bIWHXqhcrht2Pjubi2rWn8O7WQHbJkA=
github.com/5rahim/habari v0.1.7/go.mod h1:0nBj4/5OxTAoIICP4P3+/YJGNf8L7w+gnU1ivj7nFJA=
github.com/99designs/gqlgen v0.17.54 h1:AsF49k/7RJlwA00RQYsYN0T8cQuaosnV/7G1dHC3Uh8=
github.com/99designs/gqlgen v0.17.54/go.mod h1:77/+pVe6zlTsz++oUg2m8VLgzdUPHxjoAG3BxI5y8Rc=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/Eyevinn/hls-m3u8 v0.6.0 h1:i4eyofj5zStgUPcy+UwUQ4oOgcLJVGbrw4XOcxVMVw8=
github.com/Eyevinn/hls-m3u8 v0.6.0/go.mod h1:9jzVfwCo1+TC6yz+TKDBt9gIshzI9fhVE7M5AhcOSnQ=
github.com/Masterminds/semver/v3 v3.4.0 h1:Zog+i5UMtVoCU8oKka5P7i9q9HgrJeGzI9SA1Xbatp0=
github.com/Masterminds/semver/v3 v3.4.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM=
github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU=
github.com/OneOfOne/xxhash v1.2.2 h1:KMrpdQIwFcEqXDklaen+P1axHaj9BSKzvpUUfnHldSE=
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
github.com/PuerkitoBio/goquery v1.10.3 h1:pFYcNSqHxBD06Fpj/KsbStFRsgRATgnf3LeXiUkhzPo=
github.com/PuerkitoBio/goquery v1.10.3/go.mod h1:tMUX0zDMHXYlAQk6p35XxQMqMweEKB7iK7iLNd4RH4Y=
github.com/RoaringBitmap/roaring v0.4.7/go.mod h1:8khRDP4HmeXns4xIj9oGrKSz7XTQiJx2zgh7AcNke4w=
github.com/RoaringBitmap/roaring v0.4.17/go.mod h1:D3qVegWTmfCaX4Bl5CrBE9hfrSrrXIr8KVNvRsDi1NI=
github.com/RoaringBitmap/roaring v0.4.23/go.mod h1:D0gp8kJQgE1A4LQ5wFLggQEyvDi06Mq5mKs52e1TwOo=
github.com/RoaringBitmap/roaring v1.2.3 h1:yqreLINqIrX22ErkKI0vY47/ivtJr6n+kMhVOVmhWBY=
github.com/RoaringBitmap/roaring v1.2.3/go.mod h1:plvDsJQpxOC5bw8LRteu/MLWHsHez/3y6cubLI4/1yE=
github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo=
github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI=
github.com/Yamashou/gqlgenc v0.25.4 h1:b+RMy15GX1p9rtMWvjivX3kxyhROypHh/THruHRRjcE=
github.com/Yamashou/gqlgenc v0.25.4/go.mod h1:G0g1N81xpIklVdnyboW1zwOHcj/n4hNfhTwfN29Rjig=
github.com/adrg/strutil v0.3.1 h1:OLvSS7CSJO8lBii4YmBt8jiK9QOtB9CzCzwl4Ic/Fz4=
github.com/adrg/strutil v0.3.1/go.mod h1:8h90y18QLrs11IBffcGX3NW/GFBXCMcNg4M7H6MspPA=
github.com/ajwerner/btree v0.0.0-20211221152037-f427b3e689c0 h1:byYvvbfSo3+9efR4IeReh77gVs4PnNDR3AMOE9NJ7a0=
github.com/ajwerner/btree v0.0.0-20211221152037-f427b3e689c0/go.mod h1:q37NoqncT41qKc048STsifIt69LfUJ8SrWWcz/yam5k=
github.com/alecthomas/assert/v2 v2.0.0-alpha3 h1:pcHeMvQ3OMstAWgaeaXIAL8uzB9xMm2zlxt+/4ml8lk=
github.com/alecthomas/assert/v2 v2.0.0-alpha3/go.mod h1:+zD0lmDXTeQj7TgDgCt0ePWxb0hMC1G+PGTsTCv1B9o=
github.com/alecthomas/atomic v0.1.0-alpha2 h1:dqwXmax66gXvHhsOS4pGPZKqYOlTkapELkLb3MNdlH8=
github.com/alecthomas/atomic v0.1.0-alpha2/go.mod h1:zD6QGEyw49HIq19caJDc2NMXAy8rNi9ROrxtMXATfyI=
github.com/alecthomas/repr v0.0.0-20210801044451-80ca428c5142 h1:8Uy0oSf5co/NZXje7U1z8Mpep++QJOldL2hs/sBQf48=
github.com/alecthomas/repr v0.0.0-20210801044451-80ca428c5142/go.mod h1:2kn6fqh/zIyPLmm3ugklbEi5hg5wS435eygvNfaDQL8=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/anacrolix/chansync v0.4.1-0.20240627045151-1aa1ac392fe8 h1:eyb0bBaQKMOh5Se/Qg54shijc8K4zpQiOjEhKFADkQM=
github.com/anacrolix/chansync v0.4.1-0.20240627045151-1aa1ac392fe8/go.mod h1:DZsatdsdXxD0WiwcGl0nJVwyjCKMDv+knl1q2iBjA2k=
github.com/anacrolix/dht/v2 v2.19.2-0.20221121215055-066ad8494444 h1:8V0K09lrGoeT2KRJNOtspA7q+OMxGwQqK/Ug0IiaaRE=
github.com/anacrolix/dht/v2 v2.19.2-0.20221121215055-066ad8494444/go.mod h1:MctKM1HS5YYDb3F30NGJxLE+QPuqWoT5ReW/4jt8xew=
github.com/anacrolix/envpprof v0.0.0-20180404065416-323002cec2fa/go.mod h1:KgHhUaQMc8cC0+cEflSgCFNFbKwi5h54gqtVn8yhP7c=
github.com/anacrolix/envpprof v1.0.0/go.mod h1:KgHhUaQMc8cC0+cEflSgCFNFbKwi5h54gqtVn8yhP7c=
github.com/anacrolix/envpprof v1.1.0/go.mod h1:My7T5oSqVfEn4MD4Meczkw/f5lSIndGAKu/0SM/rkf4=
github.com/anacrolix/envpprof v1.3.0 h1:WJt9bpuT7A/CDCxPOv/eeZqHWlle/Y0keJUvc6tcJDk=
github.com/anacrolix/envpprof v1.3.0/go.mod h1:7QIG4CaX1uexQ3tqd5+BRa/9e2D02Wcertl6Yh0jCB0=
github.com/anacrolix/generics v0.0.0-20230113004304-d6428d516633/go.mod h1:ff2rHB/joTV03aMSSn/AZNnaIpUw0h3njetGsaXcMy8=
github.com/anacrolix/generics v0.0.3-0.20240902042256-7fb2702ef0ca h1:aiiGqSQWjtVNdi8zUMfA//IrM8fPkv2bWwZVPbDe0wg=
github.com/anacrolix/generics v0.0.3-0.20240902042256-7fb2702ef0ca/go.mod h1:MN3ve08Z3zSV/rTuX/ouI4lNdlfTxgdafQJiLzyNRB8=
github.com/anacrolix/go-libutp v1.3.2 h1:WswiaxTIogchbkzNgGHuHRfbrYLpv4o290mlvcx+++M=
github.com/anacrolix/go-libutp v1.3.2/go.mod h1:fCUiEnXJSe3jsPG554A200Qv+45ZzIIyGEvE56SHmyA=
github.com/anacrolix/log v0.3.0/go.mod h1:lWvLTqzAnCWPJA08T2HCstZi0L1y2Wyvm3FJgwU9jwU=
github.com/anacrolix/log v0.6.0/go.mod h1:lWvLTqzAnCWPJA08T2HCstZi0L1y2Wyvm3FJgwU9jwU=
github.com/anacrolix/log v0.13.1/go.mod h1:D4+CvN8SnruK6zIFS/xPoRJmtvtnxs+CSfDQ+BFxZ68=
github.com/anacrolix/log v0.14.2/go.mod h1:1OmJESOtxQGNMlUO5rcv96Vpp9mfMqXXbe2RdinFLdY=
github.com/anacrolix/log v0.16.0 h1:DSuyb5kAJwl3Y0X1TRcStVrTS9ST9b0BHW+7neE4Xho=
github.com/anacrolix/log v0.16.0/go.mod h1:m0poRtlr41mriZlXBQ9SOVZ8yZBkLjOkDhd5Li5pITA=
github.com/anacrolix/lsan v0.0.0-20211126052245-807000409a62 h1:P04VG6Td13FHMgS5ZBcJX23NPC/fiC4cp9bXwYujdYM=
github.com/anacrolix/lsan v0.0.0-20211126052245-807000409a62/go.mod h1:66cFKPCO7Sl4vbFnAaSq7e4OXtdMhRSBagJGWgmpJbM=
github.com/anacrolix/missinggo v0.0.0-20180725070939-60ef2fbf63df/go.mod h1:kwGiTUTZ0+p4vAz3VbAI5a30t2YbvemcmspjKwrAz5s=
github.com/anacrolix/missinggo v1.1.0/go.mod h1:MBJu3Sk/k3ZfGYcS7z18gwfu72Ey/xopPFJJbTi5yIo=
github.com/anacrolix/missinggo v1.1.2-0.20190815015349-b888af804467/go.mod h1:MBJu3Sk/k3ZfGYcS7z18gwfu72Ey/xopPFJJbTi5yIo=
github.com/anacrolix/missinggo v1.2.1/go.mod h1:J5cMhif8jPmFoC3+Uvob3OXXNIhOUikzMt+uUjeM21Y=
github.com/anacrolix/missinggo v1.3.0 h1:06HlMsudotL7BAELRZs0yDZ4yVXsHXGi323QBjAVASw=
github.com/anacrolix/missinggo v1.3.0/go.mod h1:bqHm8cE8xr+15uVfMG3BFui/TxyB6//H5fwlq/TeqMc=
github.com/anacrolix/missinggo/perf v1.0.0 h1:7ZOGYziGEBytW49+KmYGTaNfnwUqP1HBsy6BqESAJVw=
github.com/anacrolix/missinggo/perf v1.0.0/go.mod h1:ljAFWkBuzkO12MQclXzZrosP5urunoLS0Cbvb4V0uMQ=
github.com/anacrolix/missinggo/v2 v2.2.0/go.mod h1:o0jgJoYOyaoYQ4E2ZMISVa9c88BbUBVQQW4QeRkNCGY=
github.com/anacrolix/missinggo/v2 v2.5.1/go.mod h1:WEjqh2rmKECd0t1VhQkLGTdIWXO6f6NLjp5GlMZ+6FA=
github.com/anacrolix/missinggo/v2 v2.7.4 h1:47h5OXoPV8JbA/ACA+FLwKdYbAinuDO8osc2Cu9xkxg=
github.com/anacrolix/missinggo/v2 v2.7.4/go.mod h1:vVO5FEziQm+NFmJesc7StpkquZk+WJFCaL0Wp//2sa0=
github.com/anacrolix/mmsg v1.0.1 h1:TxfpV7kX70m3f/O7ielL/2I3OFkMPjrRCPo7+4X5AWw=
github.com/anacrolix/mmsg v1.0.1/go.mod h1:x8kRaJY/dCrY9Al0PEcj1mb/uFHwP6GCJ9fLl4thEPc=
github.com/anacrolix/multiless v0.4.0 h1:lqSszHkliMsZd2hsyrDvHOw4AbYWa+ijQ66LzbjqWjM=
github.com/anacrolix/multiless v0.4.0/go.mod h1:zJv1JF9AqdZiHwxqPgjuOZDGWER6nyE48WBCi/OOrMM=
github.com/anacrolix/stm v0.2.0/go.mod h1:zoVQRvSiGjGoTmbM0vSLIiaKjWtNPeTvXUSdJQA4hsg=
github.com/anacrolix/stm v0.4.0 h1:tOGvuFwaBjeu1u9X1eIh9TX8OEedEiEQ1se1FjhFnXY=
github.com/anacrolix/stm v0.4.0/go.mod h1:GCkwqWoAsP7RfLW+jw+Z0ovrt2OO7wRzcTtFYMYY5t8=
github.com/anacrolix/sync v0.0.0-20180808010631-44578de4e778/go.mod h1:s735Etp3joe/voe2sdaXLcqDdJSay1O0OPnM0ystjqk=
github.com/anacrolix/sync v0.3.0/go.mod h1:BbecHL6jDSExojhNtgTFSBcdGerzNc64tz3DCOj/I0g=
github.com/anacrolix/sync v0.5.1 h1:FbGju6GqSjzVoTgcXTUKkF041lnZkG5P0C3T5RL3SGc=
github.com/anacrolix/sync v0.5.1/go.mod h1:BbecHL6jDSExojhNtgTFSBcdGerzNc64tz3DCOj/I0g=
github.com/anacrolix/tagflag v0.0.0-20180109131632-2146c8d41bf0/go.mod h1:1m2U/K6ZT+JZG0+bdMK6qauP49QT4wE5pmhJXOKKCHw=
github.com/anacrolix/tagflag v1.0.0/go.mod h1:1m2U/K6ZT+JZG0+bdMK6qauP49QT4wE5pmhJXOKKCHw=
github.com/anacrolix/tagflag v1.1.0/go.mod h1:Scxs9CV10NQatSmbyjqmqmeQNwGzlNe0CMUMIxqHIG8=
github.com/anacrolix/torrent v1.58.1 h1:6FP+KH57b1gyT2CpVL9fEqf9MGJEgh3xw1VA8rI0pW8=
github.com/anacrolix/torrent v1.58.1/go.mod h1:/7ZdLuHNKgtCE1gjYJCfbtG9JodBcDaF5ip5EUWRtk8=
github.com/anacrolix/upnp v0.1.4 h1:+2t2KA6QOhm/49zeNyeVwDu1ZYS9dB9wfxyVvh/wk7U=
github.com/anacrolix/upnp v0.1.4/go.mod h1:Qyhbqo69gwNWvEk1xNTXsS5j7hMHef9hdr984+9fIic=
github.com/anacrolix/utp v0.1.0 h1:FOpQOmIwYsnENnz7tAGohA+r6iXpRjrq8ssKSre2Cp4=
github.com/anacrolix/utp v0.1.0/go.mod h1:MDwc+vsGEq7RMw6lr2GKOEqjWny5hO5OZXRVNaBJ2Dk=
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883 h1:bvNMNQO63//z+xNgfBlViaCIJKLlCJ6/fmUseuG0wVQ=
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8=
github.com/andybalholm/brotli v1.2.0 h1:ukwgCxwYrmACq68yiUqwIWnGY0cTPox/M94sVwToPjQ=
github.com/andybalholm/brotli v1.2.0/go.mod h1:rzTDkvFWvIrjDXZHkuS16NPggd91W3kUSvPlQ1pLaKY=
github.com/andybalholm/cascadia v1.3.3 h1:AG2YHrzJIm4BZ19iwJ/DAua6Btl3IwJX+VI4kktS1LM=
github.com/andybalholm/cascadia v1.3.3/go.mod h1:xNd9bqTn98Ln4DwST8/nG+H0yuB8Hmgu1YHNnWw0GeA=
github.com/antchfx/htmlquery v1.3.2 h1:85YdttVkR1rAY+Oiv/nKI4FCimID+NXhDn82kz3mEvs=
github.com/antchfx/htmlquery v1.3.2/go.mod h1:1mbkcEgEarAokJiWhTfr4hR06w/q2ZZjnYLrDt6CTUk=
github.com/antchfx/xmlquery v1.4.1 h1:YgpSwbeWvLp557YFTi8E3z6t6/hYjmFEtiEKbDfEbl0=
github.com/antchfx/xmlquery v1.4.1/go.mod h1:lKezcT8ELGt8kW5L+ckFMTbgdR61/odpPgDv8Gvi1fI=
github.com/antchfx/xpath v1.3.1 h1:PNbFuUqHwWl0xRjvUPjJ95Agbmdj2uzzIwmQKgu4oCk=
github.com/antchfx/xpath v1.3.1/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs=
github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
github.com/asticode/go-astikit v0.20.0 h1:+7N+J4E4lWx2QOkRdOf6DafWJMv6O4RRfgClwQokrH8=
github.com/asticode/go-astikit v0.20.0/go.mod h1:h4ly7idim1tNhaVkdVBeXQZEE3L0xblP7fCWbgwipF0=
github.com/asticode/go-astits v1.8.0 h1:rf6aiiGn/QhlFjNON1n5plqF3Fs025XLUwiQ0NB6oZg=
github.com/asticode/go-astits v1.8.0/go.mod h1:DkOWmBNQpnr9mv24KfZjq4JawCFX1FCqjLVGvO0DygQ=
github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k=
github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8=
github.com/bahlo/generic-list-go v0.2.0 h1:5sz/EEAK+ls5wF+NeqDpk5+iNdMDXrh3z3nPnH1Wvgk=
github.com/bahlo/generic-list-go v0.2.0/go.mod h1:2KvAjgMlE5NNynlg/5iLrrCCZ2+5xWbdbCW3pNTGyYg=
github.com/benbjohnson/immutable v0.2.0/go.mod h1:uc6OHo6PN2++n98KHLxW8ef4W42ylHiQSENghE1ezxI=
github.com/benbjohnson/immutable v0.3.0 h1:TVRhuZx2wG9SZ0LRdqlbs9S5BZ6Y24hJEHTCgWHZEIw=
github.com/benbjohnson/immutable v0.3.0/go.mod h1:uc6OHo6PN2++n98KHLxW8ef4W42ylHiQSENghE1ezxI=
github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
github.com/bits-and-blooms/bitset v1.2.0/go.mod h1:gIdJ4wp64HaoK2YrL1Q5/N7Y16edYb8uY+O0FJTyyDA=
github.com/bits-and-blooms/bitset v1.2.2 h1:J5gbX05GpMdBjCvQ9MteIg2KKDExr7DrgK+Yc15FvIk=
github.com/bits-and-blooms/bitset v1.2.2/go.mod h1:gIdJ4wp64HaoK2YrL1Q5/N7Y16edYb8uY+O0FJTyyDA=
github.com/bmatcuk/doublestar/v4 v4.9.1 h1:X8jg9rRZmJd4yRy7ZeNDRnM+T3ZfHv15JiBJ/avrEXE=
github.com/bmatcuk/doublestar/v4 v4.9.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc=
github.com/bradfitz/iter v0.0.0-20140124041915-454541ec3da2/go.mod h1:PyRFw1Lt2wKX4ZVSQ2mk+PeDa1rxyObEDlApuIsUKuo=
github.com/bradfitz/iter v0.0.0-20190303215204-33e6a9893b0c/go.mod h1:PyRFw1Lt2wKX4ZVSQ2mk+PeDa1rxyObEDlApuIsUKuo=
github.com/bradfitz/iter v0.0.0-20191230175014-e8f45d346db8 h1:GKTyiRCL6zVf5wWaqKnf+7Qs6GbEPfd4iMOitWzXJx8=
github.com/bradfitz/iter v0.0.0-20191230175014-e8f45d346db8/go.mod h1:spo1JLcs67NmW1aVLEgtA8Yy1elc+X8y5SRW1sFW4Og=
github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko=
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc h1:4pZI35227imm7yK2bGPcfpFEmuY1gc2YSTShr4iJBfs=
github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc/go.mod h1:X4/0JoqgTIPSFcRA/P6INZzIuyqdFY5rm8tb41s9okk=
github.com/charmbracelet/lipgloss v1.1.0 h1:vYXsiLHVkK7fp74RkV7b2kq9+zDLoEU4MZoFqR/noCY=
github.com/charmbracelet/lipgloss v1.1.0/go.mod h1:/6Q8FR2o+kj8rz4Dq0zQc3vYf7X+B0binUUBwA0aL30=
github.com/charmbracelet/x/ansi v0.8.0 h1:9GTq3xq9caJW8ZrBTe0LIe2fvfLR/bYXKTx2llXn7xE=
github.com/charmbracelet/x/ansi v0.8.0/go.mod h1:wdYl/ONOLHLIVmQaxbIYEC/cRKOQyjTkowiI4blgS9Q=
github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd h1:vy0GVL4jeHEwG5YOXDmi86oYw2yuYUGqz6a8sLwg0X8=
github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd/go.mod h1:xe0nKWGd3eJgtqZRaN9RjMtK7xUYchjzPr7q6kcvCCs=
github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ=
github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg=
github.com/cli/browser v1.3.0 h1:LejqCrpWr+1pRqmEPDGnTZOjsMe7sehifLynZJuqJpo=
github.com/cli/browser v1.3.0/go.mod h1:HH8s+fOAxjhQoBUAsKuPCbqUuxZDhQ2/aD+SzsEfBTk=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/cloudflare/circl v1.6.1 h1:zqIqSPIndyBh1bjLVVDHMPpVKqp8Su/V+6MeDzzQBQ0=
github.com/cloudflare/circl v1.6.1/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs=
github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dlclark/regexp2 v1.11.4 h1:rPYF9/LECdNymJufQKmri9gV604RvvABwgOA8un7yAo=
github.com/dlclark/regexp2 v1.11.4/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE=
github.com/dop251/base64dec v0.0.0-20231022112746-c6c9f9a96217 h1:16iT9CBDOniJwFGPI41MbUDfEk74hFaKTqudrX8kenY=
github.com/dop251/base64dec v0.0.0-20231022112746-c6c9f9a96217/go.mod h1:eIb+f24U+eWQCIsj9D/ah+MD9UP+wdxuqzsdLD+mhGM=
github.com/dop251/goja v0.0.0-20250531102226-cb187b08699c h1:In87uFQZsuGfjDDNfWnzMVY6JVTwc8XYMl6W2DAmNjk=
github.com/dop251/goja v0.0.0-20250531102226-cb187b08699c/go.mod h1:MxLav0peU43GgvwVgNbLAj1s/bSGboKkhuULvq/7hx4=
github.com/dop251/goja_nodejs v0.0.0-20250409162600-f7acab6894b0 h1:fuHXpEVTTk7TilRdfGRLHpiTD6tnT0ihEowCfWjlFvw=
github.com/dop251/goja_nodejs v0.0.0-20250409162600-f7acab6894b0/go.mod h1:Tb7Xxye4LX7cT3i8YLvmPMGCV92IOi4CDZvm/V8ylc0=
github.com/dustin/go-humanize v0.0.0-20180421182945-02af3965c54e/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs=
github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU=
github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I=
github.com/edsrzf/mmap-go v1.1.0 h1:6EUwBLQ/Mcr1EYLE4Tn1VdW1A4ckqCQWZBw8Hr0kjpQ=
github.com/edsrzf/mmap-go v1.1.0/go.mod h1:19H/e8pUPLicwkyNgOykDXkJ9F0MHE+Z52B8EIth78Q=
github.com/evanw/esbuild v0.25.8 h1:nSMdIN7nu2UH6APeDSpaQnz90JOPJxcVZe9DfI0ezjc=
github.com/evanw/esbuild v0.25.8/go.mod h1:D2vIQZqV/vIf/VRHtViaUtViZmG7o+kKmlBfVQuRi48=
github.com/frankban/quicktest v1.9.0/go.mod h1:ui7WezCLWMWxVWr1GETZY3smRy0G4KWq9vcPtJmFl7Y=
github.com/frankban/quicktest v1.14.4/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k=
github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0=
github.com/gen2brain/beeep v0.0.0-20240516210008-9c006672e7f4 h1:ygs9POGDQpQGLJPlq4+0LBUmMBNox1N4JSpw+OETcvI=
github.com/gen2brain/beeep v0.0.0-20240516210008-9c006672e7f4/go.mod h1:0W7dI87PvXJ1Sjs0QPvWXKcQmNERY77e8l7GFhZB/s4=
github.com/glebarez/go-sqlite v1.21.2 h1:3a6LFC4sKahUunAmynQKLZceZCOzUthkRkEAl9gAXWo=
github.com/glebarez/go-sqlite v1.21.2/go.mod h1:sfxdZyhQjTM2Wry3gVYWaW072Ri1WMdWJi0k6+3382k=
github.com/glebarez/sqlite v1.11.0 h1:wSG0irqzP6VurnMEpFGer5Li19RpIRi2qvQz++w0GMw=
github.com/glebarez/sqlite v1.11.0/go.mod h1:h8/o8j5wiAsqSPoWELDUdJXhjAhsVliSn7bWZjOhrgQ=
github.com/glycerine/go-unsnap-stream v0.0.0-20180323001048-9f0cb55181dd/go.mod h1:/20jfyN9Y5QPEAprSgKAUr+glWDY39ZiUEAYOEv5dsE=
github.com/glycerine/go-unsnap-stream v0.0.0-20181221182339-f9677308dec2/go.mod h1:/20jfyN9Y5QPEAprSgKAUr+glWDY39ZiUEAYOEv5dsE=
github.com/glycerine/go-unsnap-stream v0.0.0-20190901134440-81cf024a9e0a/go.mod h1:/20jfyN9Y5QPEAprSgKAUr+glWDY39ZiUEAYOEv5dsE=
github.com/glycerine/goconvey v0.0.0-20180728074245-46e3a41ad493/go.mod h1:Ogl1Tioa0aV7gstGFO7KhffUsb9M4ydbEbbxpcEDc24=
github.com/glycerine/goconvey v0.0.0-20190315024820-982ee783a72e/go.mod h1:Ogl1Tioa0aV7gstGFO7KhffUsb9M4ydbEbbxpcEDc24=
github.com/glycerine/goconvey v0.0.0-20190410193231-58a59202ab31/go.mod h1:Ogl1Tioa0aV7gstGFO7KhffUsb9M4ydbEbbxpcEDc24=
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-llsqlite/adapter v0.0.0-20230927005056-7f5ce7f0c916 h1:OyQmpAN302wAopDgwVjgs2HkFawP9ahIEqkUYz7V7CA=
github.com/go-llsqlite/adapter v0.0.0-20230927005056-7f5ce7f0c916/go.mod h1:DADrR88ONKPPeSGjFp5iEN55Arx3fi2qXZeKCYDpbmU=
github.com/go-llsqlite/crawshaw v0.5.2-0.20240425034140-f30eb7704568 h1:3EpZo8LxIzF4q3BT+vttQQlRfA6uTtTb/cxVisWa5HM=
github.com/go-llsqlite/crawshaw v0.5.2-0.20240425034140-f30eb7704568/go.mod h1:/YJdV7uBQaYDE0fwe4z3wwJIZBJxdYzd38ICggWqtaE=
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY=
github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
github.com/go-quicktest/qt v1.101.0 h1:O1K29Txy5P2OK0dGo59b7b0LR6wKfIhttaAhHUyn7eI=
github.com/go-quicktest/qt v1.101.0/go.mod h1:14Bz/f7NwaXPtdYEgzsx46kqSxVwTbzVZsDC26tQJow=
github.com/go-sourcemap/sourcemap v2.1.4+incompatible h1:a+iTbH5auLKxaNwQFg0B+TCYl6lbukKPc7b5x0n1s6Q=
github.com/go-sourcemap/sourcemap v2.1.4+incompatible/go.mod h1:F8jJfvm2KbVjc5NqelyYJmf/v5J0dwNLS2mL4sNA1Jg=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/go-toast/toast v0.0.0-20190211030409-01e6764cf0a4 h1:qZNfIGkIANxGv/OqtnntR4DfOY2+BgwR60cAcu/i3SE=
github.com/go-toast/toast v0.0.0-20190211030409-01e6764cf0a4/go.mod h1:kW3HQ4UdaAyrUCSSDR4xUzBKW6O2iA4uHhk7AtyYp10=
github.com/go-viper/mapstructure/v2 v2.2.1 h1:ZAaOCxANMuZx5RCeg0mBdEZk7DZasvvZIxtHqx8aGss=
github.com/go-viper/mapstructure/v2 v2.2.1/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=
github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=
github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4=
github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
github.com/gocolly/colly v1.2.0 h1:qRz9YAn8FIH0qzgNUw+HT9UN7wm1oF9OBAilwEWpyrI=
github.com/gocolly/colly v1.2.0/go.mod h1:Hof5T3ZswNVsOHYmba1u03W65HDWgpV5HifSuueE0EA=
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
github.com/godbus/dbus/v5 v5.1.0 h1:4KLkAxT3aOY8Li4FRJe/KvhoNFFxo0m6fNuFUO8QJUk=
github.com/godbus/dbus/v5 v5.1.0/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE=
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8=
github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=
github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs=
github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w=
github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0=
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/gonutz/w32/v2 v2.12.1 h1:ZTWg6ZlETDfWK1Qxx+rdWQdQWZwfhiXoyvxzFYdgsUY=
github.com/gonutz/w32/v2 v2.12.1/go.mod h1:MgtHx0AScDVNKyB+kjyPder4xIi3XAcHS6LDDU2DmdE=
github.com/google/btree v0.0.0-20180124185431-e89373fe6b4a/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/btree v1.1.2 h1:xf4v41cLI2Z6FxbKm+8Bu+m8ifhj15JuZ9sa0jZCMUU=
github.com/google/btree v1.1.2/go.mod h1:qOPhT0dTNdNzV6Z/lhRX0YXUafgPLFUh+gZMl761Gm4=
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8=
github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/pprof v0.0.0-20250423184734-337e5dd93bb4 h1:gD0vax+4I+mAj+jEChEf25Ia07Jq7kYOFO5PPhAxFl4=
github.com/google/pprof v0.0.0-20250423184734-337e5dd93bb4/go.mod h1:5hDyRhoBCxViHszMt12TnOpEI4VVi+U8Gm9iphldiMA=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gopherjs/gopherjs v0.0.0-20181103185306-d547d1d9531e/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gopherjs/gopherjs v0.0.0-20190309154008-847fc94819f9/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gopherjs/gopherjs v0.0.0-20190910122728-9d188e94fb99/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg=
github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I=
github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ=
github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48=
github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo=
github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hekmon/cunits/v2 v2.1.0 h1:k6wIjc4PlacNOHwKEMBgWV2/c8jyD4eRMs5mR1BBhI0=
github.com/hekmon/cunits/v2 v2.1.0/go.mod h1:9r1TycXYXaTmEWlAIfFV8JT+Xo59U96yUJAYHxzii2M=
github.com/hekmon/transmissionrpc/v3 v3.0.0 h1:0Fb11qE0IBh4V4GlOwHNYpqpjcYDp5GouolwrpmcUDQ=
github.com/hekmon/transmissionrpc/v3 v3.0.0/go.mod h1:38SlNhFzinVUuY87wGj3acOmRxeYZAZfrj6Re7UgCDg=
github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg=
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
github.com/huandu/xstrings v1.0.0/go.mod h1:4qWG/gcEcfX4z/mBDHJ++3ReCw9ibxbsNJbcucJdbSo=
github.com/huandu/xstrings v1.2.0/go.mod h1:DvyZB1rfVYsBIigL8HwpZgxHwXozlTgGqn63UyNX5k4=
github.com/huandu/xstrings v1.3.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE=
github.com/huandu/xstrings v1.3.1/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE=
github.com/huandu/xstrings v1.3.2 h1:L18LIDzqlW6xN2rEkpdV8+oL/IXWJ1APd+vsdYy4Wdw=
github.com/huandu/xstrings v1.3.2/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE=
github.com/huin/goupnp v1.3.0 h1:UvLUlWDNpoUdYzb2TCn+MuTWtcjXKSza2n6CBdQ0xXc=
github.com/huin/goupnp v1.3.0/go.mod h1:gnGPsThkYa7bFi/KWmEysQRf48l2dvR5bxr2OFckNX8=
github.com/icholy/digest v1.1.0 h1:HfGg9Irj7i+IX1o1QAmPfIBNu/Q5A5Tu3n/MED9k9H4=
github.com/icholy/digest v1.1.0/go.mod h1:QNrsSGQ5v7v9cReDI0+eyjsXGUoRSUZQHeQ5C4XLa0Y=
github.com/imroc/req/v3 v3.54.0 h1:kwWJSpT7OvjJ/Q8ykp+69Ye5H486RKDcgEoepw1Ren4=
github.com/imroc/req/v3 v3.54.0/go.mod h1:P8gCJjG/XNUFeP6WOi40VAXfYwT+uPM00xvoBWiwzUQ=
github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ=
github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
github.com/jtolds/gls v4.2.1+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
github.com/kennygrant/sanitize v1.2.4 h1:gN25/otpP5vAsO2djbMhF/LQX6R7+O1TB4yv8NzpJ3o=
github.com/kennygrant/sanitize v1.2.4/go.mod h1:LGsjYYtgxbetdg5owWB2mpgUL6e2nfw2eObZ0u0qvak=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ=
github.com/klauspost/cpuid/v2 v2.0.4/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
github.com/klauspost/cpuid/v2 v2.2.3 h1:sxCkb+qR91z4vsqw4vGGZlDgPz3G7gjaLyK3V8y70BU=
github.com/klauspost/cpuid/v2 v2.2.3/go.mod h1:RVVoqg1df56z8g3pUjL/3lE5UfnlrJX8tyFgg4nqhuY=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/labstack/echo/v4 v4.13.4 h1:oTZZW+T3s9gAu5L8vmzihV7/lkXGZuITzTQkTEhcXEA=
github.com/labstack/echo/v4 v4.13.4/go.mod h1:g63b33BZ5vZzcIUF8AtRH40DrTlXnx4UMC8rBdndmjQ=
github.com/labstack/gommon v0.4.2 h1:F8qTUNXgG1+6WQmqoUWnz8WiEU60mXVVw0P4ht1WRA0=
github.com/labstack/gommon v0.4.2/go.mod h1:QlUFxVM+SNXhDL/Z7YhocGIBYOiwB0mXm1+1bAPHPyU=
github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
github.com/matryer/is v1.4.1 h1:55ehd8zaGABKLXQUe2awZ99BD/PTc2ls+KV/dXphgEQ=
github.com/matryer/is v1.4.1/go.mod h1:8I/i5uYgLzgsgEloJE1U6xx5HkBQpAZvepWuujKwMRU=
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE=
github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/mileusna/useragent v1.3.5 h1:SJM5NzBmh/hO+4LGeATKpaEX9+b4vcGg2qXGLiNGDws=
github.com/mileusna/useragent v1.3.5/go.mod h1:3d8TOmwL/5I8pJjyVDteHtgDGcefrFUX4ccGOMKNYYc=
github.com/minio/sha256-simd v1.0.0 h1:v1ta+49hkWZyvaKwrQB8elexRqm6Y0aMLjCNsrYxo6g=
github.com/minio/sha256-simd v1.0.0/go.mod h1:OuYzVNI5vcoYIAmbIvHPl3N3jUzVedXbKy5RFepssQM=
github.com/mmcdole/gofeed v1.3.0 h1:5yn+HeqlcvjMeAI4gu6T+crm7d0anY85+M+v6fIFNG4=
github.com/mmcdole/gofeed v1.3.0/go.mod h1:9TGv2LcJhdXePDzxiuMnukhV2/zb6VtnZt1mS+SjkLE=
github.com/mmcdole/goxpp v1.1.1-0.20240225020742-a0c311522b23 h1:Zr92CAlFhy2gL+V1F+EyIuzbQNbSgP4xhTODZtrXUtk=
github.com/mmcdole/goxpp v1.1.1-0.20240225020742-a0c311522b23/go.mod h1:v+25+lT2ViuQ7mVxcncQ8ch1URund48oH+jhjiwEgS8=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/mr-tron/base58 v1.2.0 h1:T/HDJBh4ZCPbU39/+c3rRvE0uKBQlU27+QI8LJ4t64o=
github.com/mr-tron/base58 v1.2.0/go.mod h1:BinMc/sQntlIE1frQmRFPUoPA1Zkr8VRgBdjWI2mNwc=
github.com/mschoch/smat v0.0.0-20160514031455-90eadee771ae/go.mod h1:qAyveg+e4CE+eKJXWVjKXM4ck2QobLqTDytGJbLLhJg=
github.com/mschoch/smat v0.2.0 h1:8imxQsjDm8yFEAVBe7azKmKSgzSkZXDuKkSq9374khM=
github.com/mschoch/smat v0.2.0/go.mod h1:kc9mz7DoBKqDyiRL7VZN8KvXQMWeTaVnttLRXOlotKw=
github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc=
github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk=
github.com/multiformats/go-multihash v0.2.3 h1:7Lyc8XfX/IY2jWb/gI7JP+o7JEq9hOa7BFvVU9RSh+U=
github.com/multiformats/go-multihash v0.2.3/go.mod h1:dXgKXCXjBzdscBLk9JkjINiEsCKRVch90MdaGiKsvSM=
github.com/multiformats/go-varint v0.0.6 h1:gk85QWKxh3TazbLxED/NlDVv8+q+ReFJk7Y2W/KhfNY=
github.com/multiformats/go-varint v0.0.6/go.mod h1:3Ls8CIEsrijN6+B7PbrXRPxHRPuXSrVKRY101jdMZYE=
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/ncruces/go-dns v1.2.7 h1:NMA7vFqXUl+nBhGFlleLyo2ni3Lqv3v+qFWZidzRemI=
github.com/ncruces/go-dns v1.2.7/go.mod h1:SqmhVMBd8Wr7hsu3q6yTt6/Jno/xLMrbse/JLOMBo1Y=
github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4=
github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
github.com/neilotoole/fifomu v0.1.2 h1:sgJhcOTlEXGVj/nS5Bb8/qV+1wgmk+KPavcNuDw0rDM=
github.com/neilotoole/fifomu v0.1.2/go.mod h1:9di2j+xBgr+nX6IPmpwQVxKt6yzgPLk9WXEj/aLwcao=
github.com/neilotoole/streamcache v0.3.5 h1:8YVgTcd3OpTC46zduXJE4WAhTjxQCEBHYVGDgQ6R3ss=
github.com/neilotoole/streamcache v0.3.5/go.mod h1:yYJLcdAWI6jMeSSIfQ8vIydWxsrdAdGGNbYMXvWKV6M=
github.com/nu7hatch/gouuid v0.0.0-20131221200532-179d4d0c4d8d h1:VhgPp6v9qf9Agr/56bj7Y/xa04UccTW04VP0Qed4vnQ=
github.com/nu7hatch/gouuid v0.0.0-20131221200532-179d4d0c4d8d/go.mod h1:YUTz3bUH2ZwIWBy3CJBeOBEugqcmXREj14T+iG/4k4U=
github.com/nwaples/rardecode/v2 v2.1.1 h1:OJaYalXdliBUXPmC8CZGQ7oZDxzX1/5mQmgn0/GASew=
github.com/nwaples/rardecode/v2 v2.1.1/go.mod h1:7uz379lSxPe6j9nvzxUZ+n7mnJNgjsRNb6IbvGVHRmw=
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/openzipkin/zipkin-go v0.1.6/go.mod h1:QgAqvLzwWbR/WpD4A3cGpPtJrZXNIiJc5AZX7/PBEpw=
github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M=
github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc=
github.com/philhofer/fwd v1.0.0/go.mod h1:gk3iGcWd9+svBvR0sR+KPcfE+RNWozjowpeBVG3ZVNU=
github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
github.com/pion/datachannel v1.5.9 h1:LpIWAOYPyDrXtU+BW7X0Yt/vGtYxtXQ8ql7dFfYUVZA=
github.com/pion/datachannel v1.5.9/go.mod h1:kDUuk4CU4Uxp82NH4LQZbISULkX/HtzKa4P7ldf9izE=
github.com/pion/dtls/v3 v3.0.3 h1:j5ajZbQwff7Z8k3pE3S+rQ4STvKvXUdKsi/07ka+OWM=
github.com/pion/dtls/v3 v3.0.3/go.mod h1:weOTUyIV4z0bQaVzKe8kpaP17+us3yAuiQsEAG1STMU=
github.com/pion/ice/v4 v4.0.2 h1:1JhBRX8iQLi0+TfcavTjPjI6GO41MFn4CeTBX+Y9h5s=
github.com/pion/ice/v4 v4.0.2/go.mod h1:DCdqyzgtsDNYN6/3U8044j3U7qsJ9KFJC92VnOWHvXg=
github.com/pion/interceptor v0.1.37 h1:aRA8Zpab/wE7/c0O3fh1PqY0AJI3fCSEM5lRWJVorwI=
github.com/pion/interceptor v0.1.37/go.mod h1:JzxbJ4umVTlZAf+/utHzNesY8tmRkM2lVmkS82TTj8Y=
github.com/pion/logging v0.2.2 h1:M9+AIj/+pxNsDfAT64+MAVgJO0rsyLnoJKCqf//DoeY=
github.com/pion/logging v0.2.2/go.mod h1:k0/tDVsRCX2Mb2ZEmTqNa7CWsQPc+YYCB7Q+5pahoms=
github.com/pion/mdns/v2 v2.0.7 h1:c9kM8ewCgjslaAmicYMFQIde2H9/lrZpjBkN8VwoVtM=
github.com/pion/mdns/v2 v2.0.7/go.mod h1:vAdSYNAT0Jy3Ru0zl2YiW3Rm/fJCwIeM0nToenfOJKA=
github.com/pion/randutil v0.1.0 h1:CFG1UdESneORglEsnimhUjf33Rwjubwj6xfiOXBa3mA=
github.com/pion/randutil v0.1.0/go.mod h1:XcJrSMMbbMRhASFVOlj/5hQial/Y8oH/HVo7TBZq+j8=
github.com/pion/rtcp v1.2.14 h1:KCkGV3vJ+4DAJmvP0vaQShsb0xkRfWkO540Gy102KyE=
github.com/pion/rtcp v1.2.14/go.mod h1:sn6qjxvnwyAkkPzPULIbVqSKI5Dv54Rv7VG0kNxh9L4=
github.com/pion/rtp v1.8.9 h1:E2HX740TZKaqdcPmf4pw6ZZuG8u5RlMMt+l3dxeu6Wk=
github.com/pion/rtp v1.8.9/go.mod h1:pBGHaFt/yW7bf1jjWAoUjpSNoDnw98KTMg+jWWvziqU=
github.com/pion/sctp v1.8.33 h1:dSE4wX6uTJBcNm8+YlMg7lw1wqyKHggsP5uKbdj+NZw=
github.com/pion/sctp v1.8.33/go.mod h1:beTnqSzewI53KWoG3nqB282oDMGrhNxBdb+JZnkCwRM=
github.com/pion/sdp/v3 v3.0.9 h1:pX++dCHoHUwq43kuwf3PyJfHlwIj4hXA7Vrifiq0IJY=
github.com/pion/sdp/v3 v3.0.9/go.mod h1:B5xmvENq5IXJimIO4zfp6LAe1fD9N+kFv+V/1lOdz8M=
github.com/pion/srtp/v3 v3.0.4 h1:2Z6vDVxzrX3UHEgrUyIGM4rRouoC7v+NiF1IHtp9B5M=
github.com/pion/srtp/v3 v3.0.4/go.mod h1:1Jx3FwDoxpRaTh1oRV8A/6G1BnFL+QI82eK4ms8EEJQ=
github.com/pion/stun/v3 v3.0.0 h1:4h1gwhWLWuZWOJIJR9s2ferRO+W3zA/b6ijOI6mKzUw=
github.com/pion/stun/v3 v3.0.0/go.mod h1:HvCN8txt8mwi4FBvS3EmDghW6aQJ24T+y+1TKjB5jyU=
github.com/pion/transport/v3 v3.0.7 h1:iRbMH05BzSNwhILHoBoAPxoB9xQgOaJk+591KC9P1o0=
github.com/pion/transport/v3 v3.0.7/go.mod h1:YleKiTZ4vqNxVwh77Z0zytYi7rXHl7j6uPLGhhz9rwo=
github.com/pion/turn/v4 v4.0.0 h1:qxplo3Rxa9Yg1xXDxxH8xaqcyGUtbHYw4QSCvmFWvhM=
github.com/pion/turn/v4 v4.0.0/go.mod h1:MuPDkm15nYSklKpN8vWJ9W2M0PlyQZqYt1McGuxG7mA=
github.com/pion/webrtc/v4 v4.0.0 h1:x8ec7uJQPP3D1iI8ojPAiTOylPI7Fa7QgqZrhpLyqZ8=
github.com/pion/webrtc/v4 v4.0.0/go.mod h1:SfNn8CcFxR6OUVjLXVslAQ3a3994JhyE3Hw1jAuqEto=
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/profile v1.4.0/go.mod h1:NWz/XGvpEW1FyYQ7fCx4dqYBLlfTcE+A9FLAkNKqjFE=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
github.com/prometheus/client_golang v0.9.3-0.20190127221311-3c4408c8b829/go.mod h1:p2iRAGwDERtqlqzRXnrOVns+ignqQo//hLXqYxZYVNs=
github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo=
github.com/prometheus/client_golang v1.5.1/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU=
github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/common v0.2.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4=
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A=
github.com/prometheus/procfs v0.0.11/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU=
github.com/protolambda/ctxlock v0.1.0 h1:rCUY3+vRdcdZXqT07iXgyr744J2DU2LCBIXowYAjBCE=
github.com/protolambda/ctxlock v0.1.0/go.mod h1:vefhX6rIZH8rsg5ZpOJfEDYQOppZi19SfPiGOFrNnwM=
github.com/quic-go/qpack v0.5.1 h1:giqksBPnT/HDtZ6VhtFKgoLOWmlyo9Ei6u9PqzIMbhI=
github.com/quic-go/qpack v0.5.1/go.mod h1:+PC4XFrEskIVkcLzpEkbLqq1uCoxPhQuvK5rH1ZgaEg=
github.com/quic-go/quic-go v0.53.0 h1:QHX46sISpG2S03dPeZBgVIZp8dGagIaiu2FiVYvpCZI=
github.com/quic-go/quic-go v0.53.0/go.mod h1:e68ZEaCdyviluZmy44P6Iey98v/Wfz6HCjQEm+l8zTY=
github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
github.com/refraction-networking/utls v1.7.3 h1:L0WRhHY7Oq1T0zkdzVZMR6zWZv+sXbHB9zcuvsAEqCo=
github.com/refraction-networking/utls v1.7.3/go.mod h1:TUhh27RHMGtQvjQq+RyO11P6ZNQNBb3N0v7wsEjKAIQ=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE=
github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8=
github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4=
github.com/rs/dnscache v0.0.0-20211102005908-e0241e321417 h1:Lt9DzQALzHoDwMBGJ6v8ObDPR0dzr2a6sXTB1Fq7IHs=
github.com/rs/dnscache v0.0.0-20211102005908-e0241e321417/go.mod h1:qe5TWALJ8/a1Lqznoc5BDHpYX/8HU60Hm2AwRmqzxqA=
github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0=
github.com/rs/zerolog v1.34.0 h1:k43nTLIwcTVQAncfCw4KZ2VY6ukYoZaBPNOE8txlOeY=
github.com/rs/zerolog v1.34.0/go.mod h1:bJsvje4Z08ROH4Nhs5iH600c3IkWhwp44iRc54W6wYQ=
github.com/ryszard/goskiplist v0.0.0-20150312221310-2dfbae5fcf46 h1:GHRpF1pTW19a8tTFrMLUcfWwyC0pnifVo2ClaLq+hP8=
github.com/ryszard/goskiplist v0.0.0-20150312221310-2dfbae5fcf46/go.mod h1:uAQ5PCi+MFsC7HjREoAz1BU+Mq60+05gifQSsHSDG/8=
github.com/sagikazarmark/locafero v0.7.0 h1:5MqpDsTGNDhY8sGp0Aowyf0qKsPrhewaLSsFaodPcyo=
github.com/sagikazarmark/locafero v0.7.0/go.mod h1:2za3Cg5rMaTMoG/2Ulr9AwtFaIppKXTRYnozin4aB5k=
github.com/saintfish/chardet v0.0.0-20230101081208-5e3ef4b5456d h1:hrujxIzL1woJ7AwssoOcM/tq5JjjG2yYOc8odClEiXA=
github.com/saintfish/chardet v0.0.0-20230101081208-5e3ef4b5456d/go.mod h1:uugorj2VCxiV1x+LzaIdVa9b4S4qGAcH6cbhh4qVxOU=
github.com/samber/lo v1.51.0 h1:kysRYLbHy/MB7kQZf5DSN50JHmMsNEdeY24VzJFu7wI=
github.com/samber/lo v1.51.0/go.mod h1:4+MXEGsJzbKGaUEQFKBq2xtfuznW9oz/WrgyzMzRoM0=
github.com/samber/mo v1.15.0 h1:fxe9ouq0Mo7obixDigMJnmaDusT1yrvWHcqM8MmNDNU=
github.com/samber/mo v1.15.0/go.mod h1:BfkrCPuYzVG3ZljnZB783WIJIGk1mcZr9c9CPf8tAxs=
github.com/sergi/go-diff v1.3.1 h1:xkr+Oxo4BOQKmkn/B9eMK0g5Kg/983T9DqqPHwYqD+8=
github.com/sergi/go-diff v1.3.1/go.mod h1:aMJSSKb2lpPvRNec0+w3fl7LP9IOFzdc9Pa4NFbPK1I=
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
github.com/smartystreets/assertions v0.0.0-20190215210624-980c5ac6f3ac/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
github.com/smartystreets/goconvey v0.0.0-20181108003508-044398e4856c/go.mod h1:XDJAKZRPZ1CvBcN2aX5YOUTYGHki24fSF0Iv48Ibg0s=
github.com/smartystreets/goconvey v0.0.0-20190306220146-200a235640ff/go.mod h1:KSQcGKpxUMHk3nbYzs/tIBAM2iDooCn0BmttHOJEbLs=
github.com/sosodev/duration v1.3.1 h1:qtHBDMQ6lvMQsL15g4aopM4HEfOaYuhWBw3NPTtlqq4=
github.com/sosodev/duration v1.3.1/go.mod h1:RQIBBX0+fMLc/D9+Jb/fwvVmo0eZvDDEERAikUR6SDg=
github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo=
github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0=
github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
github.com/spaolacci/murmur3 v1.1.0 h1:7c1g84S4BPRrfL5Xrdp6fOJ206sU9y293DDHaoy0bLI=
github.com/spaolacci/murmur3 v1.1.0/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
github.com/spf13/afero v1.12.0 h1:UcOPyRBYczmFn6yvphxkn9ZEOY65cpwGKb5mL36mrqs=
github.com/spf13/afero v1.12.0/go.mod h1:ZTlWwG4/ahT8W7T0WQ5uYmjI9duaLQGy3Q2OAl4sk/4=
github.com/spf13/cast v1.7.1 h1:cuNEagBQEHWN1FnbGEjCXL2szYEXqfJPbP2HNUaca9Y=
github.com/spf13/cast v1.7.1/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo=
github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o=
github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/viper v1.20.1 h1:ZMi+z/lvLyPSCoNtFCpqjy0S4kPbirhpTMwl8BkW9X4=
github.com/spf13/viper v1.20.1/go.mod h1:P9Mdzt1zoHIG8m2eZQinpiBjo6kCmZSKBClNNqjJvu4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
github.com/stretchr/testify v1.2.1/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
github.com/tadvi/systray v0.0.0-20190226123456-11a2b8fa57af h1:6yITBqGTE2lEeTPG04SN9W+iWHCRyHqlVYILiSXziwk=
github.com/tadvi/systray v0.0.0-20190226123456-11a2b8fa57af/go.mod h1:4F09kP5F+am0jAwlQLddpoMDM+iewkxxt6nxUQ5nq5o=
github.com/temoto/robotstxt v1.1.2 h1:W2pOjSJ6SWvldyEuiFXNxz3xZ8aiWX5LbfDiOFd7Fxg=
github.com/temoto/robotstxt v1.1.2/go.mod h1:+1AmkuG3IYkh1kv0d2qEB9Le88ehNO0zwOr3ujewlOo=
github.com/tidwall/btree v1.6.0 h1:LDZfKfQIBHGHWSwckhXI0RPSXzlo+KYdjK7FWSqOzzg=
github.com/tidwall/btree v1.6.0/go.mod h1:twD9XRA5jj9VUQGELzDO4HPQTNJsoWWfYEL+EUQ2cKY=
github.com/tinylib/msgp v1.0.2/go.mod h1:+d+yLhGm8mzTaHzB+wgMYrodPfmZrzkirds8fDWklFE=
github.com/tinylib/msgp v1.1.0/go.mod h1:+d+yLhGm8mzTaHzB+wgMYrodPfmZrzkirds8fDWklFE=
github.com/tinylib/msgp v1.1.2/go.mod h1:+d+yLhGm8mzTaHzB+wgMYrodPfmZrzkirds8fDWklFE=
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
github.com/valyala/fasttemplate v1.2.2 h1:lxLXG0uE3Qnshl9QyaK6XJxMXlQZELvChBOCmQD0Loo=
github.com/valyala/fasttemplate v1.2.2/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ=
github.com/vektah/gqlparser/v2 v2.5.16 h1:1gcmLTvs3JLKXckwCwlUagVn/IlV2bwqle0vJ0vy5p8=
github.com/vektah/gqlparser/v2 v2.5.16/go.mod h1:1lz1OeCqgQbQepsGxPVywrjdBHW2T08PUS3pJqepRww=
github.com/willf/bitset v1.1.9/go.mod h1:RjeCKbqT1RxIR/KWY6phxZiaY1IyutSBfGjNPySAYV4=
github.com/willf/bitset v1.1.10/go.mod h1:RjeCKbqT1RxIR/KWY6phxZiaY1IyutSBfGjNPySAYV4=
github.com/wlynxg/anet v0.0.3 h1:PvR53psxFXstc12jelG6f1Lv4MWqE0tI76/hHGjh9rg=
github.com/wlynxg/anet v0.0.3/go.mod h1:eay5PRQr7fIVAMbTbchTnO9gG65Hg/uYGdc7mguHxoA=
github.com/xfrr/goffmpeg v1.0.0 h1:trxuLNb9ys50YlV7gTVNAII9J0r00WWqCGTE46Gc3XU=
github.com/xfrr/goffmpeg v1.0.0/go.mod h1:zjLRiirHnip+/hVAT3lVE3QZ6SGynr0hcctUMNNISdQ=
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no=
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM=
github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU=
github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E=
github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
github.com/ziflex/lecho/v3 v3.8.0 h1:de/IyTw5jykpb0GKGk7Di5Y6IeeLpr2PdEBvTvsktD0=
github.com/ziflex/lecho/v3 v3.8.0/go.mod h1:2GzFCQn/W809nLzikFiHkubtU08QRXyE6+VQ9nAhHPE=
go.etcd.io/bbolt v1.3.6 h1:/ecaJf0sk1l4l6V4awd65v2C3ILy7MSj+s/x1ADCIMU=
go.etcd.io/bbolt v1.3.6/go.mod h1:qXsaaIqmgQH0T+OPdb99Bf+PKfBBQVAdyD6TY9G8XM4=
go.opencensus.io v0.20.1/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk=
go.opencensus.io v0.20.2/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk=
go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opentelemetry.io/otel v1.29.0 h1:PdomN/Al4q/lN6iBJEN3AwPvUiHPMlt93c8bqTG5Llw=
go.opentelemetry.io/otel v1.29.0/go.mod h1:N/WtXPs1CNCUEx+Agz5uouwCba+i+bJGFicT8SR4NP8=
go.opentelemetry.io/otel/metric v1.29.0 h1:vPf/HFWTNkPu1aYeIsc98l4ktOQaL6LeSoeV2g+8YLc=
go.opentelemetry.io/otel/metric v1.29.0/go.mod h1:auu/QWieFVWx+DmQOUMgj0F8LHWdgalxXqvp7BII/W8=
go.opentelemetry.io/otel/trace v1.29.0 h1:J/8ZNK4XgR7a21DZUAsbF8pZ5Jcw1VhACmnYt39JTi4=
go.opentelemetry.io/otel/trace v1.29.0/go.mod h1:eHl3w0sp3paPkYstJOmAimxhiFXPg+MMTlEh3nsQgWQ=
go.uber.org/atomic v1.11.0 h1:ZvwS0R+56ePWxUNi+Atn9dWONBPp/AUETXlHW0DxSjE=
go.uber.org/atomic v1.11.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0=
go.uber.org/mock v0.5.2 h1:LbtPTcP8A5k9WPXj54PPPbjcI4Y6lhyOZXn+VS7wNko=
go.uber.org/mock v0.5.2/go.mod h1:wLlUxC2vVTPTaE3UD51E0BGOAElKrILxhVSDYQLld5o=
go.uber.org/multierr v1.9.0 h1:7fIwc/ZtS0q++VgcfqFDxSBZVv/Xo49/SYnDFupUwlI=
go.uber.org/multierr v1.9.0/go.mod h1:X2jQV1h+kxSjClGpnseKVIxpmcjrj7MNnI0bnlfKTVQ=
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4=
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4=
golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20220428152302-39d4317da171/go.mod h1:lgLbSvA5ygNOMpwM/9anMpWVlVJ7Z+cHWq/eFuinpGE=
golang.org/x/exp v0.0.0-20250106191152-7588d65b2ba8 h1:yqrTHse8TCMW1M1ZCP+VAR/l0kKxwaAIqN/il7x4voA=
golang.org/x/exp v0.0.0-20250106191152-7588d65b2ba8/go.mod h1:tujkw807nyEEAamNbDrEGzRav+ilXA7PCRAd6xsmwiU=
golang.org/x/image v0.30.0 h1:jD5RhkmVAnjqaCUXfbGBrn3lpxbknfN9w2UhHHU+5B4=
golang.org/x/image v0.30.0/go.mod h1:SAEUTxCCMWSrJcCy/4HwavEsfZZJlYxeHLc6tTiAe/c=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
golang.org/x/mod v0.6.0-dev.0.20211013180041-c96bc1413d57/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.26.0 h1:EGMPT//Ezu+ylkCijjPc+f4Aih7sZvaAr+O3EHBxvZg=
golang.org/x/mod v0.26.0/go.mod h1:/j6NAhSk8iQ723BGAUyoAcn7SlD7s15Dp9Nd/SfeaFQ=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190125091013-d26f9f9a57f3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200904194848-62affa334b73/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
golang.org/x/net v0.43.0 h1:lat02VYK2j4aLzMzecihNvTlJNQUq316m2Mr9rnM6YE=
golang.org/x/net v0.43.0/go.mod h1:vhO1fvI4dGsIjh73sWfUVjj3N7CA9WkKJNQm2svM6Jg=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw=
golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200923182605-d9f96fdee20d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI=
golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U=
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
golang.org/x/term v0.34.0 h1:O/2T7POpk0ZZ7MAzMeWFSg6S5IpWd/RXDlM9hgM3DR4=
golang.org/x/term v0.34.0/go.mod h1:5jC53AEywhIVebHgPVeg0mj8OD3VO9OzclacVrqpaAw=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng=
golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU=
golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE=
golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg=
golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.8-0.20211029000441-d6a9af8af023/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
golang.org/x/tools v0.35.0 h1:mBffYraMEf7aa0sB+NuKnuCy8qI/9Bughn8dC2Gu5r0=
golang.org/x/tools v0.35.0/go.mod h1:NKdj5HkL/73byiZSJjqJgKn3ep7KjFkBOkR/Hps3VPw=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM=
google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds=
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE=
google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo=
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY=
google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY=
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
gopkg.in/vansante/go-ffprobe.v2 v2.2.1 h1:sFV08OT1eZ1yroLCZVClIVd9YySgCh9eGjBWO0oRayI=
gopkg.in/vansante/go-ffprobe.v2 v2.2.1/go.mod h1:qF0AlAjk7Nqzqf3y333Ly+KxN3cKF2JqA3JT5ZheUGE=
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gorm.io/gorm v1.30.1 h1:lSHg33jJTBxs2mgJRfRZeLDG+WZaHYCk3Wtfl6Ngzo4=
gorm.io/gorm v1.30.1/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE=
gotest.tools/v3 v3.5.1 h1:EENdUnS3pdur5nybKYIh2Vfgc8IUNBjxDPSjtiJcOzU=
gotest.tools/v3 v3.5.1/go.mod h1:isy3WKz7GK6uNw/sbHzfKBLvlvXwUyV06n6brMxxopU=
honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
lukechampine.com/blake3 v1.1.6 h1:H3cROdztr7RCfoaTpGZFQsrqvweFLrqS73j7L7cmR5c=
lukechampine.com/blake3 v1.1.6/go.mod h1:tkKEOtDkNtklkXtLNEOGNq5tcV90tJiA1vAA12R78LA=
modernc.org/libc v1.41.0 h1:g9YAc6BkKlgORsUWj+JwqoB1wU3o4DE3bM3yvA3k+Gk=
modernc.org/libc v1.41.0/go.mod h1:w0eszPsiXoOnoMJgrXjglgLuDy/bt5RR4y3QzUUeodY=
modernc.org/mathutil v1.6.0 h1:fRe9+AmYlaej+64JsEEhoWuAYBkOtQiMEU7n/XgfYi4=
modernc.org/mathutil v1.6.0/go.mod h1:Ui5Q9q1TR2gFm0AQRqQUaBWFLAhQpCwNcuhBOSedWPo=
modernc.org/memory v1.7.2 h1:Klh90S215mmH8c9gO98QxQFsY+W451E8AnzjoE2ee1E=
modernc.org/memory v1.7.2/go.mod h1:NO4NVCQy0N7ln+T9ngWqOQfi7ley4vpwvARR+Hjw95E=
modernc.org/sqlite v1.23.1 h1:nrSBg4aRQQwq59JpvGEQ15tNxoO5pX/kUjcRNwSAGQM=
modernc.org/sqlite v1.23.1/go.mod h1:OrDj17Mggn6MhE+iPbBNf7RGKODDE9NFT0f3EwDzJqk=
zombiezen.com/go/sqlite v0.13.1 h1:qDzxyWWmMtSSEH5qxamqBFmqA2BLSSbtODi3ojaE02o=
zombiezen.com/go/sqlite v0.13.1/go.mod h1:Ht/5Rg3Ae2hoyh1I7gbWtWAl89CNocfqeb/aAMTkJr4=

View File

@@ -0,0 +1,83 @@
<p align="center">
<img src="../docs/images/logo_2.png" alt="preview" width="150px"/>
</p>
<h2 align="center"><b>Seanime Server</b></h2>
- `api`: Third-party APIs
  - `anilist`: AniList structs and methods
  - `anizip`: Metadata API
  - `filler`: Filler API
  - `listsync`
  - `mal`: MyAnimeList API
  - `mappings`: Mapping API
  - `metadata`: **Metadata module** for anime
  - `tvdb`: TheTVDB API
- `constants`: Version, keys
- `core`
  - `app.go`: **Shared app struct** (see the sketch after this list)
  - `config.go`: Configuration
  - `extensions.go`: Load built-in extensions
  - `fiber.go`: HTTP server
  - `watcher.go`: Library watcher
- `cron`: Background tasks
- `database`
  - `db`: **Database module**
  - `db_bridge`: Helper methods to avoid circular dependencies
  - `models`: Database models
- `debrid`: **Debrid module**
  - `debrid`: Structs and interfaces
  - `client`: **Debrid repository** for streaming, download
  - `torbox`
  - `realdebrid`
- `discordrpc`: Discord RPC
  - `client`
  - `ipc`
  - `presence`: **Discord Rich Presence module**
- `events`: **Websocket Event Manager module** and constants
- `extensions`: Structs and interfaces
- `extension_playground`: **Extension Playground module**
- `extension_repo`: **Extension Repository module**
- `handlers`: API handlers
- `library`
  - `anime`: Library structs and methods
  - `autodownloader`: **Auto downloader module**
  - `autoscanner`: **Auto scanner module**
  - `filesystem`: File system methods
  - `playbackmanager`: **Playback Manager module** for progress tracking
  - `scanner`: **Scanner module**
  - `summary`: Scan summary
- `manga`: Manga structs and **Manga Downloader module**
  - `downloader`: Chapter downloader structs and methods
  - `providers`: Online provider structs and methods
- `mediaplayers`
  - `mediaplayer`: **Media Player Repository** module
  - `mpchc`
  - `mpv`
  - `mpvipc`
  - `vlc`
- `mediastream`: **Media Stream Repository** module
  - `transcoder`: Transcoder
  - `videofile`: Media metadata
- `notifier`
- `onlinestream`: **Onlinestream module**
  - `providers`: Stream providers
  - `sources`: Video server sources
- `platforms`
  - `platform`: Platform structs and methods
  - `anilist_platform`
  - `local_platform`
- `test_utils`: Test methods
- `torrentstream`: **Torrent Stream Repository** module
- `sync`: **Sync/Offline module**
- `test_utils`: Test methods
- `torrent_clients`
  - `torrent_client`: **Torrent Client Repository** module
  - `qbittorrent`
  - `transmission`
- `torrents`
  - `analyzer`: Scan and identify torrent files
  - `animetosho`
  - `nyaa`
  - `seadex`
  - `torrent`: Torrent structs and methods
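
The modules above are wired together through the shared app struct in `core/app.go` and exposed to the HTTP layer through `handlers`. The snippet below is only a minimal, self-contained sketch of that pattern; every type, field, route and port in it is hypothetical and not taken from the actual codebase.

```go
package main

import (
	"fmt"
	"net/http"
)

// Stand-ins for two of the modules listed above (hypothetical types).
type Scanner struct{}

func (s *Scanner) Scan(dir string) { fmt.Println("scanning", dir) }

type PlaybackManager struct{}

func (p *PlaybackManager) Track(path string) { fmt.Println("tracking", path) }

// App plays the role of the "shared app struct": it owns every module and is
// handed to the handlers, so a handler never constructs its own dependencies.
type App struct {
	Scanner         *Scanner
	PlaybackManager *PlaybackManager
}

// A handler closes over the shared App, mirroring how a handlers package
// would receive it.
func scanHandler(app *App) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		app.Scanner.Scan("/anime")
		fmt.Fprintln(w, "scan started")
	}
}

func main() {
	app := &App{Scanner: &Scanner{}, PlaybackManager: &PlaybackManager{}}
	http.HandleFunc("/api/v1/scan-example", scanHandler(app)) // hypothetical route
	_ = http.ListenAndServe(":8080", nil)                     // arbitrary port for the sketch
}
```

The point of the pattern is that construction happens in one place (`core`) while consumers such as `handlers` and `cron` only receive the already-built struct.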

View File

@@ -0,0 +1,14 @@
model:
  filename: ./models_gen.go
client:
  filename: ./client_gen.go
models:
  DateTime:
    model: github.com/99designs/gqlgen/graphql.Time
endpoint:
  url: https://graphql.anilist.co
query:
  - "./queries/*.graphql"
generate:
  clientV2: true
  clientInterfaceName: "GithubGraphQLClient"
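
This config drives gqlgenc: it generates typed models and a client from the operations in `./queries/*.graphql`, all pointed at the AniList GraphQL endpoint. For orientation only, here is a hand-written sketch of the kind of request the generated client wraps; the query is an ad-hoc example, not one of the project's query files.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

// A raw GraphQL POST against the endpoint configured above. The generated
// clientv2-based client does the same thing behind typed methods; this query
// is only an illustration, not one of the ./queries/*.graphql operations.
func main() {
	payload, _ := json.Marshal(map[string]any{
		"query":     `query ($id: Int) { Media(id: $id, type: ANIME) { id title { romaji } } }`,
		"variables": map[string]any{"id": 1},
	})

	resp, err := http.Post("https://graphql.anilist.co", "application/json", bytes.NewReader(payload))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var out struct {
		Data struct {
			Media struct {
				ID    int `json:"id"`
				Title struct {
					Romaji string `json:"romaji"`
				} `json:"title"`
			} `json:"Media"`
		} `json:"data"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		panic(err)
	}
	fmt.Println(out.Data.Media.ID, out.Data.Media.Title.Romaji)
}
```

The generated client layers typed structs, request interceptors and error handling on top of this, so a raw request like the one above is useful only for inspecting the endpoint.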

View File

@@ -0,0 +1,407 @@
package anilist

import (
	"compress/gzip"
	"context"
	"errors"
	"fmt"
	"io"
	"net/http"
	"seanime/internal/events"
	"seanime/internal/util"
	"strconv"
	"time"

	"github.com/Yamashou/gqlgenc/clientv2"
	"github.com/Yamashou/gqlgenc/graphqljson"
	"github.com/goccy/go-json"
	"github.com/rs/zerolog"
)

var (
	// ErrNotAuthenticated is returned when trying to access an Anilist API endpoint that requires authentication,
	// but the client is not authenticated.
	ErrNotAuthenticated = errors.New("not authenticated")
)

type AnilistClient interface {
	IsAuthenticated() bool
	AnimeCollection(ctx context.Context, userName *string, interceptors ...clientv2.RequestInterceptor) (*AnimeCollection, error)
	AnimeCollectionWithRelations(ctx context.Context, userName *string, interceptors ...clientv2.RequestInterceptor) (*AnimeCollectionWithRelations, error)
	BaseAnimeByMalID(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*BaseAnimeByMalID, error)
	BaseAnimeByID(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*BaseAnimeByID, error)
	SearchBaseAnimeByIds(ctx context.Context, ids []*int, page *int, perPage *int, status []*MediaStatus, inCollection *bool, sort []*MediaSort, season *MediaSeason, year *int, genre *string, format *MediaFormat, interceptors ...clientv2.RequestInterceptor) (*SearchBaseAnimeByIds, error)
	CompleteAnimeByID(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*CompleteAnimeByID, error)
	AnimeDetailsByID(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*AnimeDetailsByID, error)
	ListAnime(ctx context.Context, page *int, search *string, perPage *int, sort []*MediaSort, status []*MediaStatus, genres []*string, averageScoreGreater *int, season *MediaSeason, seasonYear *int, format *MediaFormat, isAdult *bool, interceptors ...clientv2.RequestInterceptor) (*ListAnime, error)
	ListRecentAnime(ctx context.Context, page *int, perPage *int, airingAtGreater *int, airingAtLesser *int, notYetAired *bool, interceptors ...clientv2.RequestInterceptor) (*ListRecentAnime, error)
	UpdateMediaListEntry(ctx context.Context, mediaID *int, status *MediaListStatus, scoreRaw *int, progress *int, startedAt *FuzzyDateInput, completedAt *FuzzyDateInput, interceptors ...clientv2.RequestInterceptor) (*UpdateMediaListEntry, error)
	UpdateMediaListEntryProgress(ctx context.Context, mediaID *int, progress *int, status *MediaListStatus, interceptors ...clientv2.RequestInterceptor) (*UpdateMediaListEntryProgress, error)
	UpdateMediaListEntryRepeat(ctx context.Context, mediaID *int, repeat *int, interceptors ...clientv2.RequestInterceptor) (*UpdateMediaListEntryRepeat, error)
	DeleteEntry(ctx context.Context, mediaListEntryID *int, interceptors ...clientv2.RequestInterceptor) (*DeleteEntry, error)
	MangaCollection(ctx context.Context, userName *string, interceptors ...clientv2.RequestInterceptor) (*MangaCollection, error)
	SearchBaseManga(ctx context.Context, page *int, perPage *int, sort []*MediaSort, search *string, status []*MediaStatus, interceptors ...clientv2.RequestInterceptor) (*SearchBaseManga, error)
	BaseMangaByID(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*BaseMangaByID, error)
	MangaDetailsByID(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*MangaDetailsByID, error)
	ListManga(ctx context.Context, page *int, search *string, perPage *int, sort []*MediaSort, status []*MediaStatus, genres []*string, averageScoreGreater *int, startDateGreater *string, startDateLesser *string, format *MediaFormat, countryOfOrigin *string, isAdult *bool, interceptors ...clientv2.RequestInterceptor) (*ListManga, error)
	ViewerStats(ctx context.Context, interceptors ...clientv2.RequestInterceptor) (*ViewerStats, error)
	StudioDetails(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*StudioDetails, error)
	GetViewer(ctx context.Context, interceptors ...clientv2.RequestInterceptor) (*GetViewer, error)
	AnimeAiringSchedule(ctx context.Context, ids []*int, season *MediaSeason, seasonYear *int, previousSeason *MediaSeason, previousSeasonYear *int, nextSeason *MediaSeason, nextSeasonYear *int, interceptors ...clientv2.RequestInterceptor) (*AnimeAiringSchedule, error)
AnimeAiringScheduleRaw(ctx context.Context, ids []*int, interceptors ...clientv2.RequestInterceptor) (*AnimeAiringScheduleRaw, error)
}
type (
// AnilistClientImpl is a wrapper around the AniList API client.
AnilistClientImpl struct {
Client *Client
logger *zerolog.Logger
token string // The token used for authentication with the AniList API
}
)
// NewAnilistClient creates a new AnilistClientImpl with the given token.
// The token is used for authorization when making requests to the AniList API.
func NewAnilistClient(token string) *AnilistClientImpl {
ac := &AnilistClientImpl{
token: token,
Client: &Client{
Client: clientv2.NewClient(http.DefaultClient, "https://graphql.anilist.co", nil,
func(ctx context.Context, req *http.Request, gqlInfo *clientv2.GQLRequestInfo, res interface{}, next clientv2.RequestInterceptorFunc) error {
req.Header.Set("Content-Type", "application/json")
req.Header.Set("Accept", "application/json")
if len(token) > 0 {
req.Header.Set("Authorization", "Bearer "+token)
}
return next(ctx, req, gqlInfo, res)
}),
},
logger: util.NewLogger(),
}
ac.Client.Client.CustomDo = ac.customDoFunc
return ac
}
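// Example (hypothetical usage, not part of the original file): create a client
// and fetch an anime by its AniList ID. An empty token is fine for public queries.
//
//	client := NewAnilistClient("") // or a user's AniList token
//	id := 1                        // Cowboy Bebop
//	res, err := client.BaseAnimeByID(context.Background(), &id)
//	if err != nil {
//		return err
//	}
//	_ = res.GetMedia().ID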
func (ac *AnilistClientImpl) IsAuthenticated() bool {
if ac.Client == nil || ac.Client.Client == nil {
return false
}
if len(ac.token) == 0 {
return false
}
// If the token is not empty, we are authenticated
return true
}
////////////////////////////////
// Authenticated
////////////////////////////////
func (ac *AnilistClientImpl) UpdateMediaListEntry(ctx context.Context, mediaID *int, status *MediaListStatus, scoreRaw *int, progress *int, startedAt *FuzzyDateInput, completedAt *FuzzyDateInput, interceptors ...clientv2.RequestInterceptor) (*UpdateMediaListEntry, error) {
if !ac.IsAuthenticated() {
return nil, ErrNotAuthenticated
}
ac.logger.Debug().Int("mediaId", *mediaID).Msg("anilist: Updating media list entry")
return ac.Client.UpdateMediaListEntry(ctx, mediaID, status, scoreRaw, progress, startedAt, completedAt, interceptors...)
}
func (ac *AnilistClientImpl) UpdateMediaListEntryProgress(ctx context.Context, mediaID *int, progress *int, status *MediaListStatus, interceptors ...clientv2.RequestInterceptor) (*UpdateMediaListEntryProgress, error) {
if !ac.IsAuthenticated() {
return nil, ErrNotAuthenticated
}
ac.logger.Debug().Int("mediaId", *mediaID).Msg("anilist: Updating media list entry progress")
return ac.Client.UpdateMediaListEntryProgress(ctx, mediaID, progress, status, interceptors...)
}
func (ac *AnilistClientImpl) UpdateMediaListEntryRepeat(ctx context.Context, mediaID *int, repeat *int, interceptors ...clientv2.RequestInterceptor) (*UpdateMediaListEntryRepeat, error) {
if !ac.IsAuthenticated() {
return nil, ErrNotAuthenticated
}
ac.logger.Debug().Int("mediaId", *mediaID).Msg("anilist: Updating media list entry repeat")
return ac.Client.UpdateMediaListEntryRepeat(ctx, mediaID, repeat, interceptors...)
}
func (ac *AnilistClientImpl) DeleteEntry(ctx context.Context, mediaListEntryID *int, interceptors ...clientv2.RequestInterceptor) (*DeleteEntry, error) {
if !ac.IsAuthenticated() {
return nil, ErrNotAuthenticated
}
ac.logger.Debug().Int("entryId", *mediaListEntryID).Msg("anilist: Deleting media list entry")
return ac.Client.DeleteEntry(ctx, mediaListEntryID, interceptors...)
}
func (ac *AnilistClientImpl) AnimeCollection(ctx context.Context, userName *string, interceptors ...clientv2.RequestInterceptor) (*AnimeCollection, error) {
if !ac.IsAuthenticated() {
return nil, ErrNotAuthenticated
}
ac.logger.Debug().Msg("anilist: Fetching anime collection")
return ac.Client.AnimeCollection(ctx, userName, interceptors...)
}
func (ac *AnilistClientImpl) AnimeCollectionWithRelations(ctx context.Context, userName *string, interceptors ...clientv2.RequestInterceptor) (*AnimeCollectionWithRelations, error) {
if !ac.IsAuthenticated() {
return nil, ErrNotAuthenticated
}
ac.logger.Debug().Msg("anilist: Fetching anime collection with relations")
return ac.Client.AnimeCollectionWithRelations(ctx, userName, interceptors...)
}
func (ac *AnilistClientImpl) GetViewer(ctx context.Context, interceptors ...clientv2.RequestInterceptor) (*GetViewer, error) {
if !ac.IsAuthenticated() {
return nil, ErrNotAuthenticated
}
ac.logger.Debug().Msg("anilist: Fetching viewer")
return ac.Client.GetViewer(ctx, interceptors...)
}
func (ac *AnilistClientImpl) MangaCollection(ctx context.Context, userName *string, interceptors ...clientv2.RequestInterceptor) (*MangaCollection, error) {
if !ac.IsAuthenticated() {
return nil, ErrNotAuthenticated
}
ac.logger.Debug().Msg("anilist: Fetching manga collection")
return ac.Client.MangaCollection(ctx, userName, interceptors...)
}
func (ac *AnilistClientImpl) ViewerStats(ctx context.Context, interceptors ...clientv2.RequestInterceptor) (*ViewerStats, error) {
if !ac.IsAuthenticated() {
return nil, ErrNotAuthenticated
}
ac.logger.Debug().Msg("anilist: Fetching stats")
return ac.Client.ViewerStats(ctx, interceptors...)
}
////////////////////////////////
// Not authenticated
////////////////////////////////
func (ac *AnilistClientImpl) BaseAnimeByMalID(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*BaseAnimeByMalID, error) {
return ac.Client.BaseAnimeByMalID(ctx, id, interceptors...)
}
func (ac *AnilistClientImpl) BaseAnimeByID(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*BaseAnimeByID, error) {
ac.logger.Debug().Int("mediaId", *id).Msg("anilist: Fetching anime")
return ac.Client.BaseAnimeByID(ctx, id, interceptors...)
}
func (ac *AnilistClientImpl) AnimeDetailsByID(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*AnimeDetailsByID, error) {
ac.logger.Debug().Int("mediaId", *id).Msg("anilist: Fetching anime details")
return ac.Client.AnimeDetailsByID(ctx, id, interceptors...)
}
func (ac *AnilistClientImpl) CompleteAnimeByID(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*CompleteAnimeByID, error) {
ac.logger.Debug().Int("mediaId", *id).Msg("anilist: Fetching complete media")
return ac.Client.CompleteAnimeByID(ctx, id, interceptors...)
}
func (ac *AnilistClientImpl) ListAnime(ctx context.Context, page *int, search *string, perPage *int, sort []*MediaSort, status []*MediaStatus, genres []*string, averageScoreGreater *int, season *MediaSeason, seasonYear *int, format *MediaFormat, isAdult *bool, interceptors ...clientv2.RequestInterceptor) (*ListAnime, error) {
ac.logger.Debug().Msg("anilist: Fetching media list")
return ac.Client.ListAnime(ctx, page, search, perPage, sort, status, genres, averageScoreGreater, season, seasonYear, format, isAdult, interceptors...)
}
func (ac *AnilistClientImpl) ListRecentAnime(ctx context.Context, page *int, perPage *int, airingAtGreater *int, airingAtLesser *int, notYetAired *bool, interceptors ...clientv2.RequestInterceptor) (*ListRecentAnime, error) {
ac.logger.Debug().Msg("anilist: Fetching recent media list")
return ac.Client.ListRecentAnime(ctx, page, perPage, airingAtGreater, airingAtLesser, notYetAired, interceptors...)
}
func (ac *AnilistClientImpl) SearchBaseManga(ctx context.Context, page *int, perPage *int, sort []*MediaSort, search *string, status []*MediaStatus, interceptors ...clientv2.RequestInterceptor) (*SearchBaseManga, error) {
ac.logger.Debug().Msg("anilist: Searching manga")
return ac.Client.SearchBaseManga(ctx, page, perPage, sort, search, status, interceptors...)
}
func (ac *AnilistClientImpl) BaseMangaByID(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*BaseMangaByID, error) {
ac.logger.Debug().Int("mediaId", *id).Msg("anilist: Fetching manga")
return ac.Client.BaseMangaByID(ctx, id, interceptors...)
}
func (ac *AnilistClientImpl) MangaDetailsByID(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*MangaDetailsByID, error) {
ac.logger.Debug().Int("mediaId", *id).Msg("anilist: Fetching manga details")
return ac.Client.MangaDetailsByID(ctx, id, interceptors...)
}
func (ac *AnilistClientImpl) ListManga(ctx context.Context, page *int, search *string, perPage *int, sort []*MediaSort, status []*MediaStatus, genres []*string, averageScoreGreater *int, startDateGreater *string, startDateLesser *string, format *MediaFormat, countryOfOrigin *string, isAdult *bool, interceptors ...clientv2.RequestInterceptor) (*ListManga, error) {
ac.logger.Debug().Msg("anilist: Fetching manga list")
return ac.Client.ListManga(ctx, page, search, perPage, sort, status, genres, averageScoreGreater, startDateGreater, startDateLesser, format, countryOfOrigin, isAdult, interceptors...)
}
func (ac *AnilistClientImpl) StudioDetails(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*StudioDetails, error) {
ac.logger.Debug().Int("studioId", *id).Msg("anilist: Fetching studio details")
return ac.Client.StudioDetails(ctx, id, interceptors...)
}
func (ac *AnilistClientImpl) SearchBaseAnimeByIds(ctx context.Context, ids []*int, page *int, perPage *int, status []*MediaStatus, inCollection *bool, sort []*MediaSort, season *MediaSeason, year *int, genre *string, format *MediaFormat, interceptors ...clientv2.RequestInterceptor) (*SearchBaseAnimeByIds, error) {
ac.logger.Debug().Msg("anilist: Searching anime by ids")
return ac.Client.SearchBaseAnimeByIds(ctx, ids, page, perPage, status, inCollection, sort, season, year, genre, format, interceptors...)
}
func (ac *AnilistClientImpl) AnimeAiringSchedule(ctx context.Context, ids []*int, season *MediaSeason, seasonYear *int, previousSeason *MediaSeason, previousSeasonYear *int, nextSeason *MediaSeason, nextSeasonYear *int, interceptors ...clientv2.RequestInterceptor) (*AnimeAiringSchedule, error) {
ac.logger.Debug().Msg("anilist: Fetching schedule")
return ac.Client.AnimeAiringSchedule(ctx, ids, season, seasonYear, previousSeason, previousSeasonYear, nextSeason, nextSeasonYear, interceptors...)
}
func (ac *AnilistClientImpl) AnimeAiringScheduleRaw(ctx context.Context, ids []*int, interceptors ...clientv2.RequestInterceptor) (*AnimeAiringScheduleRaw, error) {
ac.logger.Debug().Msg("anilist: Fetching schedule")
return ac.Client.AnimeAiringScheduleRaw(ctx, ids, interceptors...)
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
var sentRateLimitWarningTime = time.Now().Add(-10 * time.Second)
// customDoFunc is a custom request interceptor function that handles rate limiting and retries.
func (ac *AnilistClientImpl) customDoFunc(ctx context.Context, req *http.Request, gqlInfo *clientv2.GQLRequestInfo, res interface{}) (err error) {
var rlRemainingStr string
reqTime := time.Now()
defer func() {
timeSince := time.Since(reqTime)
formattedDur := timeSince.Truncate(time.Millisecond).String()
if err != nil {
ac.logger.Error().Str("duration", formattedDur).Str("rlr", rlRemainingStr).Err(err).Msg("anilist: Failed Request")
} else {
if timeSince > 900*time.Millisecond {
ac.logger.Warn().Str("rtt", formattedDur).Str("rlr", rlRemainingStr).Msg("anilist: Successful Request (slow)")
} else {
ac.logger.Info().Str("rtt", formattedDur).Str("rlr", rlRemainingStr).Msg("anilist: Successful Request")
}
}
}()
client := http.DefaultClient
var resp *http.Response
retryCount := 2
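// Retry strategy (up to retryCount attempts): if AniList responds with a
// Retry-After header we are rate limited, so wait that many seconds (+1) and
// retry; if the X-Ratelimit-Remaining header is missing entirely the response
// is treated as transient and retried after 5 seconds; otherwise the response
// is accepted and processed below.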
for i := 0; i < retryCount; i++ {
// Reset response body for retry
if resp != nil && resp.Body != nil {
resp.Body.Close()
}
// Recreate the request body if it was read in a previous attempt
if req.GetBody != nil {
newBody, err := req.GetBody()
if err != nil {
return fmt.Errorf("failed to get request body: %w", err)
}
req.Body = newBody
}
resp, err = client.Do(req)
if err != nil {
return fmt.Errorf("request failed: %w", err)
}
rlRemainingStr = resp.Header.Get("X-Ratelimit-Remaining")
rlRetryAfterStr := resp.Header.Get("Retry-After")
//println("Remaining:", rlRemainingStr, " | RetryAfter:", rlRetryAfterStr)
// If we have a rate limit, sleep for the time
rlRetryAfter, err := strconv.Atoi(rlRetryAfterStr)
if err == nil {
ac.logger.Warn().Msgf("anilist: Rate limited, retrying in %d seconds", rlRetryAfter+1)
if time.Since(sentRateLimitWarningTime) > 10*time.Second {
events.GlobalWSEventManager.SendEvent(events.WarningToast, "anilist: Rate limited, retrying in "+strconv.Itoa(rlRetryAfter+1)+" seconds")
sentRateLimitWarningTime = time.Now()
}
select {
case <-time.After(time.Duration(rlRetryAfter+1) * time.Second):
continue
}
}
if rlRemainingStr == "" {
select {
case <-time.After(5 * time.Second):
continue
}
}
break
}
defer resp.Body.Close()
if resp.Header.Get("Content-Encoding") == "gzip" {
resp.Body, err = gzip.NewReader(resp.Body)
if err != nil {
return fmt.Errorf("gzip decode failed: %w", err)
}
}
var body []byte
body, err = io.ReadAll(resp.Body)
if err != nil {
return fmt.Errorf("failed to read response body: %w", err)
}
err = parseResponse(body, resp.StatusCode, res)
return
}
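// parseResponse converts a non-2xx status code and/or GraphQL errors found in
// the body into a *clientv2.ErrorResponse; otherwise the body is decoded into result.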
func parseResponse(body []byte, httpCode int, result interface{}) error {
errResponse := &clientv2.ErrorResponse{}
isKOCode := httpCode < 200 || 299 < httpCode
if isKOCode {
errResponse.NetworkError = &clientv2.HTTPError{
Code: httpCode,
Message: fmt.Sprintf("Response body %s", string(body)),
}
}
// some servers return a graphql error with a non OK http code, try anyway to parse the body
if err := unmarshal(body, result); err != nil {
var gqlErr *clientv2.GqlErrorList
if errors.As(err, &gqlErr) {
errResponse.GqlErrors = &gqlErr.Errors
} else if !isKOCode {
return err
}
}
if errResponse.HasErrors() {
return errResponse
}
return nil
}
// response is the raw GraphQL response envelope (data and errors) returned by the API.
type response struct {
Data json.RawMessage `json:"data"`
Errors json.RawMessage `json:"errors"`
}
func unmarshal(data []byte, res interface{}) error {
ParseDataWhenErrors := false
resp := response{}
if err := json.Unmarshal(data, &resp); err != nil {
return fmt.Errorf("failed to decode data %s: %w", string(data), err)
}
var err error
if resp.Errors != nil && len(resp.Errors) > 0 {
// try to parse standard graphql error
err = &clientv2.GqlErrorList{}
if e := json.Unmarshal(data, err); e != nil {
return fmt.Errorf("faild to parse graphql errors. Response content %s - %w", string(data), e)
}
// if ParseDataWhenErrors is true, try to parse data as well
if !ParseDataWhenErrors {
return err
}
}
if errData := graphqljson.UnmarshalData(resp.Data, res); errData != nil {
// if ParseDataWhenErrors is true, and we failed to unmarshal data, return the actual error
if ParseDataWhenErrors {
return err
}
return fmt.Errorf("failed to decode data into response %s: %w", string(data), errData)
}
return err
}

File diff suppressed because it is too large

View File

@@ -0,0 +1,569 @@
package anilist
import (
"context"
"log"
"os"
"seanime/internal/test_utils"
"seanime/internal/util"
"github.com/Yamashou/gqlgenc/clientv2"
"github.com/goccy/go-json"
"github.com/rs/zerolog"
)
// This file contains helper functions for testing the anilist package
func TestGetMockAnilistClient() AnilistClient {
return NewMockAnilistClient()
}
// MockAnilistClientImpl is a mock implementation of the AnilistClient, used for tests.
// It uses the real implementation of the AnilistClient to make requests then populates a cache with the results.
// This is to avoid making repeated requests to the AniList API during tests but still have realistic data.
type MockAnilistClientImpl struct {
realAnilistClient AnilistClient
logger *zerolog.Logger
}
func NewMockAnilistClient() *MockAnilistClientImpl {
return &MockAnilistClientImpl{
realAnilistClient: NewAnilistClient(test_utils.ConfigData.Provider.AnilistJwt),
logger: util.NewLogger(),
}
}
func (ac *MockAnilistClientImpl) IsAuthenticated() bool {
return ac.realAnilistClient.IsAuthenticated()
}
func (ac *MockAnilistClientImpl) BaseAnimeByMalID(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*BaseAnimeByMalID, error) {
file, err := os.Open(test_utils.GetTestDataPath("BaseAnimeByMalID"))
defer file.Close()
if err != nil {
if os.IsNotExist(err) {
ac.logger.Warn().Msgf("MockAnilistClientImpl: CACHE MISS [BaseAnimeByMalID]: %d", *id)
ret, err := ac.realAnilistClient.BaseAnimeByMalID(context.Background(), id)
if err != nil {
return nil, err
}
data, err := json.Marshal([]*BaseAnimeByMalID{ret})
if err != nil {
log.Fatal(err)
}
err = os.WriteFile(test_utils.GetTestDataPath("BaseAnimeByMalID"), data, 0644)
if err != nil {
log.Fatal(err)
}
return ret, nil
}
return nil, err
}
var media []*BaseAnimeByMalID
err = json.NewDecoder(file).Decode(&media)
if err != nil {
log.Fatal(err)
}
var ret *BaseAnimeByMalID
for _, m := range media {
if m.GetMedia().ID == *id {
ret = m
break
}
}
if ret == nil {
ac.logger.Warn().Msgf("MockAnilistClientImpl: CACHE MISS [BaseAnimeByMalID]: %d", *id)
ret, err := ac.realAnilistClient.BaseAnimeByMalID(context.Background(), id)
if err != nil {
return nil, err
}
media = append(media, ret)
data, err := json.Marshal(media)
if err != nil {
log.Fatal(err)
}
err = os.WriteFile(test_utils.GetTestDataPath("BaseAnimeByMalID"), data, 0644)
if err != nil {
log.Fatal(err)
}
return ret, nil
}
ac.logger.Trace().Msgf("MockAnilistClientImpl: CACHE HIT [BaseAnimeByMalID]: %d", *id)
return ret, nil
}
func (ac *MockAnilistClientImpl) BaseAnimeByID(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*BaseAnimeByID, error) {
file, err := os.Open(test_utils.GetTestDataPath("BaseAnimeByID"))
defer file.Close()
if err != nil {
if os.IsNotExist(err) {
ac.logger.Warn().Msgf("MockAnilistClientImpl: CACHE MISS [BaseAnimeByID]: %d", *id)
baseAnime, err := ac.realAnilistClient.BaseAnimeByID(context.Background(), id)
if err != nil {
return nil, err
}
data, err := json.Marshal([]*BaseAnimeByID{baseAnime})
if err != nil {
log.Fatal(err)
}
err = os.WriteFile(test_utils.GetTestDataPath("BaseAnimeByID"), data, 0644)
if err != nil {
log.Fatal(err)
}
return baseAnime, nil
}
return nil, err
}
var media []*BaseAnimeByID
err = json.NewDecoder(file).Decode(&media)
if err != nil {
log.Fatal(err)
}
var baseAnime *BaseAnimeByID
for _, m := range media {
if m.GetMedia().ID == *id {
baseAnime = m
break
}
}
if baseAnime == nil {
ac.logger.Warn().Msgf("MockAnilistClientImpl: CACHE MISS [BaseAnimeByID]: %d", *id)
baseAnime, err := ac.realAnilistClient.BaseAnimeByID(context.Background(), id)
if err != nil {
return nil, err
}
media = append(media, baseAnime)
data, err := json.Marshal(media)
if err != nil {
log.Fatal(err)
}
err = os.WriteFile(test_utils.GetTestDataPath("BaseAnimeByID"), data, 0644)
if err != nil {
log.Fatal(err)
}
return baseAnime, nil
}
ac.logger.Trace().Msgf("MockAnilistClientImpl: CACHE HIT [BaseAnimeByID]: %d", *id)
return baseAnime, nil
}
// AnimeCollection
// - Set userName to nil to use the boilerplate AnimeCollection
// - Set userName to a specific username to fetch and cache
func (ac *MockAnilistClientImpl) AnimeCollection(ctx context.Context, userName *string, interceptors ...clientv2.RequestInterceptor) (*AnimeCollection, error) {
if userName == nil {
file, err := os.Open(test_utils.GetDataPath("BoilerplateAnimeCollection"))
defer file.Close()
var ret *AnimeCollection
err = json.NewDecoder(file).Decode(&ret)
if err != nil {
log.Fatal(err)
}
ac.logger.Trace().Msgf("MockAnilistClientImpl: Using [BoilerplateAnimeCollection]")
return ret, nil
}
file, err := os.Open(test_utils.GetTestDataPath("AnimeCollection"))
defer file.Close()
if err != nil {
if os.IsNotExist(err) {
ac.logger.Warn().Msgf("MockAnilistClientImpl: CACHE MISS [AnimeCollection]: %s", *userName)
ret, err := ac.realAnilistClient.AnimeCollection(context.Background(), userName)
if err != nil {
return nil, err
}
data, err := json.Marshal(ret)
if err != nil {
log.Fatal(err)
}
err = os.WriteFile(test_utils.GetTestDataPath("AnimeCollection"), data, 0644)
if err != nil {
log.Fatal(err)
}
return ret, nil
}
return nil, err
}
var ret *AnimeCollection
err = json.NewDecoder(file).Decode(&ret)
if err != nil {
log.Fatal(err)
}
if ret == nil {
ac.logger.Warn().Msgf("MockAnilistClientImpl: CACHE MISS [AnimeCollection]: %s", *userName)
ret, err := ac.realAnilistClient.AnimeCollection(context.Background(), userName)
if err != nil {
return nil, err
}
data, err := json.Marshal(ret)
if err != nil {
log.Fatal(err)
}
err = os.WriteFile(test_utils.GetTestDataPath("AnimeCollection"), data, 0644)
if err != nil {
log.Fatal(err)
}
return ret, nil
}
ac.logger.Trace().Msgf("MockAnilistClientImpl: CACHE HIT [AnimeCollection]: %s", *userName)
return ret, nil
}
func (ac *MockAnilistClientImpl) AnimeCollectionWithRelations(ctx context.Context, userName *string, interceptors ...clientv2.RequestInterceptor) (*AnimeCollectionWithRelations, error) {
if userName == nil {
file, err := os.Open(test_utils.GetDataPath("BoilerplateAnimeCollectionWithRelations"))
defer file.Close()
var ret *AnimeCollectionWithRelations
err = json.NewDecoder(file).Decode(&ret)
if err != nil {
log.Fatal(err)
}
ac.logger.Trace().Msgf("MockAnilistClientImpl: Using [BoilerplateAnimeCollectionWithRelations]")
return ret, nil
}
file, err := os.Open(test_utils.GetTestDataPath("AnimeCollectionWithRelations"))
defer file.Close()
if err != nil {
if os.IsNotExist(err) {
ac.logger.Warn().Msgf("MockAnilistClientImpl: CACHE MISS [AnimeCollectionWithRelations]: %s", *userName)
ret, err := ac.realAnilistClient.AnimeCollectionWithRelations(context.Background(), userName)
if err != nil {
return nil, err
}
data, err := json.Marshal(ret)
if err != nil {
log.Fatal(err)
}
err = os.WriteFile(test_utils.GetTestDataPath("AnimeCollectionWithRelations"), data, 0644)
if err != nil {
log.Fatal(err)
}
return ret, nil
}
return nil, err
}
var ret *AnimeCollectionWithRelations
err = json.NewDecoder(file).Decode(&ret)
if err != nil {
log.Fatal(err)
}
if ret == nil {
ac.logger.Warn().Msgf("MockAnilistClientImpl: CACHE MISS [AnimeCollectionWithRelations]: %s", *userName)
ret, err := ac.realAnilistClient.AnimeCollectionWithRelations(context.Background(), userName)
if err != nil {
return nil, err
}
data, err := json.Marshal(ret)
if err != nil {
log.Fatal(err)
}
err = os.WriteFile(test_utils.GetTestDataPath("AnimeCollectionWithRelations"), data, 0644)
if err != nil {
log.Fatal(err)
}
return ret, nil
}
ac.logger.Trace().Msgf("MockAnilistClientImpl: CACHE HIT [AnimeCollectionWithRelations]: %s", *userName)
return ret, nil
}
type TestModifyAnimeCollectionEntryInput struct {
Status *MediaListStatus
Progress *int
Score *float64
AiredEpisodes *int
NextAiringEpisode *BaseAnime_NextAiringEpisode
}
// TestModifyAnimeCollectionEntry will modify an entry in the fetched anime collection.
// This is used to fine-tune the anime collection for testing purposes.
//
// Example: Setting a specific progress in case the original anime collection has no progress
func TestModifyAnimeCollectionEntry(ac *AnimeCollection, mId int, input TestModifyAnimeCollectionEntryInput) *AnimeCollection {
if ac == nil {
panic("AnimeCollection is nil")
}
lists := ac.GetMediaListCollection().GetLists()
removedFromList := false
var rEntry *AnimeCollection_MediaListCollection_Lists_Entries
// Move the entry to the correct list
if input.Status != nil {
for _, list := range lists {
if list.Status == nil || list.Entries == nil {
continue
}
entries := list.GetEntries()
for idx, entry := range entries {
if entry.GetMedia().ID == mId {
// Remove from current list if status differs
if *list.Status != *input.Status {
removedFromList = true
rEntry = entry
// Ensure we're not going out of bounds
if idx >= 0 && idx < len(entries) {
// Safely remove the entry by re-slicing
list.Entries = append(entries[:idx], entries[idx+1:]...)
}
break
}
}
}
}
// Add the entry to the correct list if it was removed
if removedFromList && rEntry != nil {
for _, list := range lists {
if list.Status == nil {
continue
}
if *list.Status == *input.Status {
if list.Entries == nil {
list.Entries = make([]*AnimeCollection_MediaListCollection_Lists_Entries, 0)
}
// Add the removed entry to the new list
list.Entries = append(list.Entries, rEntry)
break
}
}
}
}
// Update the entry details
out:
for _, list := range lists {
entries := list.GetEntries()
for _, entry := range entries {
if entry.GetMedia().ID == mId {
if input.Status != nil {
entry.Status = input.Status
}
if input.Progress != nil {
entry.Progress = input.Progress
}
if input.Score != nil {
entry.Score = input.Score
}
if input.AiredEpisodes != nil {
entry.Media.Episodes = input.AiredEpisodes
}
if input.NextAiringEpisode != nil {
entry.Media.NextAiringEpisode = input.NextAiringEpisode
}
break out
}
}
}
return ac
}
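// Example (hypothetical): force media 21 to show up as CURRENT with progress 5
// before running a test against the mocked collection:
//
//	status := MediaListStatusCurrent
//	progress := 5
//	ac = TestModifyAnimeCollectionEntry(ac, 21, TestModifyAnimeCollectionEntryInput{
//		Status:   &status,
//		Progress: &progress,
//	})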
func TestAddAnimeCollectionEntry(ac *AnimeCollection, mId int, input TestModifyAnimeCollectionEntryInput, realClient AnilistClient) *AnimeCollection {
if ac == nil {
panic("AnimeCollection is nil")
}
// Fetch the anime details
baseAnime, err := realClient.BaseAnimeByID(context.Background(), &mId)
if err != nil {
log.Fatal(err)
}
anime := baseAnime.GetMedia()
if input.NextAiringEpisode != nil {
anime.NextAiringEpisode = input.NextAiringEpisode
}
if input.AiredEpisodes != nil {
anime.Episodes = input.AiredEpisodes
}
lists := ac.GetMediaListCollection().GetLists()
// Add the entry to the correct list
if input.Status != nil {
for _, list := range lists {
if list.Status == nil {
continue
}
if *list.Status == *input.Status {
if list.Entries == nil {
list.Entries = make([]*AnimeCollection_MediaListCollection_Lists_Entries, 0)
}
list.Entries = append(list.Entries, &AnimeCollection_MediaListCollection_Lists_Entries{
Media: baseAnime.GetMedia(),
Status: input.Status,
Progress: input.Progress,
Score: input.Score,
})
break
}
}
}
return ac
}
func TestAddAnimeCollectionWithRelationsEntry(ac *AnimeCollectionWithRelations, mId int, input TestModifyAnimeCollectionEntryInput, realClient AnilistClient) *AnimeCollectionWithRelations {
if ac == nil {
panic("AnimeCollection is nil")
}
// Fetch the anime details
baseAnime, err := realClient.CompleteAnimeByID(context.Background(), &mId)
if err != nil {
log.Fatal(err)
}
anime := baseAnime.GetMedia()
//if input.NextAiringEpisode != nil {
// anime.NextAiringEpisode = input.NextAiringEpisode
//}
if input.AiredEpisodes != nil {
anime.Episodes = input.AiredEpisodes
}
lists := ac.GetMediaListCollection().GetLists()
// Add the entry to the correct list
if input.Status != nil {
for _, list := range lists {
if list.Status == nil {
continue
}
if *list.Status == *input.Status {
if list.Entries == nil {
list.Entries = make([]*AnimeCollectionWithRelations_MediaListCollection_Lists_Entries, 0)
}
list.Entries = append(list.Entries, &AnimeCollectionWithRelations_MediaListCollection_Lists_Entries{
Media: baseAnime.GetMedia(),
Status: input.Status,
Progress: input.Progress,
Score: input.Score,
})
break
}
}
}
return ac
}
//
// WILL NOT IMPLEMENT
//
func (ac *MockAnilistClientImpl) UpdateMediaListEntry(ctx context.Context, mediaID *int, status *MediaListStatus, scoreRaw *int, progress *int, startedAt *FuzzyDateInput, completedAt *FuzzyDateInput, interceptors ...clientv2.RequestInterceptor) (*UpdateMediaListEntry, error) {
ac.logger.Debug().Int("mediaId", *mediaID).Msg("anilist: Updating media list entry")
return &UpdateMediaListEntry{}, nil
}
func (ac *MockAnilistClientImpl) UpdateMediaListEntryProgress(ctx context.Context, mediaID *int, progress *int, status *MediaListStatus, interceptors ...clientv2.RequestInterceptor) (*UpdateMediaListEntryProgress, error) {
ac.logger.Debug().Int("mediaId", *mediaID).Msg("anilist: Updating media list entry progress")
return &UpdateMediaListEntryProgress{}, nil
}
func (ac *MockAnilistClientImpl) UpdateMediaListEntryRepeat(ctx context.Context, mediaID *int, repeat *int, interceptors ...clientv2.RequestInterceptor) (*UpdateMediaListEntryRepeat, error) {
ac.logger.Debug().Int("mediaId", *mediaID).Msg("anilist: Updating media list entry repeat")
return &UpdateMediaListEntryRepeat{}, nil
}
func (ac *MockAnilistClientImpl) DeleteEntry(ctx context.Context, mediaListEntryID *int, interceptors ...clientv2.RequestInterceptor) (*DeleteEntry, error) {
ac.logger.Debug().Int("entryId", *mediaListEntryID).Msg("anilist: Deleting media list entry")
return &DeleteEntry{}, nil
}
func (ac *MockAnilistClientImpl) AnimeDetailsByID(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*AnimeDetailsByID, error) {
ac.logger.Debug().Int("mediaId", *id).Msg("anilist: Fetching anime details")
return ac.realAnilistClient.AnimeDetailsByID(ctx, id, interceptors...)
}
func (ac *MockAnilistClientImpl) CompleteAnimeByID(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*CompleteAnimeByID, error) {
ac.logger.Debug().Int("mediaId", *id).Msg("anilist: Fetching complete media")
return ac.realAnilistClient.CompleteAnimeByID(ctx, id, interceptors...)
}
func (ac *MockAnilistClientImpl) ListAnime(ctx context.Context, page *int, search *string, perPage *int, sort []*MediaSort, status []*MediaStatus, genres []*string, averageScoreGreater *int, season *MediaSeason, seasonYear *int, format *MediaFormat, isAdult *bool, interceptors ...clientv2.RequestInterceptor) (*ListAnime, error) {
ac.logger.Debug().Msg("anilist: Fetching media list")
return ac.realAnilistClient.ListAnime(ctx, page, search, perPage, sort, status, genres, averageScoreGreater, season, seasonYear, format, isAdult, interceptors...)
}
func (ac *MockAnilistClientImpl) ListRecentAnime(ctx context.Context, page *int, perPage *int, airingAtGreater *int, airingAtLesser *int, notYetAired *bool, interceptors ...clientv2.RequestInterceptor) (*ListRecentAnime, error) {
ac.logger.Debug().Msg("anilist: Fetching recent media list")
return ac.realAnilistClient.ListRecentAnime(ctx, page, perPage, airingAtGreater, airingAtLesser, notYetAired, interceptors...)
}
func (ac *MockAnilistClientImpl) GetViewer(ctx context.Context, interceptors ...clientv2.RequestInterceptor) (*GetViewer, error) {
ac.logger.Debug().Msg("anilist: Fetching viewer")
return ac.realAnilistClient.GetViewer(ctx, interceptors...)
}
func (ac *MockAnilistClientImpl) MangaCollection(ctx context.Context, userName *string, interceptors ...clientv2.RequestInterceptor) (*MangaCollection, error) {
ac.logger.Debug().Msg("anilist: Fetching manga collection")
return ac.realAnilistClient.MangaCollection(ctx, userName, interceptors...)
}
func (ac *MockAnilistClientImpl) SearchBaseManga(ctx context.Context, page *int, perPage *int, sort []*MediaSort, search *string, status []*MediaStatus, interceptors ...clientv2.RequestInterceptor) (*SearchBaseManga, error) {
ac.logger.Debug().Msg("anilist: Searching manga")
return ac.realAnilistClient.SearchBaseManga(ctx, page, perPage, sort, search, status, interceptors...)
}
func (ac *MockAnilistClientImpl) BaseMangaByID(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*BaseMangaByID, error) {
ac.logger.Debug().Int("mediaId", *id).Msg("anilist: Fetching manga")
return ac.realAnilistClient.BaseMangaByID(ctx, id, interceptors...)
}
func (ac *MockAnilistClientImpl) MangaDetailsByID(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*MangaDetailsByID, error) {
ac.logger.Debug().Int("mediaId", *id).Msg("anilist: Fetching manga details")
return ac.realAnilistClient.MangaDetailsByID(ctx, id, interceptors...)
}
func (ac *MockAnilistClientImpl) ListManga(ctx context.Context, page *int, search *string, perPage *int, sort []*MediaSort, status []*MediaStatus, genres []*string, averageScoreGreater *int, startDateGreater *string, startDateLesser *string, format *MediaFormat, countryOfOrigin *string, isAdult *bool, interceptors ...clientv2.RequestInterceptor) (*ListManga, error) {
ac.logger.Debug().Msg("anilist: Fetching manga list")
return ac.realAnilistClient.ListManga(ctx, page, search, perPage, sort, status, genres, averageScoreGreater, startDateGreater, startDateLesser, format, countryOfOrigin, isAdult, interceptors...)
}
func (ac *MockAnilistClientImpl) StudioDetails(ctx context.Context, id *int, interceptors ...clientv2.RequestInterceptor) (*StudioDetails, error) {
ac.logger.Debug().Int("studioId", *id).Msg("anilist: Fetching studio details")
return ac.realAnilistClient.StudioDetails(ctx, id, interceptors...)
}
func (ac *MockAnilistClientImpl) ViewerStats(ctx context.Context, interceptors ...clientv2.RequestInterceptor) (*ViewerStats, error) {
ac.logger.Debug().Msg("anilist: Fetching stats")
return ac.realAnilistClient.ViewerStats(ctx, interceptors...)
}
func (ac *MockAnilistClientImpl) SearchBaseAnimeByIds(ctx context.Context, ids []*int, page *int, perPage *int, status []*MediaStatus, inCollection *bool, sort []*MediaSort, season *MediaSeason, year *int, genre *string, format *MediaFormat, interceptors ...clientv2.RequestInterceptor) (*SearchBaseAnimeByIds, error) {
ac.logger.Debug().Msg("anilist: Searching anime by ids")
return ac.realAnilistClient.SearchBaseAnimeByIds(ctx, ids, page, perPage, status, inCollection, sort, season, year, genre, format, interceptors...)
}
func (ac *MockAnilistClientImpl) AnimeAiringSchedule(ctx context.Context, ids []*int, season *MediaSeason, seasonYear *int, previousSeason *MediaSeason, previousSeasonYear *int, nextSeason *MediaSeason, nextSeasonYear *int, interceptors ...clientv2.RequestInterceptor) (*AnimeAiringSchedule, error) {
ac.logger.Debug().Msg("anilist: Fetching schedule")
return ac.realAnilistClient.AnimeAiringSchedule(ctx, ids, season, seasonYear, previousSeason, previousSeasonYear, nextSeason, nextSeasonYear, interceptors...)
}
func (ac *MockAnilistClientImpl) AnimeAiringScheduleRaw(ctx context.Context, ids []*int, interceptors ...clientv2.RequestInterceptor) (*AnimeAiringScheduleRaw, error) {
ac.logger.Debug().Msg("anilist: Fetching schedule")
return ac.realAnilistClient.AnimeAiringScheduleRaw(ctx, ids, interceptors...)
}

View File

@@ -0,0 +1,73 @@
package anilist
import (
"context"
"github.com/goccy/go-json"
"github.com/samber/lo"
"github.com/stretchr/testify/assert"
"os"
"seanime/internal/test_utils"
"testing"
)
// USE CASE: Generate a boilerplate Anilist AnimeCollection for testing purposes and save it to 'test/data/BoilerplateAnimeCollection'.
// The generated AnimeCollection will have all entries in the 'Planning' status.
// The generated AnimeCollection will be used to test various Anilist API methods.
// You can use TestModifyAnimeCollectionEntry to modify the generated AnimeCollection before using it in a test.
// - DO NOT RUN IF YOU DON'T PLAN TO GENERATE A NEW 'test/data/BoilerplateAnimeCollection'
func TestGenerateBoilerplateAnimeCollection(t *testing.T) {
t.Skip("This test is not meant to be run")
test_utils.InitTestProvider(t, test_utils.Anilist())
anilistClient := TestGetMockAnilistClient()
ac, err := anilistClient.AnimeCollection(context.Background(), &test_utils.ConfigData.Provider.AnilistUsername)
if assert.NoError(t, err) {
lists := ac.GetMediaListCollection().GetLists()
entriesToAddToPlanning := make([]*AnimeListEntry, 0)
if assert.NoError(t, err) {
for _, list := range lists {
if list.Status != nil {
if list.GetStatus().String() != string(MediaListStatusPlanning) {
entries := list.GetEntries()
for _, entry := range entries {
entry.Progress = lo.ToPtr(0)
entry.Score = lo.ToPtr(0.0)
entry.Status = lo.ToPtr(MediaListStatusPlanning)
entriesToAddToPlanning = append(entriesToAddToPlanning, entry)
}
list.Entries = make([]*AnimeListEntry, 0)
}
}
}
newLists := make([]*AnimeCollection_MediaListCollection_Lists, 0)
for _, list := range lists {
if list.Status == nil {
continue
}
if *list.GetStatus() == MediaListStatusPlanning {
list.Entries = append(list.Entries, entriesToAddToPlanning...)
newLists = append(newLists, list)
} else {
newLists = append(newLists, list)
}
}
ac.MediaListCollection.Lists = newLists
data, err := json.Marshal(ac)
if assert.NoError(t, err) {
err = os.WriteFile(test_utils.GetDataPath("BoilerplateAnimeCollection"), data, 0644)
assert.NoError(t, err)
}
}
}
}

View File

@@ -0,0 +1,237 @@
package anilist
import (
"context"
"github.com/davecgh/go-spew/spew"
"github.com/samber/lo"
"github.com/stretchr/testify/assert"
"seanime/internal/test_utils"
"seanime/internal/util"
"testing"
)
//func TestHiddenFromStatus(t *testing.T) {
// test_utils.InitTestProvider(t, test_utils.Anilist())
//
// token := test_utils.ConfigData.Provider.AnilistJwt
// logger := util.NewLogger()
// //anilistClient := NewAnilistClient(test_utils.ConfigData.Provider.AnilistJwt)
//
// variables := map[string]interface{}{}
//
// variables["userName"] = test_utils.ConfigData.Provider.AnilistUsername
// variables["type"] = "ANIME"
//
// requestBody, err := json.Marshal(map[string]interface{}{
// "query": testQuery,
// "variables": variables,
// })
// require.NoError(t, err)
//
// data, err := customQuery(requestBody, logger, token)
// require.NoError(t, err)
//
// var mediaLists []*MediaList
//
// type retData struct {
// Page Page
// PageInfo PageInfo
// }
//
// var ret retData
// m, err := json.Marshal(data)
// require.NoError(t, err)
// if err := json.Unmarshal(m, &ret); err != nil {
// t.Fatalf("Failed to unmarshal data: %v", err)
// }
//
// mediaLists = append(mediaLists, ret.Page.MediaList...)
//
// util.Spew(ret.Page.PageInfo)
//
// var currentPage = 1
// var hasNextPage = false
// if ret.Page.PageInfo != nil && ret.Page.PageInfo.HasNextPage != nil {
// hasNextPage = *ret.Page.PageInfo.HasNextPage
// }
// for hasNextPage {
// currentPage++
// variables["page"] = currentPage
// requestBody, err = json.Marshal(map[string]interface{}{
// "query": testQuery,
// "variables": variables,
// })
// require.NoError(t, err)
// data, err = customQuery(requestBody, logger, token)
// require.NoError(t, err)
// m, err = json.Marshal(data)
// require.NoError(t, err)
// if err := json.Unmarshal(m, &ret); err != nil {
// t.Fatalf("Failed to unmarshal data: %v", err)
// }
// util.Spew(ret.Page.PageInfo)
// if ret.Page.PageInfo != nil && ret.Page.PageInfo.HasNextPage != nil {
// hasNextPage = *ret.Page.PageInfo.HasNextPage
// }
// mediaLists = append(mediaLists, ret.Page.MediaList...)
// }
//
// //res, err := anilistClient.AnimeCollection(context.Background(), &test_utils.ConfigData.Provider.AnilistUsername)
// //assert.NoError(t, err)
//
// for _, mediaList := range mediaLists {
// util.Spew(mediaList.Media.ID)
// if mediaList.Media.ID == 151514 {
// util.Spew(mediaList)
// }
// }
//
//}
//
//const testQuery = `query ($page: Int, $userName: String, $type: MediaType) {
// Page (page: $page, perPage: 100) {
// pageInfo {
// hasNextPage
// total
// perPage
// currentPage
// lastPage
// }
// mediaList (type: $type, userName: $userName) {
// status
// startedAt {
// year
// month
// day
// }
// completedAt {
// year
// month
// day
// }
// repeat
// score(format: POINT_100)
// progress
// progressVolumes
// notes
// media {
// siteUrl
// id
// idMal
// episodes
// chapters
// volumes
// status
// averageScore
// coverImage{
// large
// extraLarge
// }
// bannerImage
// title {
// userPreferred
// }
// }
// }
// }
// }`
func TestGetAnimeById(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.Anilist())
anilistClient := TestGetMockAnilistClient()
tests := []struct {
name string
mediaId int
}{
{
name: "Cowboy Bebop",
mediaId: 1,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
res, err := anilistClient.BaseAnimeByID(context.Background(), &tt.mediaId)
assert.NoError(t, err)
assert.NotNil(t, res)
})
}
}
func TestListAnime(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.Anilist())
tests := []struct {
name string
Page *int
Search *string
PerPage *int
Sort []*MediaSort
Status []*MediaStatus
Genres []*string
AverageScoreGreater *int
Season *MediaSeason
SeasonYear *int
Format *MediaFormat
IsAdult *bool
}{
{
name: "Popular",
Page: lo.ToPtr(1),
Search: nil,
PerPage: lo.ToPtr(20),
Sort: []*MediaSort{lo.ToPtr(MediaSortTrendingDesc)},
Status: nil,
Genres: nil,
AverageScoreGreater: nil,
Season: nil,
SeasonYear: nil,
Format: nil,
IsAdult: nil,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
cacheKey := ListAnimeCacheKey(
tt.Page,
tt.Search,
tt.PerPage,
tt.Sort,
tt.Status,
tt.Genres,
tt.AverageScoreGreater,
tt.Season,
tt.SeasonYear,
tt.Format,
tt.IsAdult,
)
t.Log(cacheKey)
res, err := ListAnimeM(
tt.Page,
tt.Search,
tt.PerPage,
tt.Sort,
tt.Status,
tt.Genres,
tt.AverageScoreGreater,
tt.Season,
tt.SeasonYear,
tt.Format,
tt.IsAdult,
util.NewLogger(),
"",
)
assert.NoError(t, err)
assert.Equal(t, *tt.PerPage, len(res.GetPage().GetMedia()))
spew.Dump(res)
})
}
}

View File

@@ -0,0 +1,248 @@
package anilist
import (
"time"
"github.com/goccy/go-json"
)
type (
AnimeListEntry = AnimeCollection_MediaListCollection_Lists_Entries
AnimeList = AnimeCollection_MediaListCollection_Lists
EntryDate struct {
Year *int `json:"year,omitempty"`
Month *int `json:"month,omitempty"`
Day *int `json:"day,omitempty"`
}
)
func (ac *AnimeCollection) GetListEntryFromAnimeId(id int) (*AnimeListEntry, bool) {
if ac == nil || ac.MediaListCollection == nil {
return nil, false
}
var entry *AnimeCollection_MediaListCollection_Lists_Entries
for _, l := range ac.MediaListCollection.Lists {
if l.Entries == nil || len(l.Entries) == 0 {
continue
}
for _, e := range l.Entries {
if e.Media.ID == id {
entry = e
break
}
}
}
if entry == nil {
return nil, false
}
return entry, true
}
func (ac *AnimeCollection) GetAllAnime() []*BaseAnime {
if ac == nil {
return make([]*BaseAnime, 0)
}
var ret []*BaseAnime
addedId := make(map[int]bool)
for _, l := range ac.MediaListCollection.Lists {
if l.Entries == nil || len(l.Entries) == 0 {
continue
}
for _, e := range l.Entries {
if _, ok := addedId[e.Media.ID]; !ok {
ret = append(ret, e.Media)
addedId[e.Media.ID] = true
}
}
}
return ret
}
func (ac *AnimeCollection) FindAnime(mediaId int) (*BaseAnime, bool) {
if ac == nil {
return nil, false
}
for _, l := range ac.MediaListCollection.Lists {
if l.Entries == nil || len(l.Entries) == 0 {
continue
}
for _, e := range l.Entries {
if e.Media.ID == mediaId {
return e.Media, true
}
}
}
return nil, false
}
func (ac *AnimeCollectionWithRelations) GetListEntryFromMediaId(id int) (*AnimeCollectionWithRelations_MediaListCollection_Lists_Entries, bool) {
if ac == nil || ac.MediaListCollection == nil {
return nil, false
}
var entry *AnimeCollectionWithRelations_MediaListCollection_Lists_Entries
for _, l := range ac.MediaListCollection.Lists {
if l.Entries == nil || len(l.Entries) == 0 {
continue
}
for _, e := range l.Entries {
if e.Media.ID == id {
entry = e
break
}
}
}
if entry == nil {
return nil, false
}
return entry, true
}
func (ac *AnimeCollectionWithRelations) GetAllAnime() []*CompleteAnime {
var ret []*CompleteAnime
addedId := make(map[int]bool)
for _, l := range ac.MediaListCollection.Lists {
if l.Entries == nil || len(l.Entries) == 0 {
continue
}
for _, e := range l.Entries {
if _, ok := addedId[e.Media.ID]; !ok {
ret = append(ret, e.Media)
addedId[e.Media.ID] = true
}
}
}
return ret
}
func (ac *AnimeCollectionWithRelations) FindAnime(mediaId int) (*CompleteAnime, bool) {
for _, l := range ac.MediaListCollection.Lists {
if l.Entries == nil || len(l.Entries) == 0 {
continue
}
for _, e := range l.Entries {
if e.Media.ID == mediaId {
return e.Media, true
}
}
}
return nil, false
}
type IFuzzyDate interface {
GetYear() *int
GetMonth() *int
GetDay() *int
}
func FuzzyDateToString(d IFuzzyDate) string {
if d == nil {
return ""
}
return fuzzyDateToString(d.GetYear(), d.GetMonth(), d.GetDay())
}
func ToEntryStartDate(d *AnimeCollection_MediaListCollection_Lists_Entries_StartedAt) string {
if d == nil {
return ""
}
return fuzzyDateToString(d.GetYear(), d.GetMonth(), d.GetDay())
}
func ToEntryCompletionDate(d *AnimeCollection_MediaListCollection_Lists_Entries_CompletedAt) string {
if d == nil {
return ""
}
return fuzzyDateToString(d.GetYear(), d.GetMonth(), d.GetDay())
}
func fuzzyDateToString(year *int, month *int, day *int) string {
_year := 0
if year != nil {
_year = *year
}
if _year == 0 {
return ""
}
// Default missing month/day to 1 so time.Date doesn't normalize a zero value
// into the previous month or year.
_month := 1
if month != nil && *month != 0 {
_month = *month
}
_day := 1
if day != nil && *day != 0 {
_day = *day
}
return time.Date(_year, time.Month(_month), _day, 0, 0, 0, 0, time.UTC).Format(time.RFC3339)
}
// AddEntryToList adds an entry to the appropriate list based on the provided status.
// If no list exists with the given status, a new list is created.
func (mc *AnimeCollection_MediaListCollection) AddEntryToList(entry *AnimeCollection_MediaListCollection_Lists_Entries, status MediaListStatus) {
if mc == nil || entry == nil {
return
}
// Initialize Lists slice if nil
if mc.Lists == nil {
mc.Lists = make([]*AnimeCollection_MediaListCollection_Lists, 0)
}
// Find existing list with the target status
for _, list := range mc.Lists {
if list.Status != nil && *list.Status == status {
// Found the list, add the entry
if list.Entries == nil {
list.Entries = make([]*AnimeCollection_MediaListCollection_Lists_Entries, 0)
}
list.Entries = append(list.Entries, entry)
return
}
}
// No list found with the target status, create a new one
newList := &AnimeCollection_MediaListCollection_Lists{
Status: &status,
Entries: []*AnimeCollection_MediaListCollection_Lists_Entries{entry},
}
mc.Lists = append(mc.Lists, newList)
}
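// Copy returns a deep copy of the collection by round-tripping it through JSON.
// It returns nil if marshalling or unmarshalling fails.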
func (ac *AnimeCollection) Copy() *AnimeCollection {
if ac == nil {
return nil
}
marshaled, err := json.Marshal(ac)
if err != nil {
return nil
}
var copy AnimeCollection
err = json.Unmarshal(marshaled, &copy)
if err != nil {
return nil
}
return &copy
}
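// CopyT returns a deep copy of a single list, using the same JSON round-trip.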
func (ac *AnimeList) CopyT() *AnimeCollection_MediaListCollection_Lists {
if ac == nil {
return nil
}
marshaled, err := json.Marshal(ac)
if err != nil {
return nil
}
var copy AnimeCollection_MediaListCollection_Lists
err = json.Unmarshal(marshaled, &copy)
if err != nil {
return nil
}
return &copy
}

View File

@@ -0,0 +1,115 @@
package anilist
import (
"fmt"
"github.com/goccy/go-json"
"seanime/internal/util"
"strconv"
)
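// FetchBaseAnimeMap fetches multiple anime in a single request by building a
// compound query in which each Media selection is aliased as "t<id>". The
// response keys are mapped back to AniList IDs by stripping the leading "t".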
func FetchBaseAnimeMap(ids []int) (ret map[int]*BaseAnime, err error) {
query := fmt.Sprintf(CompoundBaseAnimeDocument, newCompoundQuery(ids))
requestBody, err := json.Marshal(map[string]interface{}{
"query": query,
"variables": nil,
})
if err != nil {
return nil, err
}
data, err := customQuery(requestBody, util.NewLogger())
if err != nil {
return nil, err
}
var res map[string]*BaseAnime
dataB, err := json.Marshal(data)
if err != nil {
return nil, err
}
err = json.Unmarshal(dataB, &res)
if err != nil {
return nil, err
}
ret = make(map[int]*BaseAnime)
for k, v := range res {
id, err := strconv.Atoi(k[1:])
if err != nil {
return nil, err
}
ret[id] = v
}
return ret, nil
}
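// newCompoundQuery builds one aliased "t<id>: Media(id: <id>)" block per ID,
// each spreading the baseAnime fragment defined below.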
func newCompoundQuery(ids []int) string {
var query string
for _, id := range ids {
query += fmt.Sprintf(`
t%d: Media(id: %d) {
...baseAnime
}
`, id, id)
}
return query
}
const CompoundBaseAnimeDocument = `query CompoundQueryTest {
%s
}
fragment baseAnime on Media {
id
idMal
siteUrl
status(version: 2)
season
type
format
bannerImage
episodes
synonyms
isAdult
countryOfOrigin
meanScore
description
genres
duration
trailer {
id
site
thumbnail
}
title {
userPreferred
romaji
english
native
}
coverImage {
extraLarge
large
medium
color
}
startDate {
year
month
day
}
endDate {
year
month
day
}
nextAiringEpisode {
airingAt
timeUntilAiring
episode
}
}`

View File

@@ -0,0 +1,95 @@
package anilist
import (
"fmt"
"github.com/davecgh/go-spew/spew"
"github.com/goccy/go-json"
"github.com/stretchr/testify/require"
"seanime/internal/test_utils"
"seanime/internal/util"
"testing"
)
func TestCompoundQuery(t *testing.T) {
test_utils.InitTestProvider(t)
var ids = []int{171457, 21}
query := fmt.Sprintf(compoundQueryFormatTest, newCompoundQuery(ids))
t.Log(query)
requestBody, err := json.Marshal(map[string]interface{}{
"query": query,
"variables": nil,
})
require.NoError(t, err)
data, err := customQuery(requestBody, util.NewLogger())
require.NoError(t, err)
var res map[string]*BaseAnime
dataB, err := json.Marshal(data)
require.NoError(t, err)
err = json.Unmarshal(dataB, &res)
require.NoError(t, err)
spew.Dump(res)
}
const compoundQueryFormatTest = `query CompoundQueryTest {
%s
}
fragment baseAnime on Media {
id
idMal
siteUrl
status(version: 2)
season
type
format
bannerImage
episodes
synonyms
isAdult
countryOfOrigin
meanScore
description
genres
duration
trailer {
id
site
thumbnail
}
title {
userPreferred
romaji
english
native
}
coverImage {
extraLarge
large
medium
color
}
startDate {
year
month
day
}
endDate {
year
month
day
}
nextAiringEpisode {
airingAt
timeUntilAiring
episode
}
}`

View File

@@ -0,0 +1,140 @@
package anilist
import (
"bytes"
"compress/gzip"
"errors"
"fmt"
"net/http"
"seanime/internal/util"
"strconv"
"time"
"github.com/goccy/go-json"
"github.com/rs/zerolog"
)
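// CustomQuery sends an arbitrary GraphQL request body to the AniList API,
// optionally authenticated with the given token, and returns the decoded
// "data" object of the response.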
func CustomQuery(body map[string]interface{}, logger *zerolog.Logger, token string) (data interface{}, err error) {
bodyBytes, err := json.Marshal(body)
if err != nil {
return nil, err
}
return customQuery(bodyBytes, logger, token)
}
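// customQuery performs the actual request, with rate-limit and retry handling
// similar to AnilistClientImpl.customDoFunc.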
func customQuery(body []byte, logger *zerolog.Logger, token ...string) (data interface{}, err error) {
var rlRemainingStr string
reqTime := time.Now()
defer func() {
timeSince := time.Since(reqTime)
formattedDur := timeSince.Truncate(time.Millisecond).String()
if err != nil {
logger.Error().Str("duration", formattedDur).Str("rlr", rlRemainingStr).Err(err).Msg("anilist: Failed Request")
} else {
if timeSince > 600*time.Millisecond {
logger.Warn().Str("rtt", formattedDur).Str("rlr", rlRemainingStr).Msg("anilist: Long Request")
} else {
logger.Trace().Str("rtt", formattedDur).Str("rlr", rlRemainingStr).Msg("anilist: Successful Request")
}
}
}()
defer util.HandlePanicInModuleThen("api/anilist/custom_query", func() {
err = errors.New("panic in customQuery")
})
client := http.DefaultClient
var req *http.Request
req, err = http.NewRequest("POST", "https://graphql.anilist.co", bytes.NewBuffer(body))
if err != nil {
return nil, err
}
req.Header.Set("Content-Type", "application/json")
req.Header.Set("Accept", "application/json")
if len(token) > 0 && token[0] != "" {
req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token[0]))
}
// Send request
retryCount := 2
var resp *http.Response
for i := 0; i < retryCount; i++ {
// Reset response body for retry
if resp != nil && resp.Body != nil {
resp.Body.Close()
}
// Recreate the request body if it was read in a previous attempt
if req.GetBody != nil {
newBody, err := req.GetBody()
if err != nil {
return nil, fmt.Errorf("failed to get request body: %w", err)
}
req.Body = newBody
}
resp, err = client.Do(req)
if err != nil {
return nil, fmt.Errorf("request failed: %w", err)
}
rlRemainingStr = resp.Header.Get("X-Ratelimit-Remaining")
rlRetryAfterStr := resp.Header.Get("Retry-After")
rlRetryAfter, err := strconv.Atoi(rlRetryAfterStr)
if err == nil {
logger.Warn().Msgf("anilist: Rate limited, retrying in %d seconds", rlRetryAfter+1)
select {
case <-time.After(time.Duration(rlRetryAfter+1) * time.Second):
continue
}
}
if rlRemainingStr == "" {
select {
case <-time.After(5 * time.Second):
continue
}
}
break
}
defer resp.Body.Close()
if resp.Header.Get("Content-Encoding") == "gzip" {
resp.Body, err = gzip.NewReader(resp.Body)
if err != nil {
return nil, fmt.Errorf("gzip decode failed: %w", err)
}
}
var res interface{}
err = json.NewDecoder(resp.Body).Decode(&res)
if err != nil {
return nil, fmt.Errorf("failed to decode response: %w", err)
}
var ok bool
reqErrors, ok := res.(map[string]interface{})["errors"].([]interface{})
if ok && len(reqErrors) > 0 {
firstError, foundErr := reqErrors[0].(map[string]interface{})
if foundErr {
return nil, errors.New(firstError["message"].(string))
}
}
data, ok = res.(map[string]interface{})["data"]
if !ok {
return nil, errors.New("failed to parse data")
}
return data, nil
}

View File

@@ -0,0 +1,27 @@
package anilist
//import (
//)
//
//func TestFuzzyDate(t *testing.T) {
//
// date := "2006-01-02T15:04:05Z"
//
// parsedDate, err := time.Parse(time.RFC3339, date)
// if err != nil {
// t.Fatal(err)
// }
//
// year := parsedDate.Year()
// month := int(parsedDate.Month())
// day := parsedDate.Day()
// t.Logf("Year: %d, Month: %d, Day: %d", year, month, day)
//
//}
//
//func TestDateTransformation(t *testing.T) {
//
// t.Logf(time.Date(2024, time.Month(1), 1, 0, 0, 0, 0, time.Local).UTC().Format(time.RFC3339))
//
//}

View File

@@ -0,0 +1,50 @@
package anilist
import (
"context"
"errors"
"github.com/rs/zerolog"
"seanime/internal/util/limiter"
"sync"
)
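// AddMediaToPlanning adds each of the given media IDs to the authenticated
// user's planning list, issuing one UpdateMediaListEntry request per ID and
// spacing the requests out with the provided rate limiter. Individual failures
// are logged but do not abort the batch.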
func (c *Client) AddMediaToPlanning(mIds []int, rateLimiter *limiter.Limiter, logger *zerolog.Logger) error {
if len(mIds) == 0 {
logger.Debug().Msg("anilist: No media added to planning list")
return nil
}
if rateLimiter == nil {
return errors.New("anilist: no rate limiter provided")
}
status := MediaListStatusPlanning
scoreRaw := 0
progress := 0
wg := sync.WaitGroup{}
for _, _id := range mIds {
wg.Add(1)
go func(id int) {
rateLimiter.Wait()
defer wg.Done()
_, err := c.UpdateMediaListEntry(
context.Background(),
&id,
&status,
&scoreRaw,
&progress,
nil,
nil,
)
if err != nil {
logger.Error().Msg("anilist: An error occurred while adding media to planning list: " + err.Error())
}
}(_id)
}
wg.Wait()
logger.Debug().Any("count", len(mIds)).Msg("anilist: Media added to planning list")
return nil
}

View File

@@ -0,0 +1,19 @@
package anilist
import "seanime/internal/hook_resolver"
// ListMissedSequelsRequestedEvent is triggered when the missed sequels list is requested.
// Prevent default to skip the default behavior and return your own data.
type ListMissedSequelsRequestedEvent struct {
hook_resolver.Event
AnimeCollectionWithRelations *AnimeCollectionWithRelations `json:"animeCollectionWithRelations"`
Variables map[string]interface{} `json:"variables"`
Query string `json:"query"`
// Empty data object, will be used if the hook prevents the default behavior
List []*BaseAnime `json:"list"`
}
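// ListMissedSequelsEvent is triggered after the missed sequels list has been fetched.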
type ListMissedSequelsEvent struct {
hook_resolver.Event
List []*BaseAnime `json:"list"`
}

View File

@@ -0,0 +1,529 @@
package anilist
import (
"fmt"
"seanime/internal/hook"
"github.com/goccy/go-json"
"github.com/rs/zerolog"
"github.com/samber/lo"
)
func ListMissedSequels(
animeCollectionWithRelations *AnimeCollectionWithRelations,
logger *zerolog.Logger,
token string,
) (ret []*BaseAnime, err error) {
defer func() {
if r := recover(); r != nil {
err = fmt.Errorf("panic: %v", r)
}
}()
variables := map[string]interface{}{}
variables["page"] = 1
variables["perPage"] = 50
ids := make(map[int]struct{})
for _, list := range animeCollectionWithRelations.GetMediaListCollection().GetLists() {
if list.Status == nil || !(*list.Status == MediaListStatusCompleted || *list.Status == MediaListStatusRepeating || *list.Status == MediaListStatusPaused) || list.Entries == nil {
continue
}
for _, entry := range list.Entries {
if _, ok := ids[entry.GetMedia().GetID()]; !ok {
edges := entry.GetMedia().GetRelations().GetEdges()
var sequel *BaseAnime
for _, edge := range edges {
if edge.GetRelationType() != nil && *edge.GetRelationType() == MediaRelationSequel {
sequel = edge.GetNode()
break
}
}
if sequel == nil {
continue
}
// Check if sequel is already in the list
_, found := animeCollectionWithRelations.FindAnime(sequel.GetID())
if found {
continue
}
if *sequel.GetStatus() == MediaStatusFinished || *sequel.GetStatus() == MediaStatusReleasing {
ids[sequel.GetID()] = struct{}{}
}
}
}
}
idsSlice := make([]int, 0, len(ids))
for id := range ids {
idsSlice = append(idsSlice, id)
}
if len(idsSlice) == 0 {
return []*BaseAnime{}, nil
}
if len(idsSlice) > 10 {
idsSlice = idsSlice[:10]
}
variables["ids"] = idsSlice
variables["inCollection"] = false
variables["sort"] = MediaSortStartDateDesc
// Event
reqEvent := &ListMissedSequelsRequestedEvent{
AnimeCollectionWithRelations: animeCollectionWithRelations,
Variables: variables,
List: make([]*BaseAnime, 0),
Query: SearchBaseAnimeByIdsDocument,
}
err = hook.GlobalHookManager.OnListMissedSequelsRequested().Trigger(reqEvent)
if err != nil {
return nil, err
}
// If the hook prevented the default behavior, return the data
if reqEvent.DefaultPrevented {
return reqEvent.List, nil
}
requestBody, err := json.Marshal(map[string]interface{}{
"query": reqEvent.Query,
"variables": reqEvent.Variables,
})
if err != nil {
return nil, err
}
data, err := customQuery(requestBody, logger, token)
if err != nil {
return nil, err
}
m, err := json.Marshal(data)
if err != nil {
return nil, err
}
var searchRes *SearchBaseAnimeByIds
if err := json.Unmarshal(m, &searchRes); err != nil {
return nil, err
}
if searchRes == nil || searchRes.Page == nil || searchRes.Page.Media == nil {
return nil, fmt.Errorf("no data found")
}
// Event
event := &ListMissedSequelsEvent{
List: searchRes.Page.Media,
}
err = hook.GlobalHookManager.OnListMissedSequels().Trigger(event)
if err != nil {
return nil, err
}
return event.List, nil
}
func ListAnimeM(
Page *int,
Search *string,
PerPage *int,
Sort []*MediaSort,
Status []*MediaStatus,
Genres []*string,
AverageScoreGreater *int,
Season *MediaSeason,
SeasonYear *int,
Format *MediaFormat,
IsAdult *bool,
logger *zerolog.Logger,
token string,
) (*ListAnime, error) {
variables := map[string]interface{}{}
if Page != nil {
variables["page"] = *Page
}
if Search != nil {
variables["search"] = *Search
}
if PerPage != nil {
variables["perPage"] = *PerPage
}
if Sort != nil {
variables["sort"] = Sort
}
if Status != nil {
variables["status"] = Status
}
if Genres != nil {
variables["genres"] = Genres
}
if AverageScoreGreater != nil {
variables["averageScore_greater"] = *AverageScoreGreater
}
if Season != nil {
variables["season"] = *Season
}
if SeasonYear != nil {
variables["seasonYear"] = *SeasonYear
}
if Format != nil {
variables["format"] = *Format
}
if IsAdult != nil {
variables["isAdult"] = *IsAdult
}
requestBody, err := json.Marshal(map[string]interface{}{
"query": ListAnimeDocument,
"variables": variables,
})
if err != nil {
return nil, err
}
data, err := customQuery(requestBody, logger, token)
if err != nil {
return nil, err
}
var listMediaF ListAnime
m, err := json.Marshal(data)
if err != nil {
return nil, err
}
if err := json.Unmarshal(m, &listMediaF); err != nil {
return nil, err
}
return &listMediaF, nil
}
func ListMangaM(
Page *int,
Search *string,
PerPage *int,
Sort []*MediaSort,
Status []*MediaStatus,
Genres []*string,
AverageScoreGreater *int,
Year *int,
Format *MediaFormat,
CountryOfOrigin *string,
IsAdult *bool,
logger *zerolog.Logger,
token string,
) (*ListManga, error) {
variables := map[string]interface{}{}
if Page != nil {
variables["page"] = *Page
}
if Search != nil {
variables["search"] = *Search
}
if PerPage != nil {
variables["perPage"] = *PerPage
}
if Sort != nil {
variables["sort"] = Sort
}
if Status != nil {
variables["status"] = Status
}
if Genres != nil {
variables["genres"] = Genres
}
if AverageScoreGreater != nil {
variables["averageScore_greater"] = *AverageScoreGreater * 10
}
if Year != nil {
variables["startDate_greater"] = lo.ToPtr(fmt.Sprintf("%d0000", *Year))
variables["startDate_lesser"] = lo.ToPtr(fmt.Sprintf("%d0000", *Year+1))
}
if Format != nil {
variables["format"] = *Format
}
if CountryOfOrigin != nil {
variables["countryOfOrigin"] = *CountryOfOrigin
}
if IsAdult != nil {
variables["isAdult"] = *IsAdult
}
requestBody, err := json.Marshal(map[string]interface{}{
"query": ListMangaDocument,
"variables": variables,
})
if err != nil {
return nil, err
}
data, err := customQuery(requestBody, logger, token)
if err != nil {
return nil, err
}
var listMediaF ListManga
m, err := json.Marshal(data)
if err != nil {
return nil, err
}
if err := json.Unmarshal(m, &listMediaF); err != nil {
return nil, err
}
return &listMediaF, nil
}
func ListRecentAiringAnimeM(
Page *int,
Search *string,
PerPage *int,
AiringAtGreater *int,
AiringAtLesser *int,
NotYetAired *bool,
Sort []*AiringSort,
logger *zerolog.Logger,
token string,
) (*ListRecentAnime, error) {
variables := map[string]interface{}{}
if Page != nil {
variables["page"] = *Page
}
if Search != nil {
variables["search"] = *Search
}
if PerPage != nil {
variables["perPage"] = *PerPage
}
if AiringAtGreater != nil {
variables["airingAt_greater"] = *AiringAtGreater
}
if AiringAtLesser != nil {
variables["airingAt_lesser"] = *AiringAtLesser
}
if NotYetAired != nil {
variables["notYetAired"] = *NotYetAired
}
if Sort != nil {
variables["sort"] = Sort
} else {
variables["sort"] = []*AiringSort{lo.ToPtr(AiringSortTimeDesc)}
}
requestBody, err := json.Marshal(map[string]interface{}{
"query": ListRecentAiringAnimeQuery,
"variables": variables,
})
if err != nil {
return nil, err
}
data, err := customQuery(requestBody, logger, token)
if err != nil {
return nil, err
}
var listMediaF ListRecentAnime
m, err := json.Marshal(data)
if err != nil {
return nil, err
}
if err := json.Unmarshal(m, &listMediaF); err != nil {
return nil, err
}
return &listMediaF, nil
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
func ListAnimeCacheKey(
Page *int,
Search *string,
PerPage *int,
Sort []*MediaSort,
Status []*MediaStatus,
Genres []*string,
AverageScoreGreater *int,
Season *MediaSeason,
SeasonYear *int,
Format *MediaFormat,
IsAdult *bool,
) string {
key := "ListAnime"
if Page != nil {
key += fmt.Sprintf("_%d", *Page)
}
if Search != nil {
key += fmt.Sprintf("_%s", *Search)
}
if PerPage != nil {
key += fmt.Sprintf("_%d", *PerPage)
}
if Sort != nil {
key += fmt.Sprintf("_%v", Sort)
}
if Status != nil {
key += fmt.Sprintf("_%v", Status)
}
if Genres != nil {
key += fmt.Sprintf("_%v", Genres)
}
if AverageScoreGreater != nil {
key += fmt.Sprintf("_%d", *AverageScoreGreater)
}
if Season != nil {
key += fmt.Sprintf("_%s", *Season)
}
if SeasonYear != nil {
key += fmt.Sprintf("_%d", *SeasonYear)
}
if Format != nil {
key += fmt.Sprintf("_%s", *Format)
}
if IsAdult != nil {
key += fmt.Sprintf("_%t", *IsAdult)
}
return key
}
func ListMangaCacheKey(
Page *int,
Search *string,
PerPage *int,
Sort []*MediaSort,
Status []*MediaStatus,
Genres []*string,
AverageScoreGreater *int,
Season *MediaSeason,
SeasonYear *int,
Format *MediaFormat,
CountryOfOrigin *string,
IsAdult *bool,
) string {
key := "ListManga"
if Page != nil {
key += fmt.Sprintf("_%d", *Page)
}
if Search != nil {
key += fmt.Sprintf("_%s", *Search)
}
if PerPage != nil {
key += fmt.Sprintf("_%d", *PerPage)
}
if Sort != nil {
key += fmt.Sprintf("_%v", Sort)
}
if Status != nil {
key += fmt.Sprintf("_%v", Status)
}
if Genres != nil {
key += fmt.Sprintf("_%v", Genres)
}
if AverageScoreGreater != nil {
key += fmt.Sprintf("_%d", *AverageScoreGreater)
}
if Season != nil {
key += fmt.Sprintf("_%s", *Season)
}
if SeasonYear != nil {
key += fmt.Sprintf("_%d", *SeasonYear)
}
if Format != nil {
key += fmt.Sprintf("_%s", *Format)
}
if CountryOfOrigin != nil {
key += fmt.Sprintf("_%s", *CountryOfOrigin)
}
if IsAdult != nil {
key += fmt.Sprintf("_%t", *IsAdult)
}
return key
}
const ListRecentAiringAnimeQuery = `query ListRecentAnime ($page: Int, $perPage: Int, $airingAt_greater: Int, $airingAt_lesser: Int, $sort: [AiringSort], $notYetAired: Boolean = false) {
Page(page: $page, perPage: $perPage) {
pageInfo {
hasNextPage
total
perPage
currentPage
lastPage
}
airingSchedules(notYetAired: $notYetAired, sort: $sort, airingAt_greater: $airingAt_greater, airingAt_lesser: $airingAt_lesser) {
id
airingAt
episode
timeUntilAiring
media {
... baseAnime
}
}
}
}
fragment baseAnime on Media {
id
idMal
siteUrl
status(version: 2)
season
type
format
bannerImage
episodes
synonyms
isAdult
countryOfOrigin
meanScore
description
genres
duration
trailer {
id
site
thumbnail
}
title {
userPreferred
romaji
english
native
}
coverImage {
extraLarge
large
medium
color
}
startDate {
year
month
day
}
endDate {
year
month
day
}
nextAiringEpisode {
airingAt
timeUntilAiring
episode
}
}
`
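
A hypothetical sketch showing how the cache key helpers above are meant to be used: identical filter sets always produce the same string, so callers can memoize ListAnimeM results keyed by ListAnimeCacheKey (the exact key value depends on the generated enum strings):

func exampleListAnimeKey() string {
	return ListAnimeCacheKey(
		lo.ToPtr(1),        // Page
		nil,                // Search
		lo.ToPtr(20),       // PerPage
		nil, nil, nil, nil, // Sort, Status, Genres, AverageScoreGreater
		lo.ToPtr(MediaSeasonSummer), // Season
		lo.ToPtr(2024),              // SeasonYear
		nil, nil,                    // Format, IsAdult
	)
}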

View File

@@ -0,0 +1,123 @@
package anilist
type MangaList = MangaCollection_MediaListCollection_Lists
type MangaListEntry = MangaCollection_MediaListCollection_Lists_Entries
func (ac *MangaCollection) GetListEntryFromMangaId(id int) (*MangaListEntry, bool) {
if ac == nil || ac.MediaListCollection == nil {
return nil, false
}
var entry *MangaCollection_MediaListCollection_Lists_Entries
for _, l := range ac.MediaListCollection.Lists {
if l.Entries == nil || len(l.Entries) == 0 {
continue
}
for _, e := range l.Entries {
if e.Media.ID == id {
entry = e
break
}
}
}
if entry == nil {
return nil, false
}
return entry, true
}
func (m *BaseManga) GetTitleSafe() string {
if m.GetTitle().GetEnglish() != nil {
return *m.GetTitle().GetEnglish()
}
if m.GetTitle().GetRomaji() != nil {
return *m.GetTitle().GetRomaji()
}
return "N/A"
}
func (m *BaseManga) GetRomajiTitleSafe() string {
if m.GetTitle().GetRomaji() != nil {
return *m.GetTitle().GetRomaji()
}
if m.GetTitle().GetEnglish() != nil {
return *m.GetTitle().GetEnglish()
}
return "N/A"
}
func (m *BaseManga) GetPreferredTitle() string {
if m.GetTitle().GetUserPreferred() != nil {
return *m.GetTitle().GetUserPreferred()
}
return m.GetTitleSafe()
}
func (m *BaseManga) GetCoverImageSafe() string {
if m.GetCoverImage().GetExtraLarge() != nil {
return *m.GetCoverImage().GetExtraLarge()
}
if m.GetCoverImage().GetLarge() != nil {
return *m.GetCoverImage().GetLarge()
}
if m.GetBannerImage() != nil {
return *m.GetBannerImage()
}
return ""
}
func (m *BaseManga) GetBannerImageSafe() string {
if m.GetBannerImage() != nil {
return *m.GetBannerImage()
}
return m.GetCoverImageSafe()
}
func (m *BaseManga) GetAllTitles() []*string {
titles := make([]*string, 0)
if m.HasRomajiTitle() {
titles = append(titles, m.Title.Romaji)
}
if m.HasEnglishTitle() {
titles = append(titles, m.Title.English)
}
if m.HasSynonyms() && len(m.Synonyms) > 1 {
titles = append(titles, m.Synonyms...)
}
return titles
}
func (m *BaseManga) GetMainTitlesDeref() []string {
titles := make([]string, 0)
if m.HasRomajiTitle() {
titles = append(titles, *m.Title.Romaji)
}
if m.HasEnglishTitle() {
titles = append(titles, *m.Title.English)
}
return titles
}
func (m *BaseManga) HasEnglishTitle() bool {
return m.Title.English != nil
}
func (m *BaseManga) HasRomajiTitle() bool {
return m.Title.Romaji != nil
}
func (m *BaseManga) HasSynonyms() bool {
return m.Synonyms != nil
}
func (m *BaseManga) GetStartYearSafe() int {
if m.GetStartDate() != nil && m.GetStartDate().GetYear() != nil {
return *m.GetStartDate().GetYear()
}
return 0
}
func (m *MangaListEntry) GetRepeatSafe() int {
if m.Repeat == nil {
return 0
}
return *m.Repeat
}

View File

@@ -0,0 +1,25 @@
package anilist
import (
"seanime/internal/util/result"
)
type BaseAnimeCache struct {
*result.Cache[int, *BaseAnime]
}
// NewBaseAnimeCache returns a new result.Cache[int, *BaseAnime].
// It is used to temporarily store the results of FetchMediaTree calls.
func NewBaseAnimeCache() *BaseAnimeCache {
return &BaseAnimeCache{result.NewCache[int, *BaseAnime]()}
}
type CompleteAnimeCache struct {
*result.Cache[int, *CompleteAnime]
}
// NewCompleteAnimeCache returns a new result.Cache[int, *CompleteAnime].
// It is used to temporarily store the results of FetchMediaTree calls.
func NewCompleteAnimeCache() *CompleteAnimeCache {
return &CompleteAnimeCache{result.NewCache[int, *CompleteAnime]()}
}
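
A minimal usage sketch, assuming the caller already has a media ID and fetches from AniList on a cache miss:

func getCachedOrNil(cache *CompleteAnimeCache, mediaId int) (*CompleteAnime, bool) {
	if media, ok := cache.Get(mediaId); ok {
		return media, true // cache hit, no network call needed
	}
	// On a miss the caller would fetch from AniList and then call cache.Set(mediaId, fetched).
	return nil, false
}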

View File

@@ -0,0 +1,574 @@
package anilist
import (
"seanime/internal/util/comparison"
"github.com/samber/lo"
)
func (m *BaseAnime) GetTitleSafe() string {
if m.GetTitle().GetEnglish() != nil {
return *m.GetTitle().GetEnglish()
}
if m.GetTitle().GetRomaji() != nil {
return *m.GetTitle().GetRomaji()
}
return ""
}
func (m *BaseAnime) GetEnglishTitleSafe() string {
if m.GetTitle().GetEnglish() != nil {
return *m.GetTitle().GetEnglish()
}
return ""
}
func (m *BaseAnime) GetRomajiTitleSafe() string {
if m.GetTitle().GetRomaji() != nil {
return *m.GetTitle().GetRomaji()
}
if m.GetTitle().GetEnglish() != nil {
return *m.GetTitle().GetEnglish()
}
return ""
}
func (m *BaseAnime) GetPreferredTitle() string {
if m.GetTitle().GetUserPreferred() != nil {
return *m.GetTitle().GetUserPreferred()
}
return m.GetTitleSafe()
}
func (m *BaseAnime) GetCoverImageSafe() string {
if m.GetCoverImage().GetExtraLarge() != nil {
return *m.GetCoverImage().GetExtraLarge()
}
if m.GetCoverImage().GetLarge() != nil {
return *m.GetCoverImage().GetLarge()
}
if m.GetBannerImage() != nil {
return *m.GetBannerImage()
}
return ""
}
func (m *BaseAnime) GetBannerImageSafe() string {
if m.GetBannerImage() != nil {
return *m.GetBannerImage()
}
return m.GetCoverImageSafe()
}
func (m *BaseAnime) IsMovieOrSingleEpisode() bool {
if m == nil {
return false
}
if m.GetTotalEpisodeCount() == 1 {
return true
}
return false
}
func (m *BaseAnime) GetSynonymsDeref() []string {
if m.Synonyms == nil {
return nil
}
return lo.Map(m.Synonyms, func(s *string, i int) string { return *s })
}
func (m *BaseAnime) GetSynonymsContainingSeason() []string {
if m.Synonyms == nil {
return nil
}
return lo.Filter(lo.Map(m.Synonyms, func(s *string, i int) string { return *s }), func(s string, i int) bool { return comparison.ValueContainsSeason(s) })
}
func (m *BaseAnime) GetStartYearSafe() int {
if m == nil || m.StartDate == nil || m.StartDate.Year == nil {
return 0
}
return *m.StartDate.Year
}
func (m *BaseAnime) IsMovie() bool {
if m == nil {
return false
}
if m.Format == nil {
return false
}
return *m.Format == MediaFormatMovie
}
func (m *BaseAnime) IsFinished() bool {
if m == nil {
return false
}
if m.Status == nil {
return false
}
return *m.Status == MediaStatusFinished
}
func (m *BaseAnime) GetAllTitles() []*string {
titles := make([]*string, 0)
if m.HasRomajiTitle() {
titles = append(titles, m.Title.Romaji)
}
if m.HasEnglishTitle() {
titles = append(titles, m.Title.English)
}
if m.HasSynonyms() && len(m.Synonyms) > 1 {
titles = append(titles, lo.Filter(m.Synonyms, func(s *string, i int) bool { return comparison.ValueContainsSeason(*s) })...)
}
return titles
}
func (m *BaseAnime) GetAllTitlesDeref() []string {
titles := make([]string, 0)
if m.HasRomajiTitle() {
titles = append(titles, *m.Title.Romaji)
}
if m.HasEnglishTitle() {
titles = append(titles, *m.Title.English)
}
if m.HasSynonyms() && len(m.Synonyms) > 1 {
syn := lo.Filter(m.Synonyms, func(s *string, i int) bool { return comparison.ValueContainsSeason(*s) })
for _, s := range syn {
titles = append(titles, *s)
}
}
return titles
}
func (m *BaseAnime) GetMainTitles() []*string {
titles := make([]*string, 0)
if m.HasRomajiTitle() {
titles = append(titles, m.Title.Romaji)
}
if m.HasEnglishTitle() {
titles = append(titles, m.Title.English)
}
return titles
}
func (m *BaseAnime) GetMainTitlesDeref() []string {
titles := make([]string, 0)
if m.HasRomajiTitle() {
titles = append(titles, *m.Title.Romaji)
}
if m.HasEnglishTitle() {
titles = append(titles, *m.Title.English)
}
return titles
}
// GetCurrentEpisodeCount returns the current episode number for that media and -1 if it doesn't have one.
// i.e. -1 is returned if the media has no episodes AND the next airing episode is not set.
func (m *BaseAnime) GetCurrentEpisodeCount() int {
ceil := -1
if m.Episodes != nil {
ceil = *m.Episodes
}
if m.NextAiringEpisode != nil {
if m.NextAiringEpisode.Episode > 0 {
ceil = m.NextAiringEpisode.Episode - 1
}
}
return ceil
}
func (m *BaseAnime) GetCurrentEpisodeCountOrNil() *int {
n := m.GetCurrentEpisodeCount()
if n == -1 {
return nil
}
return &n
}
// GetTotalEpisodeCount returns the total episode number for that media and -1 if it doesn't have one
func (m *BaseAnime) GetTotalEpisodeCount() int {
ceil := -1
if m.Episodes != nil {
ceil = *m.Episodes
}
return ceil
}
// GetTotalEpisodeCountOrNil returns the total episode count for that media, or nil if it doesn't have one
func (m *BaseAnime) GetTotalEpisodeCountOrNil() *int {
return m.Episodes
}
// GetPossibleSeasonNumber returns the possible season number for that media and -1 if it doesn't have one.
// It looks at the synonyms and returns the highest season number found.
func (m *BaseAnime) GetPossibleSeasonNumber() int {
if m == nil || m.Synonyms == nil || len(m.Synonyms) == 0 {
return -1
}
titles := lo.Filter(m.Synonyms, func(s *string, i int) bool { return comparison.ValueContainsSeason(*s) })
if m.HasEnglishTitle() {
titles = append(titles, m.Title.English)
}
if m.HasRomajiTitle() {
titles = append(titles, m.Title.Romaji)
}
seasons := lo.Map(titles, func(s *string, i int) int { return comparison.ExtractSeasonNumber(*s) })
return lo.Max(seasons)
}
func (m *BaseAnime) HasEnglishTitle() bool {
return m.Title.English != nil
}
func (m *BaseAnime) HasRomajiTitle() bool {
return m.Title.Romaji != nil
}
func (m *BaseAnime) HasSynonyms() bool {
return m.Synonyms != nil
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
func (m *CompleteAnime) GetTitleSafe() string {
if m.GetTitle().GetEnglish() != nil {
return *m.GetTitle().GetEnglish()
}
if m.GetTitle().GetRomaji() != nil {
return *m.GetTitle().GetRomaji()
}
return "N/A"
}
func (m *CompleteAnime) GetRomajiTitleSafe() string {
if m.GetTitle().GetRomaji() != nil {
return *m.GetTitle().GetRomaji()
}
if m.GetTitle().GetEnglish() != nil {
return *m.GetTitle().GetEnglish()
}
return "N/A"
}
func (m *CompleteAnime) GetPreferredTitle() string {
if m.GetTitle().GetUserPreferred() != nil {
return *m.GetTitle().GetUserPreferred()
}
return m.GetTitleSafe()
}
func (m *CompleteAnime) GetCoverImageSafe() string {
if m.GetCoverImage().GetExtraLarge() != nil {
return *m.GetCoverImage().GetExtraLarge()
}
if m.GetCoverImage().GetLarge() != nil {
return *m.GetCoverImage().GetLarge()
}
if m.GetBannerImage() != nil {
return *m.GetBannerImage()
}
return ""
}
func (m *CompleteAnime) GetBannerImageSafe() string {
if m.GetBannerImage() != nil {
return *m.GetBannerImage()
}
return m.GetCoverImageSafe()
}
func (m *CompleteAnime) IsMovieOrSingleEpisode() bool {
if m == nil {
return false
}
if m.GetTotalEpisodeCount() == 1 {
return true
}
return false
}
func (m *CompleteAnime) IsMovie() bool {
if m == nil {
return false
}
if m.Format == nil {
return false
}
return *m.Format == MediaFormatMovie
}
func (m *CompleteAnime) IsFinished() bool {
if m == nil {
return false
}
if m.Status == nil {
return false
}
return *m.Status == MediaStatusFinished
}
func (m *CompleteAnime) GetAllTitles() []*string {
titles := make([]*string, 0)
if m.HasRomajiTitle() {
titles = append(titles, m.Title.Romaji)
}
if m.HasEnglishTitle() {
titles = append(titles, m.Title.English)
}
if m.HasSynonyms() && len(m.Synonyms) > 1 {
titles = append(titles, lo.Filter(m.Synonyms, func(s *string, i int) bool { return comparison.ValueContainsSeason(*s) })...)
}
return titles
}
func (m *CompleteAnime) GetAllTitlesDeref() []string {
titles := make([]string, 0)
if m.HasRomajiTitle() {
titles = append(titles, *m.Title.Romaji)
}
if m.HasEnglishTitle() {
titles = append(titles, *m.Title.English)
}
if m.HasSynonyms() && len(m.Synonyms) > 1 {
syn := lo.Filter(m.Synonyms, func(s *string, i int) bool { return comparison.ValueContainsSeason(*s) })
for _, s := range syn {
titles = append(titles, *s)
}
}
return titles
}
// GetCurrentEpisodeCount returns the current episode number for that media and -1 if it doesn't have one.
// i.e. -1 is returned if the media has no episodes AND the next airing episode is not set.
func (m *CompleteAnime) GetCurrentEpisodeCount() int {
ceil := -1
if m.Episodes != nil {
ceil = *m.Episodes
}
if m.NextAiringEpisode != nil {
if m.NextAiringEpisode.Episode > 0 {
ceil = m.NextAiringEpisode.Episode - 1
}
}
return ceil
}
// GetTotalEpisodeCount returns the total episode number for that media and -1 if it doesn't have one
func (m *CompleteAnime) GetTotalEpisodeCount() int {
ceil := -1
if m.Episodes != nil {
ceil = *m.Episodes
}
return ceil
}
// GetPossibleSeasonNumber returns the possible season number for that media and -1 if it doesn't have one.
// It looks at the synonyms and returns the highest season number found.
func (m *CompleteAnime) GetPossibleSeasonNumber() int {
if m == nil || m.Synonyms == nil || len(m.Synonyms) == 0 {
return -1
}
titles := lo.Filter(m.Synonyms, func(s *string, i int) bool { return comparison.ValueContainsSeason(*s) })
if m.HasEnglishTitle() {
titles = append(titles, m.Title.English)
}
if m.HasRomajiTitle() {
titles = append(titles, m.Title.Romaji)
}
seasons := lo.Map(titles, func(s *string, i int) int { return comparison.ExtractSeasonNumber(*s) })
return lo.Max(seasons)
}
func (m *CompleteAnime) HasEnglishTitle() bool {
return m.Title.English != nil
}
func (m *CompleteAnime) HasRomajiTitle() bool {
return m.Title.Romaji != nil
}
func (m *CompleteAnime) HasSynonyms() bool {
return m.Synonyms != nil
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
var EdgeNarrowFormats = []MediaFormat{MediaFormatTv, MediaFormatTvShort}
var EdgeBroaderFormats = []MediaFormat{MediaFormatTv, MediaFormatTvShort, MediaFormatOna, MediaFormatOva, MediaFormatMovie, MediaFormatSpecial}
func (m *CompleteAnime) FindEdge(relation string, formats []MediaFormat) (*BaseAnime, bool) {
if m.GetRelations() == nil {
return nil, false
}
edges := m.GetRelations().GetEdges()
for _, edge := range edges {
if edge.GetRelationType().String() == relation {
for _, fm := range formats {
if fm.String() == edge.GetNode().GetFormat().String() {
return edge.GetNode(), true
}
}
}
}
return nil, false
}
func (e *CompleteAnime_Relations_Edges) IsBroadRelationFormat() bool {
if e.GetNode() == nil {
return false
}
if e.GetNode().GetFormat() == nil {
return false
}
for _, fm := range EdgeBroaderFormats {
if fm.String() == e.GetNode().GetFormat().String() {
return true
}
}
return false
}
func (e *CompleteAnime_Relations_Edges) IsNarrowRelationFormat() bool {
if e.GetNode() == nil {
return false
}
if e.GetNode().GetFormat() == nil {
return false
}
for _, fm := range EdgeNarrowFormats {
if fm.String() == e.GetNode().GetFormat().String() {
return true
}
}
return false
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
func (m *CompleteAnime) ToBaseAnime() *BaseAnime {
if m == nil {
return nil
}
var trailer *BaseAnime_Trailer
if m.GetTrailer() != nil {
trailer = &BaseAnime_Trailer{
ID: m.GetTrailer().GetID(),
Site: m.GetTrailer().GetSite(),
Thumbnail: m.GetTrailer().GetThumbnail(),
}
}
var nextAiringEpisode *BaseAnime_NextAiringEpisode
if m.GetNextAiringEpisode() != nil {
nextAiringEpisode = &BaseAnime_NextAiringEpisode{
AiringAt: m.GetNextAiringEpisode().GetAiringAt(),
TimeUntilAiring: m.GetNextAiringEpisode().GetTimeUntilAiring(),
Episode: m.GetNextAiringEpisode().GetEpisode(),
}
}
var startDate *BaseAnime_StartDate
if m.GetStartDate() != nil {
startDate = &BaseAnime_StartDate{
Year: m.GetStartDate().GetYear(),
Month: m.GetStartDate().GetMonth(),
Day: m.GetStartDate().GetDay(),
}
}
var endDate *BaseAnime_EndDate
if m.GetEndDate() != nil {
endDate = &BaseAnime_EndDate{
Year: m.GetEndDate().GetYear(),
Month: m.GetEndDate().GetMonth(),
Day: m.GetEndDate().GetDay(),
}
}
return &BaseAnime{
ID: m.GetID(),
IDMal: m.GetIDMal(),
SiteURL: m.GetSiteURL(),
Format: m.GetFormat(),
Episodes: m.GetEpisodes(),
Status: m.GetStatus(),
Synonyms: m.GetSynonyms(),
BannerImage: m.GetBannerImage(),
Season: m.GetSeason(),
SeasonYear: m.GetSeasonYear(),
Type: m.GetType(),
IsAdult: m.GetIsAdult(),
CountryOfOrigin: m.GetCountryOfOrigin(),
Genres: m.GetGenres(),
Duration: m.GetDuration(),
Description: m.GetDescription(),
MeanScore: m.GetMeanScore(),
Trailer: trailer,
Title: &BaseAnime_Title{
UserPreferred: m.GetTitle().GetUserPreferred(),
Romaji: m.GetTitle().GetRomaji(),
English: m.GetTitle().GetEnglish(),
Native: m.GetTitle().GetNative(),
},
CoverImage: &BaseAnime_CoverImage{
ExtraLarge: m.GetCoverImage().GetExtraLarge(),
Large: m.GetCoverImage().GetLarge(),
Medium: m.GetCoverImage().GetMedium(),
Color: m.GetCoverImage().GetColor(),
},
StartDate: startDate,
EndDate: endDate,
NextAiringEpisode: nextAiringEpisode,
}
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
func (m *AnimeListEntry) GetProgressSafe() int {
if m == nil {
return 0
}
if m.Progress == nil {
return 0
}
return *m.Progress
}
func (m *AnimeListEntry) GetScoreSafe() float64 {
if m == nil {
return 0
}
if m.Score == nil {
return 0
}
return *m.Score
}
func (m *AnimeListEntry) GetRepeatSafe() int {
if m == nil {
return 0
}
if m.Repeat == nil {
return 0
}
return *m.Repeat
}
func (m *AnimeListEntry) GetStatusSafe() MediaListStatus {
if m == nil {
return ""
}
if m.Status == nil {
return ""
}
return *m.Status
}
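
A hypothetical helper built on the nil-safe accessors above, showing how the -1 sentinel from GetCurrentEpisodeCount is typically handled:

func isCaughtUp(entry *AnimeListEntry, media *BaseAnime) bool {
	cur := media.GetCurrentEpisodeCount()
	if cur == -1 {
		return false // no episode information available yet
	}
	return entry.GetProgressSafe() >= cur
}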

View File

@@ -0,0 +1,155 @@
package anilist
import (
"context"
"github.com/samber/lo"
"seanime/internal/util"
"seanime/internal/util/limiter"
"seanime/internal/util/result"
"sync"
)
type (
CompleteAnimeRelationTree struct {
*result.Map[int, *CompleteAnime]
}
FetchMediaTreeRelation = string
)
const (
FetchMediaTreeSequels FetchMediaTreeRelation = "sequels"
FetchMediaTreePrequels FetchMediaTreeRelation = "prequels"
FetchMediaTreeAll FetchMediaTreeRelation = "all"
)
// NewCompleteAnimeRelationTree returns a new result.Map[int, *CompleteAnime].
// It is used to store the results of BaseAnime.FetchMediaTree or CompleteAnime.FetchMediaTree calls.
func NewCompleteAnimeRelationTree() *CompleteAnimeRelationTree {
return &CompleteAnimeRelationTree{result.NewResultMap[int, *CompleteAnime]()}
}
func (m *BaseAnime) FetchMediaTree(rel FetchMediaTreeRelation, anilistClient AnilistClient, rl *limiter.Limiter, tree *CompleteAnimeRelationTree, cache *CompleteAnimeCache) (err error) {
if m == nil {
return nil
}
defer util.HandlePanicInModuleWithError("anilist/BaseAnime.FetchMediaTree", &err)
rl.Wait()
res, err := anilistClient.CompleteAnimeByID(context.Background(), &m.ID)
if err != nil {
return err
}
return res.GetMedia().FetchMediaTree(rel, anilistClient, rl, tree, cache)
}
// FetchMediaTree populates the CompleteAnimeRelationTree with the given media's sequels and prequels.
// It also takes a CompleteAnimeCache to store the fetched media in and avoid duplicate fetches.
// It also takes a limiter.Limiter to limit the number of requests made to the AniList API.
func (m *CompleteAnime) FetchMediaTree(rel FetchMediaTreeRelation, anilistClient AnilistClient, rl *limiter.Limiter, tree *CompleteAnimeRelationTree, cache *CompleteAnimeCache) (err error) {
if m == nil {
return nil
}
defer util.HandlePanicInModuleWithError("anilist/CompleteAnime.FetchMediaTree", &err)
if tree.Has(m.ID) {
cache.Set(m.ID, m)
return nil
}
cache.Set(m.ID, m)
tree.Set(m.ID, m)
if m.Relations == nil {
return nil
}
// Get all edges
edges := m.GetRelations().GetEdges()
// Filter edges
edges = lo.Filter(edges, func(_edge *CompleteAnime_Relations_Edges, _ int) bool {
return (*_edge.RelationType == MediaRelationSequel || *_edge.RelationType == MediaRelationPrequel) &&
*_edge.GetNode().Status != MediaStatusNotYetReleased &&
_edge.IsBroadRelationFormat() && !tree.Has(_edge.GetNode().ID)
})
if len(edges) == 0 {
return nil
}
doneCh := make(chan struct{})
processEdges(edges, rel, anilistClient, rl, tree, cache, doneCh)
// Block until processEdges signals that all edges have been handled.
<-doneCh
return nil
}
// processEdges fetches the next node(s) for each edge in parallel.
func processEdges(edges []*CompleteAnime_Relations_Edges, rel FetchMediaTreeRelation, anilistClient AnilistClient, rl *limiter.Limiter, tree *CompleteAnimeRelationTree, cache *CompleteAnimeCache, doneCh chan struct{}) {
var wg sync.WaitGroup
wg.Add(len(edges))
for i, item := range edges {
go func(edge *CompleteAnime_Relations_Edges, _ int) {
defer wg.Done()
if edge == nil {
return
}
processEdge(edge, rel, anilistClient, rl, tree, cache)
}(item, i)
}
wg.Wait()
// Signal the caller that all edge goroutines have finished.
close(doneCh)
}
func processEdge(edge *CompleteAnime_Relations_Edges, rel FetchMediaTreeRelation, anilistClient AnilistClient, rl *limiter.Limiter, tree *CompleteAnimeRelationTree, cache *CompleteAnimeCache) {
defer util.HandlePanicInModuleThen("anilist/processEdge", func() {})
cacheV, ok := cache.Get(edge.GetNode().ID)
edgeCompleteAnime := cacheV
if !ok {
rl.Wait()
// Fetch the next node
res, err := anilistClient.CompleteAnimeByID(context.Background(), &edge.GetNode().ID)
if err == nil {
edgeCompleteAnime = res.GetMedia()
cache.Set(edgeCompleteAnime.ID, edgeCompleteAnime)
}
}
if edgeCompleteAnime == nil {
return
}
// Get the relation type to fetch for the next node
edgeRel := getEdgeRelation(edge, rel)
// Fetch the next node(s)
err := edgeCompleteAnime.FetchMediaTree(edgeRel, anilistClient, rl, tree, cache)
if err != nil {
return
}
}
// getEdgeRelation returns the relation to fetch for the next node based on the current edge and the relation to fetch.
// If the relation to fetch is FetchMediaTreeAll, it will return FetchMediaTreePrequels for prequels and FetchMediaTreeSequels for sequels.
//
// For example, if the current node is a sequel and the relation to fetch is FetchMediaTreeAll, it will return FetchMediaTreeSequels so that
// only sequels are fetched for the next node.
func getEdgeRelation(edge *CompleteAnime_Relations_Edges, rel FetchMediaTreeRelation) FetchMediaTreeRelation {
if rel == FetchMediaTreeAll {
if *edge.RelationType == MediaRelationPrequel {
return FetchMediaTreePrequels
}
if *edge.RelationType == MediaRelationSequel {
return FetchMediaTreeSequels
}
}
return rel
}

View File

@@ -0,0 +1,82 @@
package anilist
import (
"context"
"github.com/davecgh/go-spew/spew"
"github.com/stretchr/testify/assert"
"seanime/internal/test_utils"
"seanime/internal/util/limiter"
"testing"
)
func TestBaseAnime_FetchMediaTree_BaseAnime(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.Anilist())
anilistClient := TestGetMockAnilistClient()
lim := limiter.NewAnilistLimiter()
completeAnimeCache := NewCompleteAnimeCache()
tests := []struct {
name string
mediaId int
edgeIds []int
}{
{
name: "Bungo Stray Dogs",
mediaId: 103223,
edgeIds: []int{
21311, // BSD1
21679, // BSD2
103223, // BSD3
141249, // BSD4
163263, // BSD5
},
},
{
name: "Re:Zero",
mediaId: 21355,
edgeIds: []int{
21355, // Re:Zero 1
108632, // Re:Zero 2
119661, // Re:Zero 2 Part 2
163134, // Re:Zero 3
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
mediaF, err := anilistClient.CompleteAnimeByID(context.Background(), &tt.mediaId)
if assert.NoError(t, err) {
media := mediaF.GetMedia()
tree := NewCompleteAnimeRelationTree()
err = media.FetchMediaTree(
FetchMediaTreeAll,
anilistClient,
lim,
tree,
completeAnimeCache,
)
if assert.NoError(t, err) {
for _, treeId := range tt.edgeIds {
a, found := tree.Get(treeId)
assert.Truef(t, found, "expected tree to contain %d", treeId)
spew.Dump(a.GetTitleSafe())
}
}
}
})
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,456 @@
query AnimeCollection ($userName: String) {
MediaListCollection(userName: $userName, forceSingleCompletedList: true, type: ANIME) {
lists {
status
name
isCustomList
entries {
id
score(format: POINT_100)
progress
status
notes
repeat
private
startedAt {
year
month
day
}
completedAt {
year
month
day
}
media {
...baseAnime
}
}
}
}
}
query AnimeCollectionWithRelations ($userName: String) {
MediaListCollection(userName: $userName, forceSingleCompletedList: true, type: ANIME) {
lists {
status
name
isCustomList
entries {
id
score(format: POINT_100)
progress
status
notes
repeat
private
startedAt {
year
month
day
}
completedAt {
year
month
day
}
media {
...completeAnime
}
}
}
}
}
query BaseAnimeByMalId ($id: Int) {
Media(idMal: $id, type: ANIME) {
...baseAnime
}
}
query BaseAnimeById ($id: Int) {
Media(id: $id, type: ANIME) {
...baseAnime
}
}
query SearchBaseAnimeByIds ($ids: [Int], $page: Int, $perPage: Int, $status: [MediaStatus], $inCollection: Boolean, $sort: [MediaSort], $season: MediaSeason, $year: Int, $genre: String, $format: MediaFormat) {
Page(page: $page, perPage: $perPage) {
pageInfo {
hasNextPage
},
media(id_in: $ids, type: ANIME, status_in: $status, onList: $inCollection, sort: $sort, season: $season, seasonYear: $year, genre: $genre, format: $format) {
...baseAnime
}
}
}
query CompleteAnimeById ($id: Int) {
Media(id: $id, type: ANIME) {
...completeAnime
}
}
# For view (will be cached)
query AnimeDetailsById ($id: Int) {
Media(id: $id, type: ANIME) {
siteUrl
id
duration
genres
averageScore
popularity
meanScore
description
trailer {
id
site
thumbnail
}
startDate {
year
month
day
}
endDate {
year
month
day
}
studios(isMain: true) {
nodes {
name
id
}
}
characters(sort: [ROLE]) {
edges {
id
role
name
node {
...baseCharacter
}
}
}
staff(sort: [RELEVANCE]) {
edges {
role
node {
name {
full
}
id
}
}
}
rankings {
context
type
rank
year
format
allTime
season
}
recommendations(page: 1, perPage: 8, sort: RATING_DESC) {
edges {
node {
mediaRecommendation {
id
idMal
siteUrl
status(version: 2)
isAdult
season
type
format
meanScore
description
episodes
trailer {
id
site
thumbnail
}
startDate {
year
month
day
}
coverImage {
extraLarge
large
medium
color
}
bannerImage
title {
romaji
english
native
userPreferred
}
}
}
}
}
relations {
edges {
relationType(version: 2)
node {
...baseAnime
}
}
}
}
}
query ListAnime(
$page: Int
$search: String
$perPage: Int
$sort: [MediaSort]
$status: [MediaStatus]
$genres: [String]
$averageScore_greater: Int
$season: MediaSeason
$seasonYear: Int
$format: MediaFormat
$isAdult: Boolean
) {
Page(page: $page, perPage: $perPage) {
pageInfo {
hasNextPage
total
perPage
currentPage
lastPage
}
media(
type: ANIME
search: $search
sort: $sort
status_in: $status
isAdult: $isAdult
format: $format
genre_in: $genres
averageScore_greater: $averageScore_greater
season: $season
seasonYear: $seasonYear
format_not: MUSIC
) {
...baseAnime
}
}
}
query ListRecentAnime ($page: Int, $perPage: Int, $airingAt_greater: Int, $airingAt_lesser: Int, $notYetAired: Boolean = false) {
Page(page: $page, perPage: $perPage) {
pageInfo {
hasNextPage
total
perPage
currentPage
lastPage
}
airingSchedules(notYetAired: $notYetAired, sort: TIME_DESC, airingAt_greater: $airingAt_greater, airingAt_lesser: $airingAt_lesser) {
id
airingAt
episode
timeUntilAiring
media {
... baseAnime
}
}
}
}
fragment baseAnime on Media {
id
idMal
siteUrl
status(version: 2)
season
type
format
seasonYear
bannerImage
episodes
synonyms
isAdult
countryOfOrigin
meanScore
description
genres
duration
trailer {
id
site
thumbnail
}
title {
userPreferred
romaji
english
native
}
coverImage {
extraLarge
large
medium
color
}
startDate {
year
month
day
}
endDate {
year
month
day
}
nextAiringEpisode {
airingAt
timeUntilAiring
episode
}
}
fragment completeAnime on Media {
id
idMal
siteUrl
status(version: 2)
season
seasonYear
type
format
bannerImage
episodes
synonyms
isAdult
countryOfOrigin
meanScore
description
genres
duration
trailer {
id
site
thumbnail
}
title {
userPreferred
romaji
english
native
}
coverImage {
extraLarge
large
medium
color
}
startDate {
year
month
day
}
endDate {
year
month
day
}
nextAiringEpisode {
airingAt
timeUntilAiring
episode
}
relations {
edges {
relationType(version: 2)
node {
...baseAnime
}
}
}
}
fragment baseCharacter on Character {
id
isFavourite
gender
age
dateOfBirth {
year
month
day
}
name {
full
native
alternative
}
image {
large
}
description
siteUrl
}
query AnimeAiringSchedule($ids: [Int],$season: MediaSeason, $seasonYear: Int, $previousSeason: MediaSeason, $previousSeasonYear: Int, $nextSeason: MediaSeason, $nextSeasonYear: Int) {
ongoing: Page {
media(id_in: $ids, type: ANIME, season: $season, seasonYear: $seasonYear, onList: true) {
...animeSchedule
}
}
ongoingNext: Page(page: 2) {
media(id_in: $ids, type: ANIME, season: $season, seasonYear: $seasonYear, onList: true) {
...animeSchedule
}
}
upcoming: Page {
media(id_in: $ids, type: ANIME, season: $nextSeason, seasonYear: $nextSeasonYear, sort: [START_DATE], onList: true) {
...animeSchedule
}
}
upcomingNext: Page(page: 2) {
media(id_in: $ids, type: ANIME, season: $nextSeason, seasonYear: $nextSeasonYear, sort: [START_DATE], onList: true) {
...animeSchedule
}
}
preceding: Page {
media(id_in: $ids, type: ANIME, season: $previousSeason, seasonYear: $previousSeasonYear, onList: true) {
...animeSchedule
}
}
}
query AnimeAiringScheduleRaw($ids: [Int]) {
Page {
media(id_in: $ids, type: ANIME, onList: true) {
...animeSchedule
}
}
}
fragment animeSchedule on Media {
id,
idMal
previous: airingSchedule(notYetAired: false, perPage: 30) {
nodes {
airingAt
timeUntilAiring
episode
}
},
upcoming: airingSchedule(notYetAired: true, perPage: 30) {
nodes {
airingAt
timeUntilAiring
episode
}
}
}

View File

@@ -0,0 +1,56 @@
mutation UpdateMediaListEntry (
$mediaId: Int
$status: MediaListStatus
$scoreRaw: Int
$progress: Int
$startedAt: FuzzyDateInput
$completedAt: FuzzyDateInput
) {
SaveMediaListEntry(
mediaId: $mediaId
status: $status
scoreRaw: $scoreRaw
progress: $progress
startedAt: $startedAt
completedAt: $completedAt
) {
id
}
}
mutation UpdateMediaListEntryProgress (
$mediaId: Int
$progress: Int
$status: MediaListStatus
) {
SaveMediaListEntry(
mediaId: $mediaId
progress: $progress
status: $status
) {
id
}
}
mutation DeleteEntry (
$mediaListEntryId: Int
) {
DeleteMediaListEntry(
id: $mediaListEntryId
) {
deleted
}
}
mutation UpdateMediaListEntryRepeat (
$mediaId: Int
$repeat: Int
) {
SaveMediaListEntry(
mediaId: $mediaId
repeat: $repeat
) {
id
}
}
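
A hypothetical sketch of the generated Go call backing the UpdateMediaListEntry mutation above; the argument order mirrors the call made in AddMediaToPlanning (ctx, mediaId, status, scoreRaw, progress, startedAt, completedAt) and it assumes context is imported:

func addToPlanningExample(c *Client, mediaId int) error {
	status := MediaListStatusPlanning
	scoreRaw := 0
	progress := 0
	_, err := c.UpdateMediaListEntry(context.Background(), &mediaId, &status, &scoreRaw, &progress, nil, nil)
	return err
}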

View File

@@ -0,0 +1,200 @@
query MangaCollection ($userName: String) {
MediaListCollection(userName: $userName, forceSingleCompletedList: true, type: MANGA) {
lists {
status
name
isCustomList
entries {
id
score(format: POINT_100)
progress
status
notes
repeat
private
startedAt {
year
month
day
}
completedAt {
year
month
day
}
media {
...baseManga
}
}
}
}
}
query SearchBaseManga($page: Int, $perPage: Int, $sort: [MediaSort], $search: String, $status: [MediaStatus]){
Page(page: $page, perPage: $perPage){
pageInfo{
hasNextPage
},
media(type: MANGA, search: $search, sort: $sort, status_in: $status, format_not: NOVEL){
...baseManga
}
}
}
query BaseMangaById ($id: Int) {
Media(id: $id, type: MANGA) {
...baseManga
}
}
# For view (will be cached)
query MangaDetailsById ($id: Int) {
Media(id: $id, type: MANGA) {
siteUrl
id
duration
genres
rankings {
context
type
rank
year
format
allTime
season
}
characters(sort: [ROLE]) {
edges {
id
role
name
node {
...baseCharacter
}
}
}
recommendations(page: 1, perPage: 8, sort: RATING_DESC) {
edges {
node {
mediaRecommendation {
id
idMal
siteUrl
status(version: 2)
season
type
format
bannerImage
chapters
volumes
synonyms
isAdult
countryOfOrigin
meanScore
description
title {
userPreferred
romaji
english
native
}
coverImage {
extraLarge
large
medium
color
}
startDate {
year
month
day
}
endDate {
year
month
day
}
}
}
}
}
relations {
edges {
relationType(version: 2)
node {
...baseManga
}
}
}
}
}
query ListManga(
$page: Int
$search: String
$perPage: Int
$sort: [MediaSort]
$status: [MediaStatus]
$genres: [String]
$averageScore_greater: Int
$startDate_greater: FuzzyDateInt
$startDate_lesser: FuzzyDateInt
$format: MediaFormat
$countryOfOrigin: CountryCode
$isAdult: Boolean
) {
Page(page: $page, perPage: $perPage){
pageInfo{
hasNextPage
total
perPage
currentPage
lastPage
},
media(type: MANGA, isAdult: $isAdult, countryOfOrigin: $countryOfOrigin, search: $search, sort: $sort, status_in: $status, format: $format, genre_in: $genres, averageScore_greater: $averageScore_greater, startDate_greater: $startDate_greater, startDate_lesser: $startDate_lesser, format_not: NOVEL){
...baseManga
}
}
}
fragment baseManga on Media {
id
idMal
siteUrl
status(version: 2)
season
type
format
bannerImage
chapters
volumes
synonyms
isAdult
countryOfOrigin
meanScore
description
genres
title {
userPreferred
romaji
english
native
}
coverImage {
extraLarge
large
medium
color
}
startDate {
year
month
day
}
endDate {
year
month
day
}
}

View File

@@ -0,0 +1,126 @@
query ViewerStats {
Viewer {
statistics {
anime {
count
minutesWatched
episodesWatched
meanScore
formats {
...UserFormatStats
}
genres {
...UserGenreStats
}
statuses {
...UserStatusStats
}
studios {
...UserStudioStats
}
scores {
...UserScoreStats
}
startYears {
...UserStartYearStats
}
releaseYears {
...UserReleaseYearStats
}
}
manga {
count
chaptersRead
meanScore
formats {
...UserFormatStats
}
genres {
...UserGenreStats
}
statuses {
...UserStatusStats
}
studios {
...UserStudioStats
}
scores {
...UserScoreStats
}
startYears {
...UserStartYearStats
}
releaseYears {
...UserReleaseYearStats
}
}
}
}
}
fragment UserFormatStats on UserFormatStatistic {
format
meanScore
count
minutesWatched
mediaIds
chaptersRead
}
fragment UserGenreStats on UserGenreStatistic {
genre
meanScore
count
minutesWatched
mediaIds
chaptersRead
}
fragment UserStatusStats on UserStatusStatistic {
status
meanScore
count
minutesWatched
mediaIds
chaptersRead
}
fragment UserScoreStats on UserScoreStatistic {
score
meanScore
count
minutesWatched
mediaIds
chaptersRead
}
fragment UserStudioStats on UserStudioStatistic {
studio {
id
name
isAnimationStudio
}
meanScore
count
minutesWatched
mediaIds
chaptersRead
}
fragment UserStartYearStats on UserStartYearStatistic {
startYear
meanScore
count
minutesWatched
mediaIds
chaptersRead
}
fragment UserReleaseYearStats on UserReleaseYearStatistic {
releaseYear
meanScore
count
minutesWatched
mediaIds
chaptersRead
}

View File

@@ -0,0 +1,12 @@
query StudioDetails($id: Int) {
Studio(id: $id) {
id
isAnimationStudio
name
media (perPage: 80, sort: TRENDING_DESC, isMain: true) {
nodes {
...baseAnime
}
}
}
}

View File

@@ -0,0 +1,16 @@
query GetViewer {
Viewer {
name
avatar {
large
medium
}
bannerImage
isBlocked
options {
displayAdultContent
airingNotifications
profileColor
}
}
}

View File

@@ -0,0 +1,72 @@
package anilist
import (
"context"
"seanime/internal/util"
)
type (
Stats struct {
AnimeStats *AnimeStats `json:"animeStats"`
MangaStats *MangaStats `json:"mangaStats"`
}
AnimeStats struct {
Count int `json:"count"`
MinutesWatched int `json:"minutesWatched"`
EpisodesWatched int `json:"episodesWatched"`
MeanScore float64 `json:"meanScore"`
Genres []*UserGenreStats `json:"genres"`
Formats []*UserFormatStats `json:"formats"`
Statuses []*UserStatusStats `json:"statuses"`
Studios []*UserStudioStats `json:"studios"`
Scores []*UserScoreStats `json:"scores"`
StartYears []*UserStartYearStats `json:"startYears"`
ReleaseYears []*UserReleaseYearStats `json:"releaseYears"`
}
MangaStats struct {
Count int `json:"count"`
ChaptersRead int `json:"chaptersRead"`
MeanScore float64 `json:"meanScore"`
Genres []*UserGenreStats `json:"genres"`
Statuses []*UserStatusStats `json:"statuses"`
Scores []*UserScoreStats `json:"scores"`
StartYears []*UserStartYearStats `json:"startYears"`
ReleaseYears []*UserReleaseYearStats `json:"releaseYears"`
}
)
func GetStats(ctx context.Context, stats *ViewerStats) (ret *Stats, err error) {
defer util.HandlePanicInModuleWithError("api/anilist/GetStats", &err)
allStats := stats.GetViewer().GetStatistics()
ret = &Stats{
AnimeStats: &AnimeStats{
Count: allStats.GetAnime().GetCount(),
MinutesWatched: allStats.GetAnime().GetMinutesWatched(),
EpisodesWatched: allStats.GetAnime().GetEpisodesWatched(),
MeanScore: allStats.GetAnime().GetMeanScore(),
Genres: allStats.GetAnime().GetGenres(),
Formats: allStats.GetAnime().GetFormats(),
Statuses: allStats.GetAnime().GetStatuses(),
Studios: allStats.GetAnime().GetStudios(),
Scores: allStats.GetAnime().GetScores(),
StartYears: allStats.GetAnime().GetStartYears(),
ReleaseYears: allStats.GetAnime().GetReleaseYears(),
},
MangaStats: &MangaStats{
Count: allStats.GetManga().GetCount(),
ChaptersRead: allStats.GetManga().GetChaptersRead(),
MeanScore: allStats.GetManga().GetMeanScore(),
Genres: allStats.GetManga().GetGenres(),
Statuses: allStats.GetManga().GetStatuses(),
Scores: allStats.GetManga().GetScores(),
StartYears: allStats.GetManga().GetStartYears(),
ReleaseYears: allStats.GetManga().GetReleaseYears(),
},
}
return ret, nil
}

View File

@@ -0,0 +1,60 @@
package anilist
import (
"time"
)
type GetSeasonKind int
const (
GetSeasonKindCurrent GetSeasonKind = iota
GetSeasonKindNext
GetSeasonKindPrevious
)
func GetSeasonInfo(now time.Time, kind GetSeasonKind) (MediaSeason, int) {
month, year := now.Month(), now.Year()
getSeasonIndex := func(m time.Month) int {
switch {
case m >= 3 && m <= 5: // spring: 3, 4, 5
return 1
case m >= 6 && m <= 8: // summer: 6, 7, 8
return 2
case m >= 9 && m <= 11: // fall: 9, 10, 11
return 3
default: // winter: 12, 1, 2
return 0
}
}
seasons := []MediaSeason{MediaSeasonWinter, MediaSeasonSpring, MediaSeasonSummer, MediaSeasonFall}
var index int
switch kind {
case GetSeasonKindCurrent:
index = getSeasonIndex(month)
case GetSeasonKindNext:
nextMonth := month + 3
nextYear := year
if nextMonth > 12 {
nextMonth -= 12
nextYear++
}
index = getSeasonIndex(nextMonth)
year = nextYear
case GetSeasonKindPrevious:
prevMonth := month - 3
prevYear := year
if prevMonth <= 0 {
prevMonth += 12
prevYear--
}
index = getSeasonIndex(prevMonth)
year = prevYear
}
return seasons[index], year
}

View File

@@ -0,0 +1,34 @@
package anilist
import (
"testing"
"time"
"github.com/stretchr/testify/require"
)
func TestGetSeason(t *testing.T) {
tests := []struct {
now time.Time
kind GetSeasonKind
expectedSeason MediaSeason
expectedYear int
}{
{time.Date(2025, 1, 1, 0, 0, 0, 0, time.UTC), GetSeasonKindCurrent, MediaSeasonWinter, 2025},
{time.Date(2025, 4, 1, 0, 0, 0, 0, time.UTC), GetSeasonKindCurrent, MediaSeasonSpring, 2025},
{time.Date(2025, 7, 1, 0, 0, 0, 0, time.UTC), GetSeasonKindCurrent, MediaSeasonSummer, 2025},
{time.Date(2025, 10, 1, 0, 0, 0, 0, time.UTC), GetSeasonKindCurrent, MediaSeasonFall, 2025},
{time.Date(2025, 10, 1, 0, 0, 0, 0, time.UTC), GetSeasonKindNext, MediaSeasonWinter, 2026},
{time.Date(2025, 12, 31, 23, 59, 59, 999999999, time.UTC), GetSeasonKindCurrent, MediaSeasonWinter, 2025},
{time.Date(2025, 1, 1, 0, 0, 0, 0, time.UTC), GetSeasonKindNext, MediaSeasonSpring, 2025},
}
for _, tt := range tests {
t.Run(tt.now.Format(time.RFC3339), func(t *testing.T) {
t.Logf("%s", tt.now.Format(time.RFC3339))
season, year := GetSeasonInfo(tt.now, tt.kind)
require.Equal(t, tt.expectedSeason, season, "Expected season %v, got %v", tt.expectedSeason, season)
require.Equal(t, tt.expectedYear, year, "Expected year %d, got %d", tt.expectedYear, year)
})
}
}

View File

@@ -0,0 +1,137 @@
package animap
import (
"errors"
"io"
"net/http"
"seanime/internal/constants"
"seanime/internal/hook"
"seanime/internal/util/result"
"strconv"
"github.com/goccy/go-json"
)
type (
Anime struct {
Title string `json:"title"`
Titles map[string]string `json:"titles,omitempty"`
StartDate string `json:"startDate,omitempty"` // YYYY-MM-DD
EndDate string `json:"endDate,omitempty"` // YYYY-MM-DD
Status string `json:"status"` // Finished, Airing, Upcoming, etc.
Type string `json:"type"` // TV, OVA, Movie, etc.
Episodes map[string]*Episode `json:"episodes,omitzero"` // Indexed by AniDB episode number, "1", "S1", etc.
Mappings *AnimeMapping `json:"mappings,omitzero"`
}
AnimeMapping struct {
AnidbID int `json:"anidb_id,omitempty"`
AnilistID int `json:"anilist_id,omitempty"`
KitsuID int `json:"kitsu_id,omitempty"`
TheTvdbID int `json:"thetvdb_id,omitempty"`
TheMovieDbID string `json:"themoviedb_id,omitempty"` // Can be int or string, forced to string
MalID int `json:"mal_id,omitempty"`
LivechartID int `json:"livechart_id,omitempty"`
AnimePlanetID string `json:"animeplanet_id,omitempty"` // Can be int or string, forced to string
AnisearchID int `json:"anisearch_id,omitempty"`
SimklID int `json:"simkl_id,omitempty"`
NotifyMoeID string `json:"notifymoe_id,omitempty"`
AnimecountdownID int `json:"animecountdown_id,omitempty"`
Type string `json:"type,omitempty"`
}
Episode struct {
AnidbEpisode string `json:"anidbEpisode"`
AnidbId int `json:"anidbEid"`
TvdbId int `json:"tvdbEid,omitempty"`
TvdbShowId int `json:"tvdbShowId,omitempty"`
AirDate string `json:"airDate,omitempty"` // YYYY-MM-DD
AnidbTitle string `json:"anidbTitle,omitempty"` // Title of the episode from AniDB
TvdbTitle string `json:"tvdbTitle,omitempty"` // Title of the episode from TVDB
Overview string `json:"overview,omitempty"`
Image string `json:"image,omitempty"`
Runtime int `json:"runtime,omitempty"` // minutes
Length string `json:"length,omitempty"` // Xm
SeasonNumber int `json:"seasonNumber,omitempty"`
SeasonName string `json:"seasonName,omitempty"`
Number int `json:"number"`
AbsoluteNumber int `json:"absoluteNumber,omitempty"`
}
)
//----------------------------------------------------------------------------------------------------------------------
type Cache struct {
*result.Cache[string, *Anime]
}
// FetchAnimapMedia fetches animap.Anime from the Animap API.
func FetchAnimapMedia(from string, id int) (*Anime, error) {
// Event
reqEvent := &AnimapMediaRequestedEvent{
From: from,
Id: id,
Media: &Anime{},
}
err := hook.GlobalHookManager.OnAnimapMediaRequested().Trigger(reqEvent)
if err != nil {
return nil, err
}
// If the hook prevented the default behavior, return the data
if reqEvent.DefaultPrevented {
return reqEvent.Media, nil
}
from = reqEvent.From
id = reqEvent.Id
apiUrl := constants.InternalMetadataURL + "/entry?" + from + "_id=" + strconv.Itoa(id)
request, err := http.NewRequest("GET", apiUrl, nil)
if err != nil {
return nil, err
}
request.Header.Set("X-Seanime-Version", "Seanime/"+constants.Version)
// Send an HTTP GET request
response, err := http.DefaultClient.Do(request)
if err != nil {
return nil, err
}
defer response.Body.Close()
if response.StatusCode != 200 {
return nil, errors.New("not found on Animap")
}
// Read the response body
responseBody, err := io.ReadAll(response.Body)
if err != nil {
return nil, err
}
// Unmarshal the JSON data into an Anime struct
var media Anime
if err := json.Unmarshal(responseBody, &media); err != nil {
return nil, err
}
// Event
event := &AnimapMediaEvent{
Media: &media,
}
err = hook.GlobalHookManager.OnAnimapMedia().Trigger(event)
if err != nil {
return nil, err
}
// If the hook prevented the default behavior, return the data
if event.DefaultPrevented {
return event.Media, nil
}
return event.Media, nil
}
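
A hypothetical usage sketch; the provider name matches the query parameter built above and the AniList ID is just an example:

func animapExample() {
	media, err := FetchAnimapMedia("anilist", 1)
	if err != nil {
		return
	}
	if media.Mappings != nil {
		_ = media.Mappings.MalID // cross-provider IDs resolved through Animap
	}
	if ep, ok := media.Episodes["1"]; ok {
		_ = ep.AbsoluteNumber // per-episode metadata keyed by AniDB episode number
	}
}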

View File

@@ -0,0 +1,19 @@
package animap
import "seanime/internal/hook_resolver"
// AnimapMediaRequestedEvent is triggered when the Animap media is requested.
// Prevent default to skip the default behavior and return your own data.
type AnimapMediaRequestedEvent struct {
hook_resolver.Event
From string `json:"from"`
Id int `json:"id"`
// Empty data object, will be used if the hook prevents the default behavior
Media *Anime `json:"media"`
}
// AnimapMediaEvent is triggered after processing AnimapMedia.
type AnimapMediaEvent struct {
hook_resolver.Event
Media *Anime `json:"media"`
}

View File

@@ -0,0 +1,156 @@
package anizip
import (
"errors"
"io"
"net/http"
"seanime/internal/hook"
"seanime/internal/util/result"
"strconv"
"github.com/goccy/go-json"
)
// AniZip is the API used for fetching anime metadata and mappings.
type (
Episode struct {
TvdbEid int `json:"tvdbEid,omitempty"`
AirDate string `json:"airdate,omitempty"`
SeasonNumber int `json:"seasonNumber,omitempty"`
EpisodeNumber int `json:"episodeNumber,omitempty"`
AbsoluteEpisodeNumber int `json:"absoluteEpisodeNumber,omitempty"`
Title map[string]string `json:"title,omitempty"`
Image string `json:"image,omitempty"`
Summary string `json:"summary,omitempty"`
Overview string `json:"overview,omitempty"`
Runtime int `json:"runtime,omitempty"`
Length int `json:"length,omitempty"`
Episode string `json:"episode,omitempty"`
AnidbEid int `json:"anidbEid,omitempty"`
Rating string `json:"rating,omitempty"`
}
Mappings struct {
AnimeplanetID string `json:"animeplanet_id,omitempty"`
KitsuID int `json:"kitsu_id,omitempty"`
MalID int `json:"mal_id,omitempty"`
Type string `json:"type,omitempty"`
AnilistID int `json:"anilist_id,omitempty"`
AnisearchID int `json:"anisearch_id,omitempty"`
AnidbID int `json:"anidb_id,omitempty"`
NotifymoeID string `json:"notifymoe_id,omitempty"`
LivechartID int `json:"livechart_id,omitempty"`
ThetvdbID int `json:"thetvdb_id,omitempty"`
ImdbID string `json:"imdb_id,omitempty"`
ThemoviedbID string `json:"themoviedb_id,omitempty"`
}
Media struct {
Titles map[string]string `json:"titles"`
Episodes map[string]Episode `json:"episodes"`
EpisodeCount int `json:"episodeCount"`
SpecialCount int `json:"specialCount"`
Mappings *Mappings `json:"mappings"`
}
)
//----------------------------------------------------------------------------------------------------------------------
type Cache struct {
*result.Cache[string, *Media]
}
func NewCache() *Cache {
return &Cache{result.NewCache[string, *Media]()}
}
func GetCacheKey(from string, id int) string {
return from + strconv.Itoa(id)
}
//----------------------------------------------------------------------------------------------------------------------
// FetchAniZipMedia fetches anizip.Media from the AniZip API.
func FetchAniZipMedia(from string, id int) (*Media, error) {
// Event
reqEvent := &AnizipMediaRequestedEvent{
From: from,
Id: id,
Media: &Media{},
}
err := hook.GlobalHookManager.OnAnizipMediaRequested().Trigger(reqEvent)
if err != nil {
return nil, err
}
// If the hook prevented the default behavior, return the data
if reqEvent.DefaultPrevented {
return reqEvent.Media, nil
}
from = reqEvent.From
id = reqEvent.Id
apiUrl := "https://api.ani.zip/v1/episodes?" + from + "_id=" + strconv.Itoa(id)
// Send an HTTP GET request
response, err := http.Get(apiUrl)
if err != nil {
return nil, err
}
defer response.Body.Close()
if response.StatusCode != 200 {
return nil, errors.New("not found on AniZip")
}
// Read the response body
responseBody, err := io.ReadAll(response.Body)
if err != nil {
return nil, err
}
// Unmarshal the JSON data into a Media struct
var media Media
if err := json.Unmarshal(responseBody, &media); err != nil {
return nil, err
}
// Event
event := &AnizipMediaEvent{
Media: &media,
}
err = hook.GlobalHookManager.OnAnizipMedia().Trigger(event)
if err != nil {
return nil, err
}
// If the hook prevented the default behavior, return the data
if event.DefaultPrevented {
return event.Media, nil
}
return event.Media, nil
}
// FetchAniZipMediaC is the same as FetchAniZipMedia but uses a cache.
// If the media is found in the cache, it will be returned.
// If the media is not found in the cache, it will be fetched and then added to the cache.
func FetchAniZipMediaC(from string, id int, cache *Cache) (*Media, error) {
cacheV, ok := cache.Get(GetCacheKey(from, id))
if ok {
return cacheV, nil
}
media, err := FetchAniZipMedia(from, id)
if err != nil {
return nil, err
}
cache.Set(GetCacheKey(from, id), media)
return media, nil
}
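
A hypothetical usage sketch: sharing one cache means repeated lookups for the same (provider, id) pair only hit the network once:

func anizipExample() (int, error) {
	cache := NewCache()
	media, err := FetchAniZipMediaC("anilist", 1, cache)
	if err != nil {
		return 0, err
	}
	return media.GetMainEpisodeCount(), nil
}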

View File

@@ -0,0 +1,65 @@
package anizip
func (m *Media) GetTitle() string {
if m == nil {
return ""
}
if len(m.Titles["en"]) > 0 {
return m.Titles["en"]
}
return m.Titles["ro"]
}
func (m *Media) GetMappings() *Mappings {
if m == nil {
return &Mappings{}
}
return m.Mappings
}
func (m *Media) FindEpisode(ep string) (*Episode, bool) {
if m == nil || m.Episodes == nil {
return nil, false
}
episode, found := m.Episodes[ep]
if !found {
return nil, false
}
return &episode, true
}
func (m *Media) GetMainEpisodeCount() int {
if m == nil {
return 0
}
return m.EpisodeCount
}
// GetOffset returns the offset of the first episode relative to the absolute episode number.
// e.g., if the first episode's absolute number is 13, then the offset is 12.
func (m *Media) GetOffset() int {
if m == nil {
return 0
}
firstEp, found := m.FindEpisode("1")
if !found {
return 0
}
if firstEp.AbsoluteEpisodeNumber == 0 {
return 0
}
return firstEp.AbsoluteEpisodeNumber - 1
}
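// Illustrative sketch (not part of the original file): mapping an absolute episode number back to
// a season-relative one using the offset above. With a hypothetical first episode whose absolute
// number is 13, GetOffset returns 12, so absolute episode 15 resolves to relative episode 3.
func exampleRelativeEpisodeNumber(m *Media, absoluteEp int) int {
    return absoluteEp - m.GetOffset()
}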
func (e *Episode) GetTitle() string {
if e == nil {
return ""
}
eng, ok := e.Title["en"]
if ok {
return eng
}
rom, ok := e.Title["x-jat"]
if ok {
return rom
}
return ""
}

View File

@@ -0,0 +1,37 @@
package anizip
import (
"github.com/stretchr/testify/assert"
"testing"
)
func TestFetchAniZipMedia(t *testing.T) {
tests := []struct {
name string
provider string
id int
expectedTitle string
}{
{
name: "Cowboy Bebop",
provider: "anilist",
id: 1,
expectedTitle: "Cowboy Bebop",
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
media, err := FetchAniZipMedia(test.provider, test.id)
if assert.NoError(t, err) {
if assert.NotNil(t, media) {
assert.Equal(t, test.expectedTitle, media.GetTitle())
}
}
})
}
}

View File

@@ -0,0 +1,19 @@
package anizip
import "seanime/internal/hook_resolver"
// AnizipMediaRequestedEvent is triggered when the AniZip media is requested.
// Prevent default to skip the default behavior and return your own data.
type AnizipMediaRequestedEvent struct {
hook_resolver.Event
From string `json:"from"`
Id int `json:"id"`
// Empty data object, will be used if the hook prevents the default behavior
Media *Media `json:"media"`
}
// AnizipMediaEvent is triggered after processing AnizipMedia.
type AnizipMediaEvent struct {
hook_resolver.Event
Media *Media `json:"media"`
}
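// Illustrative sketch (not part of the original file): a handler for AnizipMediaRequestedEvent that
// serves metadata from a caller-supplied local source and prevents the default HTTP fetch. How
// handlers are registered with the hook manager is not shown here, and the localLookup callback is
// hypothetical; setting DefaultPrevented directly assumes the embedded hook_resolver.Event exposes
// that field, as FetchAniZipMedia reads it.
func exampleOnAnizipMediaRequested(e *AnizipMediaRequestedEvent, localLookup func(from string, id int) (*Media, bool)) error {
    if media, ok := localLookup(e.From, e.Id); ok {
        e.Media = media
        e.DefaultPrevented = true // FetchAniZipMedia returns e.Media without hitting the API
    }
    return nil
}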

View File

@@ -0,0 +1,185 @@
package filler
import (
"fmt"
"seanime/internal/util"
"strings"
"github.com/adrg/strutil/metrics"
"github.com/gocolly/colly"
"github.com/rs/zerolog"
)
type (
SearchOptions struct {
Titles []string
}
SearchResult struct {
Slug string
Title string
}
API interface {
Search(opts SearchOptions) (*SearchResult, error)
FindFillerData(slug string) (*Data, error)
}
Data struct {
FillerEpisodes []string `json:"fillerEpisodes"`
}
)
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
type (
AnimeFillerList struct {
baseUrl string
userAgent string
logger *zerolog.Logger
}
)
func NewAnimeFillerList(logger *zerolog.Logger) *AnimeFillerList {
return &AnimeFillerList{
baseUrl: "https://www.animefillerlist.com",
userAgent: util.GetRandomUserAgent(),
logger: logger,
}
}
// Search scrapes the AnimeFillerList show index and returns the entry whose title is the closest
// Levenshtein match to one of the provided titles.
func (af *AnimeFillerList) Search(opts SearchOptions) (result *SearchResult, err error) {
defer util.HandlePanicInModuleWithError("api/metadata/filler/Search", &err)
c := colly.NewCollector(
colly.UserAgent(af.userAgent),
)
ret := make([]*SearchResult, 0)
c.OnHTML("div.Group > ul > li > a", func(e *colly.HTMLElement) {
ret = append(ret, &SearchResult{
Slug: e.Attr("href"),
Title: e.Text,
})
})
err = c.Visit(fmt.Sprintf("%s/shows", af.baseUrl))
if err != nil {
return nil, err
}
if len(ret) == 0 {
return nil, fmt.Errorf("no results found")
}
lev := metrics.NewLevenshtein()
lev.CaseSensitive = false
compResults := make([]struct {
OriginalValue string
Value string
Distance int
}, 0)
for _, result := range ret {
firstTitle := result.Title
secondTitle := ""
// Check if a second title exists between parentheses
if strings.LastIndex(firstTitle, " (") != -1 && strings.LastIndex(firstTitle, ")") != -1 {
secondTitle = firstTitle[strings.LastIndex(firstTitle, " (")+2 : strings.LastIndex(firstTitle, ")")]
if !util.IsMostlyLatinString(secondTitle) {
secondTitle = ""
}
}
if secondTitle != "" {
firstTitle = firstTitle[:strings.LastIndex(firstTitle, " (")]
}
for _, mediaTitle := range opts.Titles {
compResults = append(compResults, struct {
OriginalValue string
Value string
Distance int
}{
OriginalValue: result.Title,
Value: firstTitle,
Distance: lev.Distance(mediaTitle, firstTitle),
})
if secondTitle != "" {
compResults = append(compResults, struct {
OriginalValue string
Value string
Distance int
}{
OriginalValue: result.Title,
Value: secondTitle,
Distance: lev.Distance(mediaTitle, secondTitle),
})
}
}
}
// Find the best match
bestResult := struct {
OriginalValue string
Value string
Distance int
}{}
for _, result := range compResults {
if bestResult.OriginalValue == "" || result.Distance <= bestResult.Distance {
if bestResult.OriginalValue != "" && result.Distance == bestResult.Distance && len(result.OriginalValue) > len(bestResult.OriginalValue) {
continue
}
bestResult = result
}
}
if bestResult.OriginalValue == "" {
return nil, fmt.Errorf("no results found")
}
if bestResult.Distance > 10 {
return nil, fmt.Errorf("no results found")
}
// Get the result
for _, r := range ret {
if r.Title == bestResult.OriginalValue {
return r, nil
}
}
return nil, fmt.Errorf("no results found")
}
// FindFillerData scrapes the show's AnimeFillerList page and returns its filler episode numbers.
func (af *AnimeFillerList) FindFillerData(slug string) (ret *Data, err error) {
defer util.HandlePanicInModuleWithError("api/metadata/filler/FindFillerData", &err)
c := colly.NewCollector(
colly.UserAgent(af.userAgent),
)
ret = &Data{
FillerEpisodes: make([]string, 0),
}
fillerEps := make([]string, 0)
c.OnHTML("tr.filler", func(e *colly.HTMLElement) {
fillerEps = append(fillerEps, e.ChildText("td.Number"))
})
err = c.Visit(fmt.Sprintf("%s%s", af.baseUrl, slug))
if err != nil {
return nil, err
}
ret.FillerEpisodes = fillerEps
return
}
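// Illustrative sketch (not part of the original file): resolving a show on AnimeFillerList and
// pulling its filler episode numbers in one pass. The titles slice comes from the caller (e.g.
// the media's English and romaji titles).
func exampleFetchFillerEpisodes(logger *zerolog.Logger, titles []string) ([]string, error) {
    var api API = NewAnimeFillerList(logger)
    res, err := api.Search(SearchOptions{Titles: titles})
    if err != nil {
        return nil, err
    }
    data, err := api.FindFillerData(res.Slug)
    if err != nil {
        return nil, err
    }
    return data.FillerEpisodes, nil
}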

View File

@@ -0,0 +1,24 @@
package filler
import (
"seanime/internal/util"
"testing"
"github.com/davecgh/go-spew/spew"
)
func TestAnimeFillerList_Search(t *testing.T) {
af := NewAnimeFillerList(util.NewLogger())
opts := SearchOptions{
Titles: []string{"Hunter x Hunter (2011)"},
}
ret, err := af.Search(opts)
if err != nil {
t.Error(err)
}
spew.Dump(ret)
}

View File

@@ -0,0 +1,186 @@
package mal
import (
"fmt"
"net/url"
)
const (
BaseAnimeFields string = "id,title,main_picture,alternative_titles,start_date,end_date,start_season,nsfw,synopsis,num_episodes,mean,rank,popularity,media_type,status"
)
type (
BasicAnime struct {
ID int `json:"id"`
Title string `json:"title"`
MainPicture struct {
Medium string `json:"medium"`
Large string `json:"large"`
} `json:"main_picture"`
AlternativeTitles struct {
Synonyms []string `json:"synonyms"`
En string `json:"en"`
Ja string `json:"ja"`
} `json:"alternative_titles"`
StartDate string `json:"start_date"`
EndDate string `json:"end_date"`
StartSeason struct {
Year int `json:"year"`
Season string `json:"season"`
} `json:"start_season"`
Synopsis string `json:"synopsis"`
NSFW string `json:"nsfw"`
NumEpisodes int `json:"num_episodes"`
Mean float32 `json:"mean"`
Rank int `json:"rank"`
Popularity int `json:"popularity"`
MediaType MediaType `json:"media_type"`
Status MediaStatus `json:"status"`
}
AnimeListEntry struct {
Node struct {
ID int `json:"id"`
Title string `json:"title"`
MainPicture struct {
Medium string `json:"medium"`
Large string `json:"large"`
} `json:"main_picture"`
} `json:"node"`
ListStatus struct {
Status MediaListStatus `json:"status"`
IsRewatching bool `json:"is_rewatching"`
NumEpisodesWatched int `json:"num_episodes_watched"`
Score int `json:"score"`
UpdatedAt string `json:"updated_at"`
} `json:"list_status"`
}
)
func (w *Wrapper) GetAnimeDetails(mId int) (*BasicAnime, error) {
w.logger.Debug().Int("mId", mId).Msg("mal: Getting anime details")
reqUrl := fmt.Sprintf("%s/anime/%d?fields=%s", ApiBaseURL, mId, BaseAnimeFields)
if w.AccessToken == "" {
return nil, fmt.Errorf("access token is empty")
}
var anime BasicAnime
err := w.doQuery("GET", reqUrl, nil, "application/json", &anime)
if err != nil {
w.logger.Error().Err(err).Int("mId", mId).Msg("mal: Failed to get anime details")
return nil, err
}
w.logger.Info().Int("mId", mId).Msg("mal: Fetched anime details")
return &anime, nil
}
func (w *Wrapper) GetAnimeCollection() ([]*AnimeListEntry, error) {
w.logger.Debug().Msg("mal: Getting anime collection")
reqUrl := fmt.Sprintf("%s/users/@me/animelist?fields=list_status&limit=1000", ApiBaseURL)
type response struct {
Data []*AnimeListEntry `json:"data"`
}
var data response
err := w.doQuery("GET", reqUrl, nil, "application/json", &data)
if err != nil {
w.logger.Error().Err(err).Msg("mal: Failed to get anime collection")
return nil, err
}
w.logger.Info().Msg("mal: Fetched anime collection")
return data.Data, nil
}
type AnimeListProgressParams struct {
NumEpisodesWatched *int
}
func (w *Wrapper) UpdateAnimeProgress(opts *AnimeListProgressParams, mId int) error {
w.logger.Debug().Int("mId", mId).Msg("mal: Updating anime progress")
if opts == nil || opts.NumEpisodesWatched == nil {
return fmt.Errorf("number of episodes watched is required")
}
// Get anime details
anime, err := w.GetAnimeDetails(mId)
if err != nil {
return err
}
status := MediaListStatusWatching
if anime.Status == MediaStatusFinishedAiring && anime.NumEpisodes > 0 && anime.NumEpisodes <= *opts.NumEpisodesWatched {
status = MediaListStatusCompleted
}
if anime.NumEpisodes > 0 && *opts.NumEpisodesWatched > anime.NumEpisodes {
*opts.NumEpisodesWatched = anime.NumEpisodes
}
// Update MAL list entry
err = w.UpdateAnimeListStatus(&AnimeListStatusParams{
Status: &status,
NumEpisodesWatched: opts.NumEpisodesWatched,
}, mId)
if err == nil {
w.logger.Info().Int("mId", mId).Msg("mal: Updated anime progress")
}
return err
}
type AnimeListStatusParams struct {
Status *MediaListStatus
IsRewatching *bool
NumEpisodesWatched *int
Score *int
}
func (w *Wrapper) UpdateAnimeListStatus(opts *AnimeListStatusParams, mId int) error {
w.logger.Debug().Int("mId", mId).Msg("mal: Updating anime list status")
reqUrl := fmt.Sprintf("%s/anime/%d/my_list_status", ApiBaseURL, mId)
// Build URL
urlData := url.Values{}
if opts.Status != nil {
urlData.Set("status", string(*opts.Status))
}
if opts.IsRewatching != nil {
urlData.Set("is_rewatching", fmt.Sprintf("%t", *opts.IsRewatching))
}
if opts.NumEpisodesWatched != nil {
urlData.Set("num_watched_episodes", fmt.Sprintf("%d", *opts.NumEpisodesWatched))
}
if opts.Score != nil {
urlData.Set("score", fmt.Sprintf("%d", *opts.Score))
}
encodedData := urlData.Encode()
err := w.doMutation("PATCH", reqUrl, encodedData)
if err != nil {
w.logger.Error().Err(err).Int("mId", mId).Msg("mal: Failed to update anime list status")
return err
}
return nil
}
func (w *Wrapper) DeleteAnimeListItem(mId int) error {
w.logger.Debug().Int("mId", mId).Msg("mal: Deleting anime list item")
reqUrl := fmt.Sprintf("%s/anime/%d/my_list_status", ApiBaseURL, mId)
err := w.doMutation("DELETE", reqUrl, "")
if err != nil {
w.logger.Error().Err(err).Int("mId", mId).Msg("mal: Failed to delete anime list item")
return err
}
w.logger.Info().Int("mId", mId).Msg("mal: Deleted anime list item")
return nil
}
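// Illustrative sketch (not part of the original file): marking an episode as watched for a
// hypothetical MAL entry. UpdateAnimeProgress fetches the entry first, clamps the progress to the
// episode count and switches the status to "completed" once the final episode is reached.
func exampleMarkEpisodeWatched(w *Wrapper, malID int, episode int) error {
    return w.UpdateAnimeProgress(&AnimeListProgressParams{
        NumEpisodesWatched: &episode,
    }, malID)
}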

View File

@@ -0,0 +1,62 @@
package mal
import (
"github.com/davecgh/go-spew/spew"
"seanime/internal/test_utils"
"seanime/internal/util"
"testing"
)
func TestGetAnimeDetails(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.MyAnimeList())
malWrapper := NewWrapper(test_utils.ConfigData.Provider.MalJwt, util.NewLogger())
res, err := malWrapper.GetAnimeDetails(51179)
spew.Dump(res)
if err != nil {
t.Fatalf("error while fetching media, %v", err)
}
t.Log(res.Title)
}
func TestGetAnimeCollection(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.MyAnimeList())
malWrapper := NewWrapper(test_utils.ConfigData.Provider.MalJwt, util.NewLogger())
res, err := malWrapper.GetAnimeCollection()
if err != nil {
t.Fatalf("error while fetching anime collection, %v", err)
}
for _, entry := range res {
t.Log(entry.Node.Title)
if entry.Node.ID == 51179 {
spew.Dump(entry)
}
}
}
func TestUpdateAnimeListStatus(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.MyAnimeList(), test_utils.MyAnimeListMutation())
malWrapper := NewWrapper(test_utils.ConfigData.Provider.MalJwt, util.NewLogger())
mId := 51179
progress := 2
status := MediaListStatusWatching
err := malWrapper.UpdateAnimeListStatus(&AnimeListStatusParams{
Status: &status,
NumEpisodesWatched: &progress,
}, mId)
if err != nil {
t.Fatalf("error while fetching media, %v", err)
}
}

View File

@@ -0,0 +1,185 @@
package mal
import (
"fmt"
"net/url"
)
const (
BaseMangaFields string = "id,title,main_picture,alternative_titles,start_date,end_date,nsfw,synopsis,num_volumes,num_chapters,mean,rank,popularity,media_type,status"
)
type (
BasicManga struct {
ID int `json:"id"`
Title string `json:"title"`
MainPicture struct {
Medium string `json:"medium"`
Large string `json:"large"`
} `json:"main_picture"`
AlternativeTitles struct {
Synonyms []string `json:"synonyms"`
En string `json:"en"`
Ja string `json:"ja"`
} `json:"alternative_titles"`
StartDate string `json:"start_date"`
EndDate string `json:"end_date"`
Synopsis string `json:"synopsis"`
NSFW string `json:"nsfw"`
NumVolumes int `json:"num_volumes"`
NumChapters int `json:"num_chapters"`
Mean float32 `json:"mean"`
Rank int `json:"rank"`
Popularity int `json:"popularity"`
MediaType MediaType `json:"media_type"`
Status MediaStatus `json:"status"`
}
MangaListEntry struct {
Node struct {
ID int `json:"id"`
Title string `json:"title"`
MainPicture struct {
Medium string `json:"medium"`
Large string `json:"large"`
} `json:"main_picture"`
} `json:"node"`
ListStatus struct {
Status MediaListStatus `json:"status"`
IsRereading bool `json:"is_rereading"`
NumVolumesRead int `json:"num_volumes_read"`
NumChaptersRead int `json:"num_chapters_read"`
Score int `json:"score"`
UpdatedAt string `json:"updated_at"`
} `json:"list_status"`
}
)
func (w *Wrapper) GetMangaDetails(mId int) (*BasicManga, error) {
w.logger.Debug().Int("mId", mId).Msg("mal: Getting manga details")
reqUrl := fmt.Sprintf("%s/manga/%d?fields=%s", ApiBaseURL, mId, BaseMangaFields)
if w.AccessToken == "" {
return nil, fmt.Errorf("access token is empty")
}
var manga BasicManga
err := w.doQuery("GET", reqUrl, nil, "application/json", &manga)
if err != nil {
w.logger.Error().Err(err).Msg("mal: Failed to get manga details")
return nil, err
}
w.logger.Info().Int("mId", mId).Msg("mal: Fetched manga details")
return &manga, nil
}
func (w *Wrapper) GetMangaCollection() ([]*MangaListEntry, error) {
w.logger.Debug().Msg("mal: Getting manga collection")
reqUrl := fmt.Sprintf("%s/users/@me/mangalist?fields=list_status&limit=1000", ApiBaseURL)
type response struct {
Data []*MangaListEntry `json:"data"`
}
var data response
err := w.doQuery("GET", reqUrl, nil, "application/json", &data)
if err != nil {
w.logger.Error().Err(err).Msg("mal: Failed to get manga collection")
return nil, err
}
w.logger.Info().Msg("mal: Fetched manga collection")
return data.Data, nil
}
type MangaListProgressParams struct {
NumChaptersRead *int
}
func (w *Wrapper) UpdateMangaProgress(opts *MangaListProgressParams, mId int) error {
w.logger.Debug().Int("mId", mId).Msg("mal: Updating manga progress")
if opts == nil || opts.NumChaptersRead == nil {
return fmt.Errorf("number of chapters read is required")
}
// Get manga details
manga, err := w.GetMangaDetails(mId)
if err != nil {
return err
}
status := MediaListStatusReading
if manga.Status == MediaStatusFinished && manga.NumChapters > 0 && manga.NumChapters <= *opts.NumChaptersRead {
status = MediaListStatusCompleted
}
if manga.NumChapters > 0 && *opts.NumChaptersRead > manga.NumChapters {
*opts.NumChaptersRead = manga.NumChapters
}
// Update MAL list entry
err = w.UpdateMangaListStatus(&MangaListStatusParams{
Status: &status,
NumChaptersRead: opts.NumChaptersRead,
}, mId)
if err == nil {
w.logger.Info().Int("mId", mId).Msg("mal: Updated manga progress")
}
return err
}
type MangaListStatusParams struct {
Status *MediaListStatus
IsRereading *bool
NumChaptersRead *int
Score *int
}
func (w *Wrapper) UpdateMangaListStatus(opts *MangaListStatusParams, mId int) error {
w.logger.Debug().Int("mId", mId).Msg("mal: Updating manga list status")
reqUrl := fmt.Sprintf("%s/manga/%d/my_list_status", ApiBaseURL, mId)
// Build URL
urlData := url.Values{}
if opts.Status != nil {
urlData.Set("status", string(*opts.Status))
}
if opts.IsRereading != nil {
urlData.Set("is_rereading", fmt.Sprintf("%t", *opts.IsRereading))
}
if opts.NumChaptersRead != nil {
urlData.Set("num_chapters_read", fmt.Sprintf("%d", *opts.NumChaptersRead))
}
if opts.Score != nil {
urlData.Set("score", fmt.Sprintf("%d", *opts.Score))
}
encodedData := urlData.Encode()
err := w.doMutation("PATCH", reqUrl, encodedData)
if err != nil {
w.logger.Error().Err(err).Msg("mal: Failed to update manga list status")
return err
}
return nil
}
func (w *Wrapper) DeleteMangaListItem(mId int) error {
w.logger.Debug().Int("mId", mId).Msg("mal: Deleting manga list item")
reqUrl := fmt.Sprintf("%s/manga/%d/my_list_status", ApiBaseURL, mId)
err := w.doMutation("DELETE", reqUrl, "")
if err != nil {
w.logger.Error().Err(err).Msg("mal: Failed to delete manga list item")
return err
}
w.logger.Info().Int("mId", mId).Msg("mal: Deleted manga list item")
return nil
}

Some files were not shown because too many files have changed in this diff.