Compare commits
v0.0.6-alp ... main
29 Commits (SHA1):

0d6bae97ab
cd9b7412b8
29dd7fc058
2553a35a4a
45f7eff8b1
39c7389a0a
ee09d2b468
cc67966063
cdf263b611
8c3b59d622
6d212864c5
8ea32e61ba
ad17f5d3ac
cc01ac18a5
901785525f
f358a74079
7fd4acd238
c04a8d53c6
2c969680e0
45c9bd3b40
47f6ba2104
156b8fbb65
8702d3e93f
71a2ac545b
fa206a1bb7
cb68360f2f
1fbe80f0b9
d91ca35a83
4490c817db

2  .github/workflows/docker.yml
@@ -17,7 +17,7 @@ jobs:
uses: docker/metadata-action@v5
with:
images: |
nite07/pcgamedb
nite07/game-crawler
tags: |
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}

@@ -1,4 +1,4 @@
project_name: pcgamedb
project_name: game-crawler
before:
hooks:
- go install github.com/swaggo/swag/cmd/swag@latest

@@ -16,7 +16,7 @@ builds:
- arm
- "386"
ldflags:
- -s -w -X pcgamedb/constant.Version={{ .Version }}
- -s -w -X game-crawler/constant.Version={{ .Version }}
flags:
- -trimpath
archives:

@@ -14,10 +14,10 @@ ARG version=dev
RUN if [ "$version" = "dev" ]; then \
version=$(git describe --tags --always); \
fi && \
CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w -X pcgamedb/constant.Version=${version}" -o pcgamedb .
CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w -X game-crawler/constant.Version=${version}" -o game-crawler .

FROM alpine:latest
WORKDIR /app
COPY --from=builder /app/pcgamedb /app/pcgamedb
COPY --from=builder /app/game-crawler /app/game-crawler

ENTRYPOINT ["/app/pcgamedb", "server"]
ENTRYPOINT ["/app/game-crawler", "server"]

11  README.md
@@ -1,20 +1,23 @@
# pcgamedb
# game-crawler

pcgamedb is a powerful command-line tool designed to scrape and manage repack game data from various online sources. With support for multiple data sources and the ability to provide a RESTful API.
game-crawler is a powerful command-line tool designed to scrape and manage repack game data from various online sources. With support for multiple data sources and the ability to provide a RESTful API.

## Features

- **Data Sources**:

- KaOSKrew(1337x)
- DODI(1337x)
- johncena141(1337x)
- Fitgirl
- KaOSKrew
- DODI
- FreeGOG
- GOGGames
- OnlineFix
- Xatab
- SteamRIP
- Chovka
- Omg_Gods(rutracker)
- LinuxGame(rutracker)

- **Database**:

2  build.sh
@@ -1,4 +1,4 @@
go install github.com/swaggo/swag/cmd/swag@latest
swag init
CGO_ENABLED=0
go build -o pcgamedb -ldflags "-s -w -X pcgamedb/constant.Version=$(git describe --tags --always)" .
go build -o game-crawler -ldflags "-s -w -X game-crawler/constant.Version=$(git describe --tags --always)" .

13  cache/redis.go
@@ -6,8 +6,8 @@ import (
"sync"
"time"

"pcgamedb/config"
"pcgamedb/log"
"game-crawler/config"
"game-crawler/log"

"github.com/redis/go-redis/v9"
)

@@ -16,9 +16,6 @@ var cache *redis.Client
var mutx = &sync.RWMutex{}

func connect() {
if !config.Config.RedisAvaliable {
return
}
cache = redis.NewClient(&redis.Options{
Addr: fmt.Sprintf("%s:%d", config.Config.Redis.Host, config.Config.Redis.Port),
Password: config.Config.Redis.Password,

@@ -69,14 +66,14 @@ func Get(key string) (string, bool) {
return value, true
}

func Add(key string, value interface{}) error {
func Set(key string, value interface{}) error {
CheckConnect()
ctx := context.Background()
cmd := cache.Set(ctx, key, value, 7*24*time.Hour)
cmd := cache.Set(ctx, key, value, 0)
return cmd.Err()
}

func AddWithExpire(key string, value interface{}, expire time.Duration) error {
func SetWithExpire(key string, value interface{}, expire time.Duration) error {
CheckConnect()
ctx := context.Background()
cmd := cache.Set(ctx, key, value, expire)
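The renamed helpers keep the signatures of the old Add/AddWithExpire pair, so callers only change the function name; the default variant now stores without an expiry (0) instead of seven days. A minimal usage sketch, assuming the package lives at game-crawler/cache after the module rename and using placeholder keys:

```go
package main

import (
	"fmt"
	"time"

	"game-crawler/cache" // import path assumed from the new module name
)

func main() {
	// Set stores a value with no expiry (the TTL passed to redis is now 0).
	if err := cache.Set("example-key", "example-value"); err != nil {
		fmt.Println("set failed:", err)
	}

	// SetWithExpire keeps an explicit TTL, e.g. a session cached for one hour.
	if err := cache.SetWithExpire("example-session", "session-json", 1*time.Hour); err != nil {
		fmt.Println("set with expire failed:", err)
	}

	// Get returns the cached value and a found flag.
	if v, ok := cache.Get("example-key"); ok {
		fmt.Println("cache hit:", v)
	}
}
```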
@@ -1,8 +1,8 @@
package cmd

import (
"pcgamedb/log"
"pcgamedb/task"
"game-crawler/log"
"game-crawler/task"

"github.com/spf13/cobra"
)

@@ -6,9 +6,9 @@ import (
"strconv"
"strings"

"pcgamedb/crawler"
"pcgamedb/log"
"pcgamedb/utils"
"game-crawler/crawler"
"game-crawler/log"
"game-crawler/utils"

"github.com/spf13/cobra"
"go.uber.org/zap"

@@ -4,9 +4,9 @@ import (
"os"
"path/filepath"

"game-crawler/db"
"game-crawler/log"
"go.uber.org/zap"
"pcgamedb/db"
"pcgamedb/log"

"github.com/spf13/cobra"
)

@@ -1,10 +1,10 @@
package cmd

import (
"game-crawler/db"
"game-crawler/log"
"github.com/spf13/cobra"
"go.uber.org/zap"
"pcgamedb/db"
"pcgamedb/log"
)

type importCommandConfig struct {

@@ -1,8 +1,8 @@
package cmd

import (
"pcgamedb/db"
"pcgamedb/log"
"game-crawler/db"
"game-crawler/log"

"github.com/spf13/cobra"
"go.uber.org/zap"

@@ -1,9 +1,9 @@
package cmd

import (
"pcgamedb/crawler"
"pcgamedb/db"
"pcgamedb/log"
"game-crawler/crawler"
"game-crawler/db"
"game-crawler/log"

"github.com/spf13/cobra"
"go.uber.org/zap"

@@ -35,7 +35,7 @@ func organizeRun(cmd *cobra.Command, args []string) {
for _, game := range games {
err := crawler.OrganizeGameItem(game)
if err != nil {
log.Logger.Error("failed to organize game item")
log.Logger.Error("failed to organize game item", zap.String("name", game.Name), zap.Error(err))
continue
}
log.Logger.Info("game item organized", zap.String("name", game.Name))

@@ -4,9 +4,9 @@ import (
"encoding/json"
"os"

"pcgamedb/crawler"
"pcgamedb/db"
"pcgamedb/log"
"game-crawler/crawler"
"game-crawler/db"
"game-crawler/log"

"github.com/spf13/cobra"
"go.mongodb.org/mongo-driver/bson/primitive"

@@ -1,8 +1,8 @@
package cmd

import (
"pcgamedb/config"
"pcgamedb/server"
"game-crawler/config"
"game-crawler/server"

"github.com/spf13/cobra"
)

@@ -23,7 +23,7 @@ var serverCmdCfg serverCommandConfig

func init() {
serverCmd.Flags().StringVarP(&serverCmdCfg.Port, "port", "p", "8080", "server port")
serverCmd.Flags().BoolVarP(&serverCmdCfg.AutoCrawl, "auto-crawl", "c", true, "enable auto crawl")
serverCmd.Flags().BoolVarP(&serverCmdCfg.AutoCrawl, "auto-crawl", "c", false, "enable auto crawl")
RootCmd.AddCommand(serverCmd)
}

@@ -1,9 +1,10 @@
package cmd

import (
"game-crawler/crawler"
"game-crawler/log"

"go.uber.org/zap"
"pcgamedb/crawler"
"pcgamedb/log"

"github.com/spf13/cobra"
)

@@ -13,7 +14,7 @@ var supplementCmd = &cobra.Command{
Long: "Supplement platform id to game info",
Short: "Supplement platform id to game info",
Run: func(cmd *cobra.Command, args []string) {
err := crawler.SupplementPlatformIDToGameInfo(log.Logger)
err := crawler.SupplementPlatformIDToGameInfo()
if err != nil {
log.Logger.Error("Error supplementing platform id to game info", zap.Error(err))
}

69  cmd/task.go
@@ -1,8 +1,8 @@
package cmd

import (
"pcgamedb/log"
"pcgamedb/task"
"game-crawler/log"
"game-crawler/task"

"github.com/robfig/cron/v3"
"github.com/spf13/cobra"

@@ -10,32 +10,63 @@ import (
)

type taskCommandConfig struct {
Crawl bool
CrawlCron string
Cron string
Now bool
}

var taskCmdCfg taskCommandConfig
var taskCommandCfg taskCommandConfig

var crawlTaskCmd = &cobra.Command{
Use: "crawl",
Long: "Start crawl task",
Short: "Start crawl task",
Run: func(cmd *cobra.Command, args []string) {
if taskCommandCfg.Now {
task.Crawl(log.Logger)
}
c := cron.New()
_, err := c.AddFunc(taskCommandCfg.Cron, func() { task.Crawl(log.Logger) })
if err != nil {
log.Logger.Error("Failed to add task", zap.Error(err))
}
c.Start()
select {}
},
}

var updateTaskCmd = &cobra.Command{
Use: "update",
Long: "Start update outdated game infos task",
Short: "Start update outdated game infos task",
Run: func(cmd *cobra.Command, args []string) {
if taskCommandCfg.Now {
task.UpdateOutdatedGameInfos(log.Logger)
}
c := cron.New()
_, err := c.AddFunc(taskCommandCfg.Cron, func() { task.UpdateOutdatedGameInfos(log.Logger) })
if err != nil {
log.Logger.Error("Failed to add task", zap.Error(err))
}
c.Start()
select {}
},
}

var taskCmd = &cobra.Command{
Use: "task",
Long: "Start task",
Short: "Start task",
Run: func(cmd *cobra.Command, args []string) {
if taskCmdCfg.Crawl {
task.Crawl(log.Logger)
c := cron.New()
_, err := c.AddFunc(taskCmdCfg.CrawlCron, func() { task.Crawl(log.Logger) })
if err != nil {
log.Logger.Error("Failed to add task", zap.Error(err))
}
c.Start()
select {}
}
},
}

func init() {
taskCmd.Flags().BoolVar(&taskCmdCfg.Crawl, "crawl", false, "enable auto crawl")
taskCmd.Flags().StringVar(&taskCmdCfg.CrawlCron, "crawl-cron", "0 */3 * * *", "crawl cron expression")
crawlTaskCmd.Flags().StringVar(&taskCommandCfg.Cron, "cron", "0 */3 * * *", "cron expression")
crawlTaskCmd.Flags().BoolVar(&taskCommandCfg.Now, "now", false, "run task immediately")

updateTaskCmd.Flags().StringVar(&taskCommandCfg.Cron, "cron", "0 */3 * * *", "cron expression")
updateTaskCmd.Flags().BoolVar(&taskCommandCfg.Now, "now", false, "run task immediately")

taskCmd.AddCommand(crawlTaskCmd)
taskCmd.AddCommand(updateTaskCmd)

RootCmd.AddCommand(taskCmd)
}
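Both new subcommands follow the same run shape: optionally fire the job once when --now is set, then register it with robfig/cron and block so the scheduler keeps running. A minimal sketch of that pattern in isolation, with a placeholder job standing in for task.Crawl(log.Logger):

```go
package main

import (
	"fmt"

	"github.com/robfig/cron/v3"
)

func main() {
	job := func() { fmt.Println("run crawl") } // placeholder for task.Crawl(log.Logger)

	now := true // corresponds to the --now flag
	if now {
		job() // run once immediately, as the subcommands do before scheduling
	}

	c := cron.New()
	// "0 */3 * * *" is the default --cron value: minute 0 of every third hour.
	if _, err := c.AddFunc("0 */3 * * *", job); err != nil {
		fmt.Println("failed to add task:", err)
	}
	c.Start()
	select {} // block forever so the cron goroutine keeps firing
}
```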

@@ -1,57 +0,0 @@
package cmd

import (
"pcgamedb/crawler"
"pcgamedb/db"
"pcgamedb/log"

"github.com/spf13/cobra"
"go.mongodb.org/mongo-driver/bson/primitive"
"go.uber.org/zap"
)

var updateCmd = &cobra.Command{
Use: "update",
Long: "Update game info by game data platform",
Short: "Update game info by game data platform",
Run: updateRun,
}

type updateCommandConfig struct {
PlatformID int
Platform string
ID string
}

var updateCmdcfx updateCommandConfig

func init() {
updateCmd.Flags().IntVarP(&updateCmdcfx.PlatformID, "platform-id", "p", 0, "platform id")
updateCmd.Flags().StringVarP(&updateCmdcfx.Platform, "platform", "t", "", "platform")
updateCmd.Flags().StringVarP(&updateCmdcfx.ID, "game-id", "i", "", "game info id")
RootCmd.AddCommand(updateCmd)
}

func updateRun(cmd *cobra.Command, args []string) {
id, err := primitive.ObjectIDFromHex(updateCmdcfx.ID)
if err != nil {
log.Logger.Error("Failed to parse game info id", zap.Error(err))
return
}
oldInfo, err := db.GetGameInfoByID(id)
if err != nil {
log.Logger.Error("Failed to get game info", zap.Error(err))
return
}
newInfo, err := crawler.GenerateGameInfo(updateCmdcfx.Platform, updateCmdcfx.PlatformID)
if err != nil {
log.Logger.Error("Failed to generate game info", zap.Error(err))
return
}
newInfo.ID = id
newInfo.GameIDs = oldInfo.GameIDs
err = db.SaveGameInfo(newInfo)
if err != nil {
log.Logger.Error("Failed to save game info", zap.Error(err))
}
}

@@ -4,15 +4,15 @@ import (
"fmt"
"runtime"

"pcgamedb/constant"
"game-crawler/constant"

"github.com/spf13/cobra"
)

var versionCmd = &cobra.Command{
Use: "version",
Long: "Get version of pcgamedb",
Short: "Get version of pcgamedb",
Long: "Get version of game-crawler",
Short: "Get version of game-crawler",
Run: func(cmd *cobra.Command, args []string) {
fmt.Printf("Version: %s\n", constant.Version)
fmt.Printf("Go: %s\n", runtime.Version())

@@ -1,10 +1,8 @@
package config

import (
"bytes"
"encoding/json"
"os"
"os/exec"
"reflect"
"strconv"
"strings"

@@ -18,12 +16,14 @@ type config struct {
Redis redis `json:"redis"`
OnlineFix onlinefix `json:"online_fix"`
Twitch twitch `json:"twitch"`
Rutracker rutracker `json:"rutracker"`
Webhooks webhooks `json:"webhooks"`
CFClearanceScraper cfClearanceScraper `json:"cf_clearance_scraper"`
DatabaseAvaliable bool
OnlineFixAvaliable bool
MegaAvaliable bool
RedisAvaliable bool
}

type rutracker struct {
User string `env:"RUTRACKER_USER" json:"user"`
Password string `env:"RUTRACKER_PASSWORD" json:"password"`
}

type cfClearanceScraper struct {

@@ -81,7 +81,6 @@ func init() {
User: "root",
Password: "password",
},
MegaAvaliable: TestMega(),
Server: server{
AutoCrawlCron: "0 */3 * * *",
},

@@ -97,11 +96,20 @@ func init() {
}
}
loadEnvVariables(&Config)
Config.OnlineFixAvaliable = Config.OnlineFix.User != "" && Config.OnlineFix.Password != ""
Config.RedisAvaliable = Config.Redis.Host != ""
Config.DatabaseAvaliable = Config.Database.Database != "" && Config.Database.Host != ""

if Config.CFClearanceScraper.Url != "" && !strings.HasSuffix(Config.CFClearanceScraper.Url, "/cf-clearance-scraper") {
if Config.OnlineFix.User == "" || Config.OnlineFix.Password == "" {
panic("Need OnlineFix User and Password")
}
if Config.Redis.Host == "" {
panic("Need Redis Host")
}
if Config.Database.Database == "" || Config.Database.Host == "" {
panic("Need Database Name and Host")
}
if Config.CFClearanceScraper.Url == "" {
panic("Need CF Clearance Scraper URL")
}
if !strings.HasSuffix(Config.CFClearanceScraper.Url, "/cf-clearance-scraper") {
Config.CFClearanceScraper.Url += "/cf-clearance-scraper"
}
}

@@ -141,11 +149,3 @@ func loadEnvVariables(cfg interface{}) {
}
}
}

func TestMega() bool {
cmd := exec.Command("mega-get", "--help")
var out bytes.Buffer
cmd.Stdout = &out
err := cmd.Run()
return err == nil
}
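The struct tags above (for example `env:"RUTRACKER_USER"`) are what loadEnvVariables reads via reflection to override file-based values from the environment; its body is outside this hunk, so the sketch below only illustrates the general tag-plus-reflection approach and is not the project's exact implementation:

```go
package main

import (
	"fmt"
	"os"
	"reflect"
)

// rutracker mirrors the struct added in this diff.
type rutracker struct {
	User     string `env:"RUTRACKER_USER" json:"user"`
	Password string `env:"RUTRACKER_PASSWORD" json:"password"`
}

// loadEnv is a hypothetical helper: it overrides string fields whose env tag
// names a variable that is set. The real loadEnvVariables also walks nested
// structs and converts non-string kinds (hence the "strconv" import above).
func loadEnv(cfg interface{}) {
	v := reflect.ValueOf(cfg).Elem()
	t := v.Type()
	for i := 0; i < v.NumField(); i++ {
		tag := t.Field(i).Tag.Get("env")
		if tag == "" || v.Field(i).Kind() != reflect.String {
			continue
		}
		if val, ok := os.LookupEnv(tag); ok {
			v.Field(i).SetString(val)
		}
	}
}

func main() {
	cfg := rutracker{}
	loadEnv(&cfg)
	fmt.Println("rutracker user:", cfg.User)
}
```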

@@ -24,10 +24,14 @@ const (
Steam250Top250URL = "https://steam250.com/top250"
Steam250BestOfTheYearURL = "https://steam250.com/%v"
Steam250WeekTop50URL = "https://steam250.com/7day"
Steam250MonthTop50URL = "https://steam250.com/30day"
Steam250MostPlayedURL = "https://steam250.com/most_played"
FitGirlURL = "https://fitgirl-repacks.site/page/%v/"
SteamRIPBaseURL = "https://steamrip.com"
SteamRIPGameListURL = "https://steamrip.com/games-list-page/"
RepackInfoURL = "https://repack.info/page/%v/"
GnarlyURL = "https://rentry.org/gnarly_repacks"
RutrackerTopicURL = "https://rutracker.org/forum/%s"
RutrackerURL = "https://rutracker.org/forum/index.php"
RutrackerLoginURL = "https://rutracker.org/forum/login.php"
)
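Several of these constants are format strings that the crawlers later fill in with a page number or a forum-relative path via fmt.Sprintf (the FitGirl and Chovka crawlers further down do exactly this). A small self-contained sketch with two of the format constants copied inline:

```go
package main

import "fmt"

const (
	// Copied from the hunk above so the example is self-contained.
	FitGirlURL        = "https://fitgirl-repacks.site/page/%v/"
	RutrackerTopicURL = "https://rutracker.org/forum/%s"
)

func main() {
	// Page-indexed list URL, e.g. page 3 of the FitGirl archive.
	fmt.Println(fmt.Sprintf(FitGirlURL, 3))

	// Topic URL built from a forum-relative path; the path here is a placeholder.
	fmt.Println(fmt.Sprintf(RutrackerTopicURL, "viewtopic.php?t=123"))
}
```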

172  crawler/1337x.go
@@ -8,169 +8,221 @@ import (
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"pcgamedb/constant"
|
||||
"pcgamedb/db"
|
||||
"pcgamedb/model"
|
||||
"pcgamedb/utils"
|
||||
"game-crawler/constant"
|
||||
"game-crawler/db"
|
||||
"game-crawler/model"
|
||||
"game-crawler/utils"
|
||||
|
||||
"github.com/PuerkitoBio/goquery"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
type Formatter func(string) string
|
||||
type FormatterFunc func(string) string
|
||||
|
||||
type s1337xCrawler struct {
|
||||
source string
|
||||
formatter Formatter
|
||||
platform string
|
||||
formatter FormatterFunc
|
||||
logger *zap.Logger
|
||||
}
|
||||
|
||||
func New1337xCrawler(source string, formatter Formatter, logger *zap.Logger) *s1337xCrawler {
|
||||
func New1337xCrawler(source string, platform string, formatter FormatterFunc, logger *zap.Logger) *s1337xCrawler {
|
||||
return &s1337xCrawler{
|
||||
source: source,
|
||||
formatter: formatter,
|
||||
logger: logger,
|
||||
platform: platform,
|
||||
}
|
||||
}
|
||||
|
||||
func (c *s1337xCrawler) Crawl(page int) ([]*model.GameItem, error) {
|
||||
var resp *utils.FetchResponse
|
||||
var doc *goquery.Document
|
||||
var err error
|
||||
c.logger.Info("Starting Crawl", zap.Int("Page", page), zap.String("Source", c.source))
|
||||
requestUrl := fmt.Sprintf("%s/%s/%d/", constant.C1337xBaseURL, c.source, page)
|
||||
resp, err = utils.Fetch(utils.FetchConfig{
|
||||
Url: requestUrl,
|
||||
})
|
||||
resp, err := utils.Request().Get(requestUrl)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to fetch page", zap.String("URL", requestUrl), zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to fetch page %d for source %s: %w", page, c.source, err)
|
||||
}
|
||||
doc, err = goquery.NewDocumentFromReader(bytes.NewReader(resp.Data))
|
||||
|
||||
doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Body()))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to parse HTML document", zap.String("URL", requestUrl), zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to parse HTML document for page %d: %w", page, err)
|
||||
}
|
||||
|
||||
trSelection := doc.Find("tbody>tr")
|
||||
var urls []string
|
||||
trSelection.Each(func(i int, trNode *goquery.Selection) {
|
||||
nameSelection := trNode.Find(".name").First()
|
||||
if aNode := nameSelection.Find("a").Eq(1); aNode.Length() > 0 {
|
||||
url, _ := aNode.Attr("href")
|
||||
urls = append(urls, url)
|
||||
url, exists := aNode.Attr("href")
|
||||
if exists {
|
||||
urls = append(urls, url)
|
||||
} else {
|
||||
c.logger.Warn("Failed to find URL in row", zap.Int("RowIndex", i))
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
var res []*model.GameItem
|
||||
for _, u := range urls {
|
||||
u = fmt.Sprintf("%s%s", constant.C1337xBaseURL, u)
|
||||
if db.IsGameCrawledByURL(u) {
|
||||
fullURL := fmt.Sprintf("%s%s", constant.C1337xBaseURL, u)
|
||||
if db.IsGameCrawledByURL(fullURL) {
|
||||
c.logger.Info("Skipping already crawled URL", zap.String("URL", fullURL))
|
||||
continue
|
||||
}
|
||||
c.logger.Info("Crawling", zap.String("URL", u))
|
||||
item, err := c.CrawlByUrl(u)
|
||||
|
||||
c.logger.Info("Crawling URL", zap.String("URL", fullURL))
|
||||
item, err := c.CrawlByUrl(fullURL)
|
||||
if err != nil {
|
||||
c.logger.Warn("Failed to crawl", zap.Error(err), zap.String("URL", u))
|
||||
c.logger.Warn("Failed to crawl URL", zap.String("URL", fullURL), zap.Error(err))
|
||||
continue
|
||||
}
|
||||
|
||||
err = db.SaveGameItem(item)
|
||||
if err != nil {
|
||||
c.logger.Warn("Failed to save", zap.Error(err), zap.String("URL", u))
|
||||
c.logger.Warn("Failed to save game item to database", zap.String("URL", fullURL), zap.Error(err))
|
||||
continue
|
||||
}
|
||||
|
||||
res = append(res, item)
|
||||
|
||||
if err := OrganizeGameItem(item); err != nil {
|
||||
c.logger.Warn("Failed to organize", zap.Error(err), zap.String("URL", u))
|
||||
c.logger.Warn("Failed to organize game item", zap.String("URL", fullURL), zap.Error(err))
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
c.logger.Info("Finished Crawl", zap.Int("Page", page), zap.Int("ItemsCrawled", len(res)))
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func (c *s1337xCrawler) CrawlByUrl(url string) (*model.GameItem, error) {
|
||||
resp, err := utils.Fetch(utils.FetchConfig{
|
||||
Url: url,
|
||||
})
|
||||
func (c *s1337xCrawler) CrawlByUrl(URL string) (*model.GameItem, error) {
|
||||
c.logger.Info("Crawling game", zap.String("URL", URL))
|
||||
resp, err := utils.Request().Get(URL)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to fetch URL", zap.String("URL", URL), zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to fetch URL %s: %w", URL, err)
|
||||
}
|
||||
var item = &model.GameItem{}
|
||||
item.Url = url
|
||||
doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data))
|
||||
|
||||
doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Body()))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to parse HTML document", zap.String("URL", URL), zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to parse HTML document for URL %s: %w", URL, err)
|
||||
}
|
||||
|
||||
item := &model.GameItem{
|
||||
Url: URL,
|
||||
}
|
||||
|
||||
selection := doc.Find(".torrent-detail-page ul.list>li")
|
||||
info := make(map[string]string)
|
||||
selection.Each(func(i int, item *goquery.Selection) {
|
||||
info[strings.TrimSpace(item.Find("strong").Text())] = strings.TrimSpace(item.Find("span").Text())
|
||||
key := strings.TrimSpace(item.Find("strong").Text())
|
||||
value := strings.TrimSpace(item.Find("span").Text())
|
||||
info[key] = value
|
||||
c.logger.Debug("Extracted info", zap.String("Key", key), zap.String("Value", value))
|
||||
})
|
||||
|
||||
magnetRegex := regexp.MustCompile(`magnet:\?[^"]*`)
|
||||
magnetRegexRes := magnetRegex.FindStringSubmatch(string(resp.Data))
|
||||
magnetRegexRes := magnetRegex.FindStringSubmatch(string(resp.Body()))
|
||||
if len(magnetRegexRes) == 0 {
|
||||
c.logger.Error("Failed to find magnet link", zap.String("URL", URL))
|
||||
return nil, fmt.Errorf("failed to find magnet link on URL %s", URL)
|
||||
}
|
||||
|
||||
item.Size = info["Total size"]
|
||||
item.RawName = doc.Find("title").Text()
|
||||
item.RawName = strings.Replace(item.RawName, "Download ", "", 1)
|
||||
item.RawName = strings.TrimSpace(strings.Replace(item.RawName, "Torrent | 1337x", " ", 1))
|
||||
item.Name = c.formatter(item.RawName)
|
||||
item.Download = magnetRegexRes[0]
|
||||
item.Downloads = map[string]string{
|
||||
"magnet": magnetRegexRes[0],
|
||||
}
|
||||
item.Author = strings.Replace(c.source, "-torrents", "", -1)
|
||||
item.Platform = c.platform
|
||||
|
||||
c.logger.Info("Successfully crawled URL", zap.String("URL", URL))
|
||||
return item, nil
|
||||
}
|
||||
|
||||
func (c *s1337xCrawler) CrawlMulti(pages []int) (res []*model.GameItem, err error) {
|
||||
var items []*model.GameItem
|
||||
c.logger.Info("Starting CrawlMulti", zap.Ints("Pages", pages), zap.String("Source", c.source))
|
||||
totalPageNum, err := c.GetTotalPageNum()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to get total page number", zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to get total page number: %w", err)
|
||||
}
|
||||
|
||||
for _, page := range pages {
|
||||
if page > totalPageNum {
|
||||
c.logger.Warn("Page exceeds total page number", zap.Int("Page", page), zap.Int("TotalPages", totalPageNum))
|
||||
continue
|
||||
}
|
||||
items, err = c.Crawl(page)
|
||||
res = append(res, items...)
|
||||
|
||||
items, err := c.Crawl(page)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to crawl page", zap.Int("Page", page), zap.Error(err))
|
||||
continue
|
||||
}
|
||||
res = append(res, items...)
|
||||
}
|
||||
|
||||
c.logger.Info("Finished CrawlMulti", zap.Int("TotalItems", len(res)))
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func (c *s1337xCrawler) CrawlAll() (res []*model.GameItem, err error) {
|
||||
c.logger.Info("Starting CrawlAll", zap.String("Source", c.source))
|
||||
totalPageNum, err := c.GetTotalPageNum()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to get total page number", zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to get total page number: %w", err)
|
||||
}
|
||||
var items []*model.GameItem
|
||||
|
||||
for i := 1; i <= totalPageNum; i++ {
|
||||
items, err = c.Crawl(i)
|
||||
res = append(res, items...)
|
||||
items, err := c.Crawl(i)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to crawl page", zap.Int("Page", i), zap.Error(err))
|
||||
continue
|
||||
}
|
||||
res = append(res, items...)
|
||||
}
|
||||
|
||||
c.logger.Info("Finished CrawlAll", zap.Int("TotalPages", totalPageNum), zap.Int("TotalItems", len(res)))
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func (c *s1337xCrawler) GetTotalPageNum() (int, error) {
|
||||
var resp *utils.FetchResponse
|
||||
var doc *goquery.Document
|
||||
var err error
|
||||
|
||||
c.logger.Info("Fetching total page number", zap.String("Source", c.source))
|
||||
requestUrl := fmt.Sprintf("%s/%s/%d/", constant.C1337xBaseURL, c.source, 1)
|
||||
resp, err = utils.Fetch(utils.FetchConfig{
|
||||
Url: requestUrl,
|
||||
})
|
||||
resp, err := utils.Request().Get(requestUrl)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
c.logger.Error("Failed to fetch first page for total page number", zap.String("URL", requestUrl), zap.Error(err))
|
||||
return 0, fmt.Errorf("failed to fetch first page for total page number: %w", err)
|
||||
}
|
||||
doc, _ = goquery.NewDocumentFromReader(bytes.NewReader(resp.Data))
|
||||
|
||||
doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Body()))
|
||||
if err != nil {
|
||||
c.logger.Error("Failed to parse HTML document for total page number", zap.String("URL", requestUrl), zap.Error(err))
|
||||
return 0, fmt.Errorf("failed to parse HTML document for total page number: %w", err)
|
||||
}
|
||||
|
||||
selection := doc.Find(".last")
|
||||
pageStr, exist := selection.Find("a").Attr("href")
|
||||
if !exist {
|
||||
return 0, errors.New("total page num not found")
|
||||
pageStr, exists := selection.Find("a").Attr("href")
|
||||
if !exists {
|
||||
c.logger.Error("Failed to find total page number in pagination", zap.String("URL", requestUrl))
|
||||
return 0, errors.New("total page number not found in pagination")
|
||||
}
|
||||
|
||||
pageStr = strings.ReplaceAll(pageStr, c.source, "")
|
||||
pageStr = strings.ReplaceAll(pageStr, "/", "")
|
||||
totalPageNum, err := strconv.Atoi(pageStr)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
c.logger.Error("Failed to convert total page number to integer", zap.String("PageString", pageStr), zap.Error(err))
|
||||
return 0, fmt.Errorf("failed to convert total page number '%s' to integer: %w", pageStr, err)
|
||||
}
|
||||
|
||||
c.logger.Info("Successfully fetched total page number", zap.Int("TotalPages", totalPageNum))
|
||||
return totalPageNum, nil
|
||||
}
|
||||
|
@@ -2,15 +2,15 @@ package crawler
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"pcgamedb/constant"
|
||||
"pcgamedb/db"
|
||||
"pcgamedb/model"
|
||||
"pcgamedb/utils"
|
||||
"game-crawler/constant"
|
||||
"game-crawler/db"
|
||||
"game-crawler/model"
|
||||
"game-crawler/utils"
|
||||
|
||||
"github.com/PuerkitoBio/goquery"
|
||||
"go.uber.org/zap"
|
||||
@ -30,151 +30,184 @@ func (c *ChovkaCrawler) Name() string {
|
||||
return "ChovkaCrawler"
|
||||
}
|
||||
|
||||
func (c *ChovkaCrawler) CrawlByUrl(url string) (*model.GameItem, error) {
|
||||
resp, err := utils.Fetch(utils.FetchConfig{
|
||||
Url: url,
|
||||
})
|
||||
func (c *ChovkaCrawler) CrawlByUrl(URL string) (*model.GameItem, error) {
|
||||
c.logger.Info("Crawling game", zap.String("URL", URL))
|
||||
resp, err := utils.Request().Get(URL)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to fetch URL", zap.String("URL", URL), zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to fetch URL %s: %w", URL, err)
|
||||
}
|
||||
doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data))
|
||||
|
||||
doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Body()))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to parse HTML document", zap.String("URL", URL), zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to parse HTML document from URL %s: %w", URL, err)
|
||||
}
|
||||
item, err := db.GetGameItemByUrl(url)
|
||||
|
||||
item, err := db.GetGameItemByUrl(URL)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to retrieve game item from database", zap.String("URL", URL), zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to retrieve game item from database for URL %s: %w", URL, err)
|
||||
}
|
||||
item.Url = url
|
||||
|
||||
item.Url = URL
|
||||
item.RawName = doc.Find(".inner-entry__title").First().Text()
|
||||
item.Name = ChovkaFormatter(item.RawName)
|
||||
item.Author = "Chovka"
|
||||
item.UpdateFlag = item.RawName
|
||||
item.Platform = "windows"
|
||||
|
||||
downloadURL := doc.Find(".download-torrent").AttrOr("href", "")
|
||||
if downloadURL == "" {
|
||||
return nil, errors.New("failed to find download URL")
|
||||
c.logger.Error("Download URL not found", zap.String("URL", URL))
|
||||
return nil, fmt.Errorf("failed to find download URL on page %s", URL)
|
||||
}
|
||||
resp, err = utils.Fetch(utils.FetchConfig{
|
||||
Headers: map[string]string{"Referer": url},
|
||||
Url: downloadURL,
|
||||
})
|
||||
|
||||
resp, err = utils.Request().SetHeader("Referer", URL).Get(downloadURL)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to fetch download URL", zap.String("downloadURL", downloadURL), zap.String("URL", URL), zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to fetch download URL %s for page %s: %w", downloadURL, URL, err)
|
||||
}
|
||||
magnet, size, err := utils.ConvertTorrentToMagnet(resp.Data)
|
||||
|
||||
magnet, size, err := utils.ConvertTorrentToMagnet(resp.Body())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to convert torrent to magnet", zap.String("URL", URL), zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to convert torrent to magnet for URL %s: %w", URL, err)
|
||||
}
|
||||
|
||||
item.Size = size
|
||||
item.Download = magnet
|
||||
item.Downloads = map[string]string{
|
||||
"magnet": magnet,
|
||||
}
|
||||
c.logger.Info("Successfully crawled URL", zap.String("URL", URL))
|
||||
return item, nil
|
||||
}
|
||||
|
||||
func (c *ChovkaCrawler) Crawl(page int) ([]*model.GameItem, error) {
|
||||
resp, err := utils.Fetch(utils.FetchConfig{
|
||||
Url: fmt.Sprintf(constant.RepackInfoURL, page),
|
||||
})
|
||||
c.logger.Info("Starting Crawl", zap.Int("Page", page))
|
||||
resp, err := utils.Request().Get(fmt.Sprintf(constant.RepackInfoURL, page))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to fetch page", zap.Int("Page", page), zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to fetch page %d: %w", page, err)
|
||||
}
|
||||
doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data))
|
||||
|
||||
doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Body()))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to parse HTML document", zap.Int("Page", page), zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to parse HTML document for page %d: %w", page, err)
|
||||
}
|
||||
|
||||
var urls []string
|
||||
var updateFlags []string
|
||||
doc.Find(".entry").Each(func(i int, s *goquery.Selection) {
|
||||
u, exist := s.Find(".entry__title.h2 a").Attr("href")
|
||||
if !exist {
|
||||
c.logger.Warn("Entry does not contain a valid URL", zap.Int("Index", i))
|
||||
return
|
||||
}
|
||||
urls = append(urls, u)
|
||||
updateFlags = append(updateFlags, s.Find(".entry__title.h2 a").Text())
|
||||
updateFlags = append(updateFlags, base64.StdEncoding.EncodeToString([]byte(s.Find(".entry__title.h2 a").Text())))
|
||||
})
|
||||
|
||||
var res []*model.GameItem
|
||||
for i, u := range urls {
|
||||
if db.IsChovkaCrawled(updateFlags[i]) {
|
||||
c.logger.Info("Skipping already crawled URL", zap.String("URL", u))
|
||||
continue
|
||||
}
|
||||
c.logger.Info("Crawling", zap.String("URL", u))
|
||||
|
||||
c.logger.Info("Crawling URL", zap.String("URL", u))
|
||||
item, err := c.CrawlByUrl(u)
|
||||
if err != nil {
|
||||
c.logger.Warn("Failed to crawl", zap.Error(err), zap.String("URL", u))
|
||||
c.logger.Warn("Failed to crawl URL", zap.String("URL", u), zap.Error(err))
|
||||
continue
|
||||
}
|
||||
|
||||
item.UpdateFlag = updateFlags[i]
|
||||
if err := db.SaveGameItem(item); err != nil {
|
||||
c.logger.Warn("Failed to save", zap.Error(err), zap.String("URL", u))
|
||||
c.logger.Warn("Failed to save game item to database", zap.String("URL", u), zap.Error(err))
|
||||
continue
|
||||
}
|
||||
|
||||
res = append(res, item)
|
||||
|
||||
if err := OrganizeGameItem(item); err != nil {
|
||||
c.logger.Warn("Failed to organize", zap.Error(err), zap.String("URL", u))
|
||||
c.logger.Warn("Failed to organize game item", zap.String("URL", u), zap.Error(err))
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
c.logger.Info("Finished Crawl", zap.Int("Page", page), zap.Int("ItemsCrawled", len(res)))
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func (c *ChovkaCrawler) CrawlMulti(pages []int) ([]*model.GameItem, error) {
|
||||
c.logger.Info("Starting CrawlMulti", zap.Ints("Pages", pages))
|
||||
var res []*model.GameItem
|
||||
for _, page := range pages {
|
||||
items, err := c.Crawl(page)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to crawl multiple pages", zap.Int("Page", page), zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to crawl page %d: %w", page, err)
|
||||
}
|
||||
res = append(res, items...)
|
||||
}
|
||||
c.logger.Info("Finished CrawlMulti", zap.Int("TotalPages", len(pages)), zap.Int("TotalItems", len(res)))
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func (c *ChovkaCrawler) CrawlAll() ([]*model.GameItem, error) {
|
||||
c.logger.Info("Starting CrawlAll")
|
||||
totalPageNum, err := c.GetTotalPageNum()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to get total page number", zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to get total page number: %w", err)
|
||||
}
|
||||
|
||||
var res []*model.GameItem
|
||||
for i := 1; i <= totalPageNum; i++ {
|
||||
items, err := c.Crawl(i)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to crawl all pages", zap.Int("Page", i), zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to crawl page %d: %w", i, err)
|
||||
}
|
||||
res = append(res, items...)
|
||||
}
|
||||
|
||||
c.logger.Info("Finished CrawlAll", zap.Int("TotalPages", totalPageNum), zap.Int("TotalItems", len(res)))
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func (c *ChovkaCrawler) GetTotalPageNum() (int, error) {
|
||||
resp, err := utils.Fetch(utils.FetchConfig{
|
||||
Url: fmt.Sprintf(constant.RepackInfoURL, 1),
|
||||
})
|
||||
c.logger.Info("Fetching total page number")
|
||||
resp, err := utils.Request().Get(fmt.Sprintf(constant.RepackInfoURL, 1))
|
||||
if err != nil {
|
||||
return 0, err
|
||||
c.logger.Error("Failed to fetch first page for total page number", zap.Error(err))
|
||||
return 0, fmt.Errorf("failed to fetch first page for total page number: %w", err)
|
||||
}
|
||||
doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data))
|
||||
|
||||
doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Body()))
|
||||
if err != nil {
|
||||
return 0, err
|
||||
c.logger.Error("Failed to parse HTML document for total page number", zap.Error(err))
|
||||
return 0, fmt.Errorf("failed to parse HTML document for total page number: %w", err)
|
||||
}
|
||||
|
||||
pageStr := doc.Find(".pagination>a").Last().Text()
|
||||
totalPageNum, err := strconv.Atoi(pageStr)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
c.logger.Error("Failed to convert total page number to integer", zap.String("PageString", pageStr), zap.Error(err))
|
||||
return 0, fmt.Errorf("failed to convert total page number '%s' to integer: %w", pageStr, err)
|
||||
}
|
||||
|
||||
c.logger.Info("Successfully fetched total page number", zap.Int("TotalPages", totalPageNum))
|
||||
return totalPageNum, nil
|
||||
}
|
||||
|
||||
func ChovkaFormatter(name string) string {
|
||||
idx := strings.Index(name, "| RePack")
|
||||
if idx != -1 {
|
||||
name = name[:idx]
|
||||
}
|
||||
idx = strings.Index(name, "| GOG")
|
||||
if idx != -1 {
|
||||
name = name[:idx]
|
||||
}
|
||||
idx = strings.Index(name, "| Portable")
|
||||
if idx != -1 {
|
||||
name = name[:idx]
|
||||
cutoffs := []string{"| RePack", "| GOG", "| Portable"}
|
||||
for _, cutoff := range cutoffs {
|
||||
if idx := strings.Index(name, cutoff); idx != -1 {
|
||||
name = name[:idx]
|
||||
}
|
||||
}
|
||||
return strings.TrimSpace(name)
|
||||
}
|
||||
|

@@ -1,7 +1,8 @@
package crawler

import (
"pcgamedb/model"
"game-crawler/config"
"game-crawler/model"

"go.uber.org/zap"
)

@@ -10,6 +11,7 @@ type Crawler interface {
Name() string
Crawl(int) ([]*model.GameItem, error)
CrawlAll() ([]*model.GameItem, error)
CrawlByUrl(string) (*model.GameItem, error)
}

type SimpleCrawler interface {

@@ -24,15 +26,18 @@ type PagedCrawler interface {

func BuildCrawlerMap(logger *zap.Logger) map[string]Crawler {
ret := map[string]Crawler{
"fitgirl": NewFitGirlCrawler(logger),
"dodi": NewDODICrawler(logger),
"kaoskrew": NewKaOsKrewCrawler(logger),
"freegog": NewFreeGOGCrawler(logger),
"xatab": NewXatabCrawler(logger),
"onlinefix": NewOnlineFixCrawler(logger),
"steamrip": NewSteamRIPCrawler(logger),
"chovka": NewChovkaCrawler(logger),
"goggames": NewGOGGamesCrawler(logger),
"fitgirl": NewFitGirlCrawler(logger),
"dodi": NewDODICrawler(logger),
"kaoskrew": NewKaOsKrewCrawler(logger),
"freegog": NewFreeGOGCrawler(config.Config.CFClearanceScraper.Url, logger),
"xatab": NewXatabCrawler(logger),
"onlinefix": NewOnlineFixCrawler(config.Config.OnlineFix.User, config.Config.OnlineFix.Password, logger),
"steamrip": NewSteamRIPCrawler(logger),
"chovka": NewChovkaCrawler(logger),
"goggames": NewGOGGamesCrawler(config.Config.CFClearanceScraper.Url, logger),
"johncena141": NewJohncena141Crawler(logger),
"omggods": NewOmgGodsCrawler(config.Config.CFClearanceScraper.Url, config.Config.Rutracker.User, config.Config.Rutracker.Password, logger),
"rutracker-linux-game": NewRutrackerLinuxGameCrawler(config.Config.CFClearanceScraper.Url, config.Config.Rutracker.User, config.Config.Rutracker.Password, logger),
// "gnarly": NewGnarlyCrawler(logger),
}
return ret
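BuildCrawlerMap wires each source name to a value satisfying the Crawler interface above, so callers can dispatch by name or iterate every source through the same four methods. A minimal consumption sketch, assuming the game-crawler/crawler import path and that the config package has already been initialized (several constructors now read credentials from it):

```go
package main

import (
	"fmt"

	"game-crawler/crawler" // import path assumed from the new module name

	"go.uber.org/zap"
)

func main() {
	logger := zap.NewExample()
	crawlers := crawler.BuildCrawlerMap(logger)

	// Dispatch by source name: crawl page 1 of FitGirl.
	if c, ok := crawlers["fitgirl"]; ok {
		items, err := c.Crawl(1)
		if err != nil {
			logger.Warn("crawl failed", zap.Error(err))
		}
		fmt.Println("fitgirl items:", len(items))
	}

	// Or walk every registered source through the shared interface.
	for name, c := range crawlers {
		fmt.Println("registered:", name, "->", c.Name())
	}
}
```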

@@ -4,8 +4,8 @@ import (
"regexp"
"strings"

"pcgamedb/model"
"pcgamedb/utils"
"game-crawler/model"
"game-crawler/utils"

"go.uber.org/zap"
)

@@ -22,6 +22,7 @@ func NewDODICrawler(logger *zap.Logger) *DODICrawler {
logger: logger,
crawler: *New1337xCrawler(
DODIName,
"windows",
DODIFormatter,
logger,
),
@@ -2,16 +2,16 @@ package crawler
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"pcgamedb/constant"
|
||||
"pcgamedb/db"
|
||||
"pcgamedb/model"
|
||||
"pcgamedb/utils"
|
||||
"game-crawler/constant"
|
||||
"game-crawler/db"
|
||||
"game-crawler/model"
|
||||
"game-crawler/utils"
|
||||
|
||||
"github.com/PuerkitoBio/goquery"
|
||||
"go.uber.org/zap"
|
||||
@ -31,140 +31,185 @@ func (c *FitGirlCrawler) Name() string {
|
||||
return "FitGirlCrawler"
|
||||
}
|
||||
|
||||
func (c *FitGirlCrawler) CrawlByUrl(url string) (*model.GameItem, error) {
|
||||
resp, err := utils.Fetch(utils.FetchConfig{
|
||||
Url: url,
|
||||
})
|
||||
func (c *FitGirlCrawler) CrawlByUrl(URL string) (*model.GameItem, error) {
|
||||
c.logger.Info("Crawling game", zap.String("URL", URL))
|
||||
resp, err := utils.Request().Get(URL)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to fetch URL", zap.String("URL", URL), zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to fetch URL %s: %w", URL, err)
|
||||
}
|
||||
doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data))
|
||||
|
||||
doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Body()))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to parse HTML document", zap.String("URL", URL), zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to parse HTML document for URL %s: %w", URL, err)
|
||||
}
|
||||
|
||||
titleElem := doc.Find("h3").First().Find("strong")
|
||||
if titleElem.Length() == 0 {
|
||||
return nil, errors.New("failed to find title")
|
||||
c.logger.Error("Failed to find title", zap.String("URL", URL))
|
||||
return nil, fmt.Errorf("failed to find title on page %s", URL)
|
||||
}
|
||||
|
||||
rawTitle := titleElem.Text()
|
||||
titleElem.Children().Remove()
|
||||
title := strings.TrimSpace(titleElem.Text())
|
||||
|
||||
sizeRegex := regexp.MustCompile(`Repack Size: <strong>(.*?)</strong>`)
|
||||
sizeRegexRes := sizeRegex.FindStringSubmatch(string(resp.Data))
|
||||
sizeRegexRes := sizeRegex.FindStringSubmatch(string(resp.Body()))
|
||||
if len(sizeRegexRes) == 0 {
|
||||
return nil, errors.New("failed to find size")
|
||||
c.logger.Error("Failed to find size", zap.String("URL", URL))
|
||||
return nil, fmt.Errorf("failed to find size information on page %s", URL)
|
||||
}
|
||||
size := sizeRegexRes[1]
|
||||
|
||||
magnetRegex := regexp.MustCompile(`magnet:\?[^"]*`)
|
||||
magnetRegexRes := magnetRegex.FindStringSubmatch(string(resp.Data))
|
||||
magnetRegexRes := magnetRegex.FindStringSubmatch(string(resp.Body()))
|
||||
if len(magnetRegexRes) == 0 {
|
||||
return nil, errors.New("failed to find magnet")
|
||||
c.logger.Error("Failed to find magnet link", zap.String("URL", URL))
|
||||
return nil, fmt.Errorf("failed to find magnet link on page %s", URL)
|
||||
}
|
||||
magnet := magnetRegexRes[0]
|
||||
item, err := db.GetGameItemByUrl(url)
|
||||
|
||||
item, err := db.GetGameItemByUrl(URL)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to retrieve game item from database", zap.String("URL", URL), zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to retrieve game item from database for URL %s: %w", URL, err)
|
||||
}
|
||||
|
||||
item.Name = strings.TrimSpace(title)
|
||||
item.RawName = rawTitle
|
||||
item.Url = url
|
||||
item.Url = URL
|
||||
item.Size = size
|
||||
item.Author = "FitGirl"
|
||||
item.Download = magnet
|
||||
item.Downloads = map[string]string{
|
||||
"magnet": magnet,
|
||||
}
|
||||
item.Platform = "windows"
|
||||
|
||||
c.logger.Info("Successfully crawled URL", zap.String("URL", URL))
|
||||
return item, nil
|
||||
}
|
||||
|
||||
func (c *FitGirlCrawler) Crawl(page int) ([]*model.GameItem, error) {
|
||||
resp, err := utils.Fetch(utils.FetchConfig{
|
||||
Url: fmt.Sprintf(constant.FitGirlURL, page),
|
||||
})
|
||||
c.logger.Info("Starting Crawl", zap.Int("Page", page))
|
||||
resp, err := utils.Request().Get(fmt.Sprintf(constant.FitGirlURL, page))
|
||||
if err != nil {
|
||||
c.logger.Error("Failed to fetch", zap.Error(err))
|
||||
return nil, err
|
||||
c.logger.Error("Failed to fetch page", zap.Int("Page", page), zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to fetch page %d: %w", page, err)
|
||||
}
|
||||
doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data))
|
||||
|
||||
doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Body()))
|
||||
if err != nil {
|
||||
c.logger.Error("Failed to parse HTML", zap.Error(err))
|
||||
return nil, err
|
||||
c.logger.Error("Failed to parse HTML document", zap.Int("Page", page), zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to parse HTML document for page %d: %w", page, err)
|
||||
}
|
||||
|
||||
var urls []string
|
||||
var updateFlags []string //link+date
|
||||
var updateFlags []string // link + date (encoded)
|
||||
doc.Find("article").Each(func(i int, s *goquery.Selection) {
|
||||
u, exist1 := s.Find(".entry-title>a").First().Attr("href")
|
||||
d, exist2 := s.Find("time").First().Attr("datetime")
|
||||
if exist1 && exist2 {
|
||||
urls = append(urls, u)
|
||||
updateFlags = append(updateFlags, fmt.Sprintf("%s%s", u, d))
|
||||
updateFlags = append(updateFlags, base64.StdEncoding.EncodeToString([]byte(fmt.Sprintf("%s%s", u, d))))
|
||||
} else {
|
||||
c.logger.Warn("Failed to extract URL or datetime", zap.Int("Index", i), zap.Bool("HasURL", exist1), zap.Bool("HasDate", exist2))
|
||||
}
|
||||
})
|
||||
|
||||
var res []*model.GameItem
|
||||
for i, u := range urls {
|
||||
if db.IsFitgirlCrawled(updateFlags[i]) {
|
||||
c.logger.Info("Skipping already crawled URL", zap.String("URL", u))
|
||||
continue
|
||||
}
|
||||
c.logger.Info("Crawling", zap.String("URL", u))
|
||||
|
||||
c.logger.Info("Crawling URL", zap.String("URL", u))
|
||||
item, err := c.CrawlByUrl(u)
|
||||
if err != nil {
|
||||
c.logger.Warn("Failed to crawl", zap.Error(err), zap.String("URL", u))
|
||||
c.logger.Warn("Failed to crawl URL", zap.String("URL", u), zap.Error(err))
|
||||
continue
|
||||
}
|
||||
|
||||
item.UpdateFlag = updateFlags[i]
|
||||
err = db.SaveGameItem(item)
|
||||
if err != nil {
|
||||
c.logger.Warn("Failed to save", zap.Error(err))
|
||||
c.logger.Warn("Failed to save game item to database", zap.String("URL", u), zap.Error(err))
|
||||
continue
|
||||
}
|
||||
|
||||
res = append(res, item)
|
||||
|
||||
if err := OrganizeGameItem(item); err != nil {
|
||||
c.logger.Warn("Failed to organize", zap.Error(err), zap.String("URL", u))
|
||||
c.logger.Warn("Failed to organize game item", zap.String("URL", u), zap.Error(err))
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
c.logger.Info("Finished Crawl", zap.Int("Page", page), zap.Int("ItemsCrawled", len(res)))
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func (c *FitGirlCrawler) CrawlMulti(pages []int) ([]*model.GameItem, error) {
|
||||
c.logger.Info("Starting CrawlMulti", zap.Ints("Pages", pages))
|
||||
var res []*model.GameItem
|
||||
for _, page := range pages {
|
||||
items, err := c.Crawl(page)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to crawl page", zap.Int("Page", page), zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to crawl page %d: %w", page, err)
|
||||
}
|
||||
res = append(res, items...)
|
||||
}
|
||||
|
||||
c.logger.Info("Finished CrawlMulti", zap.Int("TotalPages", len(pages)), zap.Int("TotalItems", len(res)))
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func (c *FitGirlCrawler) CrawlAll() ([]*model.GameItem, error) {
|
||||
var res []*model.GameItem
|
||||
c.logger.Info("Starting CrawlAll")
|
||||
totalPageNum, err := c.GetTotalPageNum()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to get total page number", zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to get total page number: %w", err)
|
||||
}
|
||||
|
||||
var res []*model.GameItem
|
||||
for i := 1; i <= totalPageNum; i++ {
|
||||
items, err := c.Crawl(i)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to crawl page", zap.Int("Page", i), zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to crawl page %d: %w", i, err)
|
||||
}
|
||||
res = append(res, items...)
|
||||
}
|
||||
|
||||
c.logger.Info("Finished CrawlAll", zap.Int("TotalPages", totalPageNum), zap.Int("TotalItems", len(res)))
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func (c *FitGirlCrawler) GetTotalPageNum() (int, error) {
|
||||
resp, err := utils.Fetch(utils.FetchConfig{
|
||||
Url: fmt.Sprintf(constant.FitGirlURL, 1),
|
||||
})
|
||||
c.logger.Info("Fetching total page number")
|
||||
resp, err := utils.Request().Get(fmt.Sprintf(constant.FitGirlURL, 1))
|
||||
if err != nil {
|
||||
return 0, err
|
||||
c.logger.Error("Failed to fetch first page for total page number", zap.Error(err))
|
||||
return 0, fmt.Errorf("failed to fetch first page for total page number: %w", err)
|
||||
}
|
||||
doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data))
|
||||
|
||||
doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Body()))
|
||||
if err != nil {
|
||||
return 0, err
|
||||
c.logger.Error("Failed to parse HTML document", zap.Error(err))
|
||||
return 0, fmt.Errorf("failed to parse HTML document for total page number: %w", err)
|
||||
}
|
||||
page, err := strconv.Atoi(doc.Find(".page-numbers.dots").First().Next().Text())
|
||||
|
||||
pageStr := doc.Find(".page-numbers.dots").First().Next().Text()
|
||||
totalPageNum, err := strconv.Atoi(pageStr)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
c.logger.Error("Failed to convert total page number to integer", zap.String("PageString", pageStr), zap.Error(err))
|
||||
return 0, fmt.Errorf("failed to convert total page number '%s' to integer: %w", pageStr, err)
|
||||
}
|
||||
return page, nil
|
||||
|
||||
c.logger.Info("Successfully fetched total page number", zap.Int("TotalPages", totalPageNum))
|
||||
return totalPageNum, nil
|
||||
}
|
||||
|
@@ -3,135 +3,205 @@ package crawler
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/base64"
|
||||
"errors"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"html"
|
||||
"net/http"
|
||||
"regexp"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"pcgamedb/config"
|
||||
"pcgamedb/constant"
|
||||
"pcgamedb/db"
|
||||
"pcgamedb/model"
|
||||
"pcgamedb/utils"
|
||||
"game-crawler/cache"
|
||||
"game-crawler/constant"
|
||||
"game-crawler/db"
|
||||
"game-crawler/model"
|
||||
|
||||
"git.nite07.com/nite/ccs"
|
||||
"github.com/PuerkitoBio/goquery"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
type FreeGOGCrawler struct {
|
||||
logger *zap.Logger
|
||||
cfClearanceUrl string
|
||||
logger *zap.Logger
|
||||
}
|
||||
|
||||
func NewFreeGOGCrawler(logger *zap.Logger) *FreeGOGCrawler {
|
||||
func NewFreeGOGCrawler(cfClearanceUrl string, logger *zap.Logger) *FreeGOGCrawler {
|
||||
return &FreeGOGCrawler{
|
||||
logger: logger,
|
||||
cfClearanceUrl: cfClearanceUrl,
|
||||
logger: logger,
|
||||
}
|
||||
}
|
||||
|
||||
func (c *FreeGOGCrawler) getSession() (*ccs.Session, error) {
|
||||
cacheKey := "freegog_waf_session"
|
||||
var session ccs.Session
|
||||
if val, exist := cache.Get(cacheKey); exist {
|
||||
err := json.Unmarshal([]byte(val), &session)
|
||||
if err != nil {
|
||||
c.logger.Error("Failed to unmarshal cached session", zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to unmarshal cached session: %w", err)
|
||||
}
|
||||
} else {
|
||||
var err error
|
||||
session, err = ccs.WAFSession(c.cfClearanceUrl, constant.FreeGOGListURL)
|
||||
if err != nil {
|
||||
c.logger.Error("Failed to create WAF session", zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to create WAF session: %w", err)
|
||||
}
|
||||
jsonBytes, err := json.Marshal(session)
|
||||
if err == nil {
|
||||
_ = cache.SetWithExpire(cacheKey, jsonBytes, 1*time.Hour)
|
||||
}
|
||||
}
|
||||
return &session, nil
|
||||
}
|
||||
|
||||
func (c *FreeGOGCrawler) Name() string {
|
||||
return "FreeGOG"
|
||||
}
|
||||
|
||||
func (c *FreeGOGCrawler) Crawl(num int) ([]*model.GameItem, error) {
|
||||
c.logger.Info("Starting Crawl", zap.Int("Num", num))
|
||||
count := 0
|
||||
session, err := utils.CCSWAFSession(config.Config.CFClearanceScraper.Url, constant.FreeGOGListURL)
|
||||
session, err := c.getSession()
|
||||
if err != nil {
|
||||
c.logger.Error("Failed to create session", zap.Error(err))
|
||||
return nil, err
|
||||
return nil, fmt.Errorf("failed to get session: %w", err)
|
||||
}
|
||||
resp, err := utils.FetchWithWAFSession(utils.FetchConfig{
|
||||
Url: constant.FreeGOGListURL,
|
||||
}, session)
|
||||
|
||||
	resp, err := ccs.RequestWithWAFSession(http.MethodGet, constant.FreeGOGListURL, *session, nil)
	if err != nil {
		c.logger.Error("Failed to fetch", zap.Error(err))
		return nil, err
		c.logger.Error("Failed to fetch FreeGOG list page", zap.Error(err))
		return nil, fmt.Errorf("failed to fetch FreeGOG list page: %w", err)
	}
	doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data))

	doc, err := goquery.NewDocumentFromReader(bytes.NewReader([]byte(resp.Body)))
	if err != nil {
		c.logger.Error("Failed to parse HTML", zap.Error(err))
		return nil, err
		c.logger.Error("Failed to parse HTML document", zap.Error(err))
		return nil, fmt.Errorf("failed to parse HTML document: %w", err)
	}

	var urls []string
	var updateFlags []string //rawName+link
	var updateFlags []string // RawName+Link
	doc.Find(".items-outer li a").Each(func(i int, s *goquery.Selection) {
		urls = append(urls, s.AttrOr("href", ""))
		updateFlags = append(updateFlags, s.Text()+s.AttrOr("href", ""))
		url := s.AttrOr("href", "")
		rawName := s.Text()
		if url != "" && rawName != "" {
			urls = append(urls, url)
			updateFlags = append(updateFlags, base64.StdEncoding.EncodeToString([]byte(rawName+url)))
		} else {
			c.logger.Warn("Invalid URL or raw name found in item", zap.Int("Index", i), zap.String("URL", url), zap.String("RawName", rawName))
		}
	})

	var res []*model.GameItem
	for i, u := range urls {
		if count == num {
			c.logger.Info("Reached target number of items", zap.Int("Count", count))
			break
		}
		if db.IsFreeGOGCrawled(updateFlags[i]) {
			c.logger.Info("Skipping already crawled URL", zap.String("URL", u))
			continue
		}
		c.logger.Info("Crawling", zap.String("URL", u))
		item, err := c.CrawlByUrl(u, session)

		c.logger.Info("Crawling URL", zap.String("URL", u))
		item, err := c.CrawlByUrl(u)
		if err != nil {
			c.logger.Warn("Failed to crawl", zap.Error(err), zap.String("URL", u))
			c.logger.Warn("Failed to crawl URL", zap.String("URL", u), zap.Error(err))
			continue
		}

		item.UpdateFlag = updateFlags[i]
		err = db.SaveGameItem(item)
		if err != nil {
			c.logger.Warn("Failed to save", zap.Error(err))
			c.logger.Warn("Failed to save game item to database", zap.String("URL", u), zap.Error(err))
			continue
		}

		res = append(res, item)
		count++

		if err := OrganizeGameItem(item); err != nil {
			c.logger.Warn("Failed to organize", zap.Error(err), zap.String("URL", u))
			c.logger.Warn("Failed to organize game item", zap.String("URL", u), zap.Error(err))
			continue
		}
	}

	c.logger.Info("Finished Crawl", zap.Int("TotalItems", len(res)))
	return res, nil
}

func (c *FreeGOGCrawler) CrawlByUrl(url string, session *utils.WAFSession) (*model.GameItem, error) {
	resp, err := utils.FetchWithWAFSession(utils.FetchConfig{
		Url: url,
	}, session)
func (c *FreeGOGCrawler) CrawlByUrl(URL string) (*model.GameItem, error) {
	c.logger.Info("Crawling game", zap.String("URL", URL))
	session, err := c.getSession()
	if err != nil {
		return nil, err
		c.logger.Error("Failed to get session", zap.Error(err))
		return nil, fmt.Errorf("failed to get session: %w", err)
	}
	item, err := db.GetGameItemByUrl(url)

	resp, err := ccs.RequestWithWAFSession(http.MethodGet, URL, *session, nil)
	if err != nil {
		return nil, err
		c.logger.Error("Failed to fetch game page", zap.String("URL", URL), zap.Error(err))
		return nil, fmt.Errorf("failed to fetch game page %s: %w", URL, err)
	}
	item.Url = url

	item, err := db.GetGameItemByUrl(URL)
	if err != nil {
		c.logger.Error("Failed to retrieve game item from database", zap.String("URL", URL), zap.Error(err))
		return nil, fmt.Errorf("failed to retrieve game item from database for URL %s: %w", URL, err)
	}

	item.Url = URL

	// Extract raw title
	rawTitleRegex := regexp.MustCompile(`(?i)<h1 class="entry-title">(.*?)</h1>`)
	rawTitleRegexRes := rawTitleRegex.FindStringSubmatch(string(resp.Data))
	rawName := ""
	rawTitleRegexRes := rawTitleRegex.FindStringSubmatch(string(resp.Body))
	if len(rawTitleRegexRes) > 1 {
		rawName = html.UnescapeString(rawTitleRegexRes[1])
		rawName := html.UnescapeString(rawTitleRegexRes[1])
		item.RawName = strings.Replace(rawName, "–", "-", -1)
	} else {
		return nil, err
		c.logger.Error("Failed to find raw title", zap.String("URL", URL))
		return nil, fmt.Errorf("failed to find raw title on page %s", URL)
	}

	item.Name = FreeGOGFormatter(item.RawName)

	// Extract size
	sizeRegex := regexp.MustCompile(`(?i)>Size:\s?(.*?)<`)
	sizeRegexRes := sizeRegex.FindStringSubmatch(string(resp.Data))
	sizeRegexRes := sizeRegex.FindStringSubmatch(string(resp.Body))
	if len(sizeRegexRes) > 1 {
		item.Size = sizeRegexRes[1]
	} else {
		c.logger.Warn("Failed to find game size", zap.String("URL", URL))
	}

	// Extract magnet link
	magnetRegex := regexp.MustCompile(`<a class="download-btn" href="https://gdl.freegogpcgames.xyz/download-gen\.php\?url=(.*?)"`)
	magnetRegexRes := magnetRegex.FindStringSubmatch(string(resp.Data))
	magnetRegexRes := magnetRegex.FindStringSubmatch(string(resp.Body))
	if len(magnetRegexRes) > 1 {
		magnet, err := base64.StdEncoding.DecodeString(magnetRegexRes[1])
		if err != nil {
			return nil, err
			c.logger.Error("Failed to decode magnet link", zap.String("URL", URL), zap.Error(err))
			return nil, fmt.Errorf("failed to decode magnet link on page %s: %w", URL, err)
		}
		item.Downloads = map[string]string{
			"magnet": string(magnet),
		}
		item.Download = string(magnet)
	} else {
		return nil, errors.New("failed to find magnet link")
		c.logger.Error("Failed to find magnet link", zap.String("URL", URL))
		return nil, fmt.Errorf("failed to find magnet link on page %s", URL)
	}

	item.Author = "FreeGOG"
	item.Platform = "windows"

	c.logger.Info("Successfully crawled URL", zap.String("URL", URL))
	return item, nil
}

func (c *FreeGOGCrawler) CrawlAll() ([]*model.GameItem, error) {
	c.logger.Info("Starting CrawlAll")
	return c.Crawl(-1)
}

@ -139,6 +209,7 @@ var freeGOGRegexps = []*regexp.Regexp{
	regexp.MustCompile(`(?i)\(.*\)`),
}

// FreeGOGFormatter formats the raw game name into a clean title.
func FreeGOGFormatter(name string) string {
	for _, re := range freeGOGRegexps {
		name = re.ReplaceAllString(name, "")
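For reference, the update flag used above to skip already-crawled FreeGOG entries is simply the raw name and URL concatenated and base64-encoded. A minimal, self-contained sketch (the helper name `makeUpdateFlag` is hypothetical, not part of the project):

```go
package main

import (
	"encoding/base64"
	"fmt"
)

// makeUpdateFlag mirrors the logic in the Crawl loop above: concatenate the
// raw listing name and the page URL, then base64-encode the result so the
// flag is an opaque, stable identifier for "already crawled" checks.
func makeUpdateFlag(rawName, url string) string {
	return base64.StdEncoding.EncodeToString([]byte(rawName + url))
}

func main() {
	flag := makeUpdateFlag("Some Game – Deluxe Edition", "https://freegogpcgames.com/some-game/")
	fmt.Println(flag)
}
```

Encoding the pair keeps the flag free of separator characters, which simplifies storing and comparing it in the database.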
166
crawler/game.go
@ -2,19 +2,20 @@ package crawler
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"regexp"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"go.uber.org/zap"
|
||||
"pcgamedb/db"
|
||||
"pcgamedb/model"
|
||||
"pcgamedb/utils"
|
||||
"game-crawler/db"
|
||||
"game-crawler/model"
|
||||
"game-crawler/utils"
|
||||
|
||||
"go.mongodb.org/mongo-driver/bson/primitive"
|
||||
"go.mongodb.org/mongo-driver/mongo"
|
||||
)
|
||||
|
||||
// GenerateGameInfo generates game info based on the platform and ID.
|
||||
func GenerateGameInfo(platform string, id int) (*model.GameInfo, error) {
|
||||
switch platform {
|
||||
case "steam":
|
||||
@ -22,59 +23,52 @@ func GenerateGameInfo(platform string, id int) (*model.GameInfo, error) {
|
||||
case "igdb":
|
||||
return GenerateIGDBGameInfo(id)
|
||||
default:
|
||||
return nil, errors.New("invalid ID type")
|
||||
return nil, errors.New("invalid platform type")
|
||||
}
|
||||
}
|
||||
|
||||
// OrganizeGameItem Organize and save GameInfo
|
||||
// OrganizeGameItem organizes the given game item and saves its associated game info to the database.
|
||||
func OrganizeGameItem(game *model.GameItem) error {
|
||||
hasOriganized, _ := db.HasGameItemOrganized(game.ID)
|
||||
if hasOriganized {
|
||||
hasOrganized := db.HasGameItemOrganized(game.ID)
|
||||
if hasOrganized {
|
||||
return nil
|
||||
}
|
||||
|
||||
item, err := OrganizeGameItemWithIGDB(0, game)
|
||||
if err == nil {
|
||||
if item.SteamID == 0 {
|
||||
// get steam id from igdb
|
||||
steamID, err := GetSteamIDByIGDBIDCache(item.IGDBID)
|
||||
if err == nil {
|
||||
item.SteamID = steamID
|
||||
}
|
||||
err = db.SaveGameInfo(item)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
item, err := OrganizeGameItemWithIGDB(game)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Attempt to supplement SteamID if missing
|
||||
if item.SteamID == 0 {
|
||||
steamID, err := GetSteamIDByIGDBID(item.IGDBID)
|
||||
if err == nil {
|
||||
item.SteamID = steamID
|
||||
}
|
||||
}
|
||||
item, err = OrganizeGameItemWithSteam(0, game)
|
||||
if err == nil {
|
||||
if item.IGDBID == 0 {
|
||||
igdbID, err := GetIGDBIDBySteamIDCache(item.SteamID)
|
||||
if err == nil {
|
||||
item.IGDBID = igdbID
|
||||
}
|
||||
}
|
||||
err = db.SaveGameInfo(item)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
|
||||
// Save the organized game info to the database
|
||||
if err := db.SaveGameInfo(item); err != nil {
|
||||
return err
|
||||
}
|
||||
return err
|
||||
return nil
|
||||
}
|
||||
|
||||
func AddGameInfoManually(gameID primitive.ObjectID, platform string, plateformID int) (*model.GameInfo, error) {
|
||||
info, err := GenerateGameInfo(platform, plateformID)
|
||||
// AddGameInfoManually manually adds a game info entry to the database.
|
||||
func AddGameInfoManually(gameID primitive.ObjectID, platform string, platformID int) (*model.GameInfo, error) {
|
||||
info, err := GenerateGameInfo(platform, platformID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
info.GameIDs = append(info.GameIDs, gameID)
|
||||
info.GameIDs = utils.Unique(info.GameIDs)
|
||||
return info, db.SaveGameInfo(info)
|
||||
|
||||
info.GameIDs = utils.Unique(append(info.GameIDs, gameID))
|
||||
if err := db.SaveGameInfo(info); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return info, nil
|
||||
}
|
||||
|
||||
// OrganizeGameItemManually organizes a game item manually based on the platform and platform ID.
|
||||
func OrganizeGameItemManually(gameID primitive.ObjectID, platform string, platformID int) (*model.GameInfo, error) {
|
||||
info, err := db.GetGameInfoByPlatformID(platform, platformID)
|
||||
if err != nil {
|
||||
@ -87,27 +81,30 @@ func OrganizeGameItemManually(gameID primitive.ObjectID, platform string, platfo
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
info.GameIDs = append(info.GameIDs, gameID)
|
||||
info.GameIDs = utils.Unique(info.GameIDs)
|
||||
err = db.SaveGameInfo(info)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if platform == "igdb" {
|
||||
steamID, err := GetSteamIDByIGDBIDCache(platformID)
|
||||
|
||||
info.GameIDs = utils.Unique(append(info.GameIDs, gameID))
|
||||
|
||||
// Supplement missing platform IDs
|
||||
if platform == "igdb" && info.SteamID == 0 {
|
||||
steamID, err := GetSteamIDByIGDBID(platformID)
|
||||
if err == nil {
|
||||
info.SteamID = steamID
|
||||
}
|
||||
}
|
||||
if platform == "steam" {
|
||||
igdbID, err := GetIGDBIDBySteamIDCache(platformID)
|
||||
if platform == "steam" && info.IGDBID == 0 {
|
||||
igdbID, err := GetIGDBIDBySteamAppID(platformID)
|
||||
if err == nil {
|
||||
info.IGDBID = igdbID
|
||||
}
|
||||
}
|
||||
|
||||
if err := db.SaveGameInfo(info); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return info, nil
|
||||
}
|
||||
|
||||
// FormatName formats a raw game name into a clean and consistent format.
|
||||
func FormatName(name string) string {
|
||||
name = regexp.MustCompile(`(?i)[\w’'-]+\s(Edition|Vision|Collection|Bundle|Pack|Deluxe)`).ReplaceAllString(name, " ")
|
||||
name = regexp.MustCompile(`(?i)GOTY`).ReplaceAllString(name, "")
|
||||
@ -121,35 +118,72 @@ func FormatName(name string) string {
|
||||
return name
|
||||
}
|
||||
|
||||
func SupplementPlatformIDToGameInfo(logger *zap.Logger) error {
|
||||
// SupplementPlatformIDToGameInfo supplements missing platform IDs (SteamID or IGDBID) for all game info entries.
|
||||
func SupplementPlatformIDToGameInfo() error {
|
||||
infos, err := db.GetAllGameInfos()
|
||||
if err != nil {
|
||||
return err
|
||||
return fmt.Errorf("failed to fetch game infos: %w", err)
|
||||
}
|
||||
|
||||
for _, info := range infos {
|
||||
changed := false
|
||||
|
||||
// Supplement SteamID using IGDBID
|
||||
if info.IGDBID != 0 && info.SteamID == 0 {
|
||||
steamID, err := GetSteamIDByIGDBIDCache(info.IGDBID)
|
||||
time.Sleep(time.Millisecond * 100)
|
||||
if err != nil {
|
||||
continue
|
||||
steamID, err := GetSteamIDByIGDBID(info.IGDBID)
|
||||
time.Sleep(100 * time.Millisecond)
|
||||
if err == nil {
|
||||
info.SteamID = steamID
|
||||
changed = true
|
||||
}
|
||||
info.SteamID = steamID
|
||||
changed = true
|
||||
}
|
||||
|
||||
// Supplement IGDBID using SteamID
|
||||
if info.SteamID != 0 && info.IGDBID == 0 {
|
||||
igdbID, err := GetIGDBIDBySteamIDCache(info.SteamID)
|
||||
time.Sleep(time.Millisecond * 100)
|
||||
if err != nil {
|
||||
continue
|
||||
igdbID, err := GetIGDBIDBySteamAppID(info.SteamID)
|
||||
time.Sleep(100 * time.Millisecond)
|
||||
if err == nil {
|
||||
info.IGDBID = igdbID
|
||||
changed = true
|
||||
}
|
||||
info.IGDBID = igdbID
|
||||
changed = true
|
||||
}
|
||||
|
||||
if changed {
|
||||
logger.Info("Supplemented platform id for game info", zap.String("name", info.Name), zap.Int("igdb", info.IGDBID), zap.Int("steam", info.SteamID))
|
||||
_ = db.SaveGameInfo(info)
|
||||
if err := db.SaveGameInfo(info); err != nil {
|
||||
return fmt.Errorf("failed to save game info: %w", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// UpdateGameInfo updates outdated game info entries and returns a channel to monitor updates.
|
||||
func UpdateGameInfo(num int) (chan *model.GameInfo, error) {
|
||||
infos, err := db.GetOutdatedGameInfos(num)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to fetch outdated game infos: %w", err)
|
||||
}
|
||||
|
||||
updateChan := make(chan *model.GameInfo)
|
||||
|
||||
go func() {
|
||||
defer close(updateChan)
|
||||
for _, info := range infos {
|
||||
if info.IGDBID != 0 {
|
||||
newInfo, err := GenerateIGDBGameInfo(info.IGDBID)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
db.MergeGameInfo(info, newInfo)
|
||||
if err := db.SaveGameInfo(newInfo); err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
updateChan <- newInfo
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
return updateChan, nil
|
||||
}
|
||||
|
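UpdateGameInfo above returns a channel and closes it from the background goroutine once every outdated entry has been refreshed. A minimal consumer sketch, assuming the crawler package is importable as `game-crawler/crawler` (inferred from the module name):

```go
package main

import (
	"fmt"
	"log"

	"game-crawler/crawler" // assumed import path, based on the module name
)

func main() {
	// Ask for up to 50 outdated entries; the goroutine inside UpdateGameInfo
	// closes the channel when it has processed them all.
	updates, err := crawler.UpdateGameInfo(50)
	if err != nil {
		log.Fatal(err)
	}
	for info := range updates {
		fmt.Println("refreshed:", info.Name)
	}
}
```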
@ -1,28 +1,31 @@
|
||||
package crawler
|
||||
|
||||
import (
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"path"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"pcgamedb/config"
|
||||
"pcgamedb/constant"
|
||||
"pcgamedb/db"
|
||||
"pcgamedb/model"
|
||||
"pcgamedb/utils"
|
||||
"game-crawler/constant"
|
||||
"game-crawler/db"
|
||||
"game-crawler/model"
|
||||
"game-crawler/utils"
|
||||
|
||||
"git.nite07.com/nite/ccs"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
type GOGGamesCrawler struct {
|
||||
logger *zap.Logger
|
||||
cfClearanceUrl string
|
||||
logger *zap.Logger
|
||||
}
|
||||
|
||||
func NewGOGGamesCrawler(logger *zap.Logger) *GOGGamesCrawler {
|
||||
func NewGOGGamesCrawler(cfClearanceUrl string, logger *zap.Logger) *GOGGamesCrawler {
|
||||
return &GOGGamesCrawler{
|
||||
logger: logger,
|
||||
cfClearanceUrl: cfClearanceUrl,
|
||||
logger: logger,
|
||||
}
|
||||
}
|
||||
|
||||
@ -32,116 +35,134 @@ func (c *GOGGamesCrawler) Name() string {
|
||||
|
||||
func (c *GOGGamesCrawler) CrawlByUrl(URL string) (*model.GameItem, error) {
|
||||
if !strings.HasPrefix(URL, "https://www.gog-games.to/game/") {
|
||||
return nil, fmt.Errorf("invalid url")
|
||||
err := fmt.Errorf("invalid URL: %s", URL)
|
||||
c.logger.Error("Invalid URL", zap.Error(err))
|
||||
return nil, err
|
||||
}
|
||||
_, slug := path.Split(URL)
|
||||
|
||||
_, slug := path.Split(URL)
|
||||
apiUrl := fmt.Sprintf(constant.GOGGamesGameAPIURL, slug)
|
||||
|
||||
token, err := utils.CCSTurnstileToken(config.Config.CFClearanceScraper.Url, apiUrl, "0x4AAAAAAAfOlgvCKbOdW1zc")
|
||||
token, err := ccs.TurnstileToken(c.cfClearanceUrl, apiUrl, "0x4AAAAAAAfOlgvCKbOdW1zc")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to get Turnstile token", zap.Error(err), zap.String("URL", URL))
|
||||
return nil, fmt.Errorf("failed to get Turnstile token for URL %s: %w", URL, err)
|
||||
}
|
||||
resp, err := utils.Fetch(utils.FetchConfig{
|
||||
Url: apiUrl,
|
||||
Headers: map[string]string{
|
||||
"cf-turnstile-response": token,
|
||||
},
|
||||
})
|
||||
|
||||
resp, err := utils.Request().SetHeader("cf-turnstile-response", token).Get(apiUrl)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to fetch data from API", zap.Error(err), zap.String("URL", URL))
|
||||
return nil, fmt.Errorf("failed to fetch API data for URL %s: %w", URL, err)
|
||||
}
|
||||
|
||||
data := gameResult{}
|
||||
err = json.Unmarshal(resp.Data, &data)
|
||||
err = json.Unmarshal(resp.Body(), &data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to unmarshal API response", zap.Error(err), zap.String("URL", URL))
|
||||
return nil, fmt.Errorf("failed to parse API response for URL %s: %w", URL, err)
|
||||
}
|
||||
|
||||
name := data.Title
|
||||
|
||||
// find download links
|
||||
fileHosters := []string{
|
||||
"gofile",
|
||||
"fileditch",
|
||||
"qiwi",
|
||||
"filesfm",
|
||||
"pixeldrain",
|
||||
"1fichier",
|
||||
}
|
||||
links := make([]string, 0)
|
||||
for _, h := range fileHosters {
|
||||
if value, exist := data.Links.Game[h]; exist {
|
||||
for _, link := range value.Links {
|
||||
links = append(links, link.Link)
|
||||
}
|
||||
links := make(map[string]string, 0)
|
||||
for _, v := range data.Links.Game {
|
||||
for _, link := range v.Links {
|
||||
links[fmt.Sprintf("%s(%s)", link.Label, v.Name)] = link.Link
|
||||
}
|
||||
if value, exist := data.Links.Patch[h]; exist {
|
||||
for _, link := range value.Links {
|
||||
links = append(links, link.Link)
|
||||
}
|
||||
}
|
||||
for _, v := range data.Links.Patch {
|
||||
for _, link := range v.Links {
|
||||
links[fmt.Sprintf("%s(%s)", link.Label, v.Name)] = link.Link
|
||||
}
|
||||
}
|
||||
for _, v := range data.Links.Goodie {
|
||||
for _, link := range v.Links {
|
||||
links[fmt.Sprintf("%s(%s)", link.Label, v.Name)] = link.Link
|
||||
}
|
||||
}
|
||||
|
||||
if len(links) == 0 {
|
||||
return nil, fmt.Errorf("no download link found")
|
||||
err := fmt.Errorf("no download links found for URL %s", URL)
|
||||
c.logger.Warn("No download links found", zap.Error(err))
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Calculate total size
|
||||
size := uint64(0)
|
||||
for _, file := range data.Files.Game {
|
||||
s, _ := utils.SizeToBytes(file.Size)
|
||||
s, parseErr := utils.SizeToBytes(file.Size)
|
||||
if parseErr != nil {
|
||||
c.logger.Warn("Failed to parse file size", zap.Error(parseErr), zap.String("fileSize", file.Size))
|
||||
}
|
||||
size += s
|
||||
}
|
||||
|
||||
// Retrieve or create game item
|
||||
item, err := db.GetGameItemByUrl(URL)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to retrieve game item from database", zap.Error(err), zap.String("URL", URL))
|
||||
return nil, fmt.Errorf("failed to get game item for URL %s: %w", URL, err)
|
||||
}
|
||||
|
||||
item.Name = name
|
||||
item.RawName = name
|
||||
item.Download = strings.Join(links, ",")
|
||||
item.Downloads = links
|
||||
item.Url = URL
|
||||
item.Size = utils.BytesToSize(size)
|
||||
item.Author = "GOGGames"
|
||||
item.Platform = "windows"
|
||||
|
||||
return item, nil
|
||||
}
|
||||
|
||||
func (c *GOGGamesCrawler) Crawl(page int) ([]*model.GameItem, error) {
|
||||
resp, err := utils.Fetch(utils.FetchConfig{
|
||||
Url: fmt.Sprintf(constant.GOGGamesURL, page),
|
||||
})
|
||||
resp, err := utils.Request().Get(fmt.Sprintf(constant.GOGGamesURL, page))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to fetch page", zap.Error(err), zap.Int("page", page))
|
||||
return nil, fmt.Errorf("failed to fetch page %d: %w", page, err)
|
||||
}
|
||||
|
||||
data := searchResult{}
|
||||
err = json.Unmarshal(resp.Data, &data)
|
||||
err = json.Unmarshal(resp.Body(), &data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to parse page response", zap.Error(err), zap.Int("page", page))
|
||||
return nil, fmt.Errorf("failed to parse page %d: %w", page, err)
|
||||
}
|
||||
|
||||
urls := make([]string, 0)
|
||||
var updateFlags []string //link+date
|
||||
var updateFlags []string // link+date
|
||||
for _, item := range data.Data {
|
||||
if item.Infohash == "" {
|
||||
// skip unreleased games
|
||||
continue
|
||||
}
|
||||
urls = append(urls, fmt.Sprintf(constant.GOGGamesPageURL, item.Slug))
|
||||
updateFlags = append(updateFlags, fmt.Sprintf("%s%s", item.GogURL, item.LastUpdate))
|
||||
updateFlags = append(updateFlags, base64.StdEncoding.EncodeToString([]byte(fmt.Sprintf("%s%s", item.GogURL, item.LastUpdate))))
|
||||
}
|
||||
|
||||
res := make([]*model.GameItem, 0)
|
||||
for i, u := range urls {
|
||||
if db.IsGameCrawled(updateFlags[i], "GOGGames") {
|
||||
c.logger.Info("Game already crawled", zap.String("URL", u))
|
||||
continue
|
||||
}
|
||||
c.logger.Info("Crawling", zap.String("URL", u))
|
||||
|
||||
c.logger.Info("Crawling game", zap.String("URL", u))
|
||||
item, err := c.CrawlByUrl(u)
|
||||
if err != nil {
|
||||
c.logger.Warn("Failed to crawl", zap.Error(err), zap.String("URL", u))
|
||||
c.logger.Warn("Failed to crawl game", zap.Error(err), zap.String("URL", u))
|
||||
continue
|
||||
}
|
||||
|
||||
item.UpdateFlag = updateFlags[i]
|
||||
if err := db.SaveGameItem(item); err != nil {
|
||||
c.logger.Warn("Failed to save", zap.Error(err), zap.String("URL", u))
|
||||
c.logger.Warn("Failed to save game item to database", zap.Error(err), zap.String("URL", u))
|
||||
continue
|
||||
}
|
||||
|
||||
res = append(res, item)
|
||||
if err := OrganizeGameItem(item); err != nil {
|
||||
c.logger.Warn("Failed to organize", zap.Error(err), zap.String("URL", u))
|
||||
c.logger.Warn("Failed to organize game item", zap.Error(err), zap.String("URL", u))
|
||||
continue
|
||||
}
|
||||
}
|
||||
@ -153,7 +174,8 @@ func (c *GOGGamesCrawler) CrawlMulti(pages []int) ([]*model.GameItem, error) {
|
||||
for _, page := range pages {
|
||||
items, err := c.Crawl(page)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to crawl multiple pages", zap.Error(err), zap.Int("page", page))
|
||||
return nil, fmt.Errorf("failed to crawl page %d: %w", page, err)
|
||||
}
|
||||
res = append(res, items...)
|
||||
}
|
||||
@ -163,13 +185,17 @@ func (c *GOGGamesCrawler) CrawlMulti(pages []int) ([]*model.GameItem, error) {
|
||||
func (c *GOGGamesCrawler) CrawlAll() ([]*model.GameItem, error) {
|
||||
totalPageNum, err := c.GetTotalPageNum()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to get total page number", zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to get total page number: %w", err)
|
||||
}
|
||||
|
||||
var res []*model.GameItem
|
||||
for i := 1; i <= totalPageNum; i++ {
|
||||
c.logger.Info("Crawling page", zap.Int("page", i))
|
||||
items, err := c.Crawl(i)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Warn("Failed to crawl page", zap.Error(err), zap.Int("page", i))
|
||||
return nil, fmt.Errorf("failed to crawl page %d: %w", i, err)
|
||||
}
|
||||
res = append(res, items...)
|
||||
}
|
||||
@ -177,17 +203,19 @@ func (c *GOGGamesCrawler) CrawlAll() ([]*model.GameItem, error) {
|
||||
}
|
||||
|
||||
func (c *GOGGamesCrawler) GetTotalPageNum() (int, error) {
|
||||
resp, err := utils.Fetch(utils.FetchConfig{
|
||||
Url: fmt.Sprintf(constant.GOGGamesURL, 1),
|
||||
})
|
||||
resp, err := utils.Request().Get(fmt.Sprintf(constant.GOGGamesURL, 1))
|
||||
if err != nil {
|
||||
return 0, err
|
||||
c.logger.Error("Failed to fetch first page", zap.Error(err))
|
||||
return 0, fmt.Errorf("failed to fetch first page: %w", err)
|
||||
}
|
||||
|
||||
data := searchResult{}
|
||||
err = json.Unmarshal(resp.Data, &data)
|
||||
err = json.Unmarshal(resp.Body(), &data)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
c.logger.Error("Failed to parse first page response", zap.Error(err))
|
||||
return 0, fmt.Errorf("failed to parse first page response: %w", err)
|
||||
}
|
||||
|
||||
return data.Meta.LastPage, nil
|
||||
}
|
||||
|
||||
@ -249,39 +277,13 @@ type gameResult struct {
|
||||
Md5Filename string `json:"md5_filename"`
|
||||
Infohash string `json:"infohash"`
|
||||
Links struct {
|
||||
Goodie struct {
|
||||
OneFichier struct {
|
||||
ID string `json:"id"`
|
||||
Name string `json:"name"`
|
||||
Links []struct {
|
||||
Label string `json:"label"`
|
||||
Link string `json:"link"`
|
||||
} `json:"links"`
|
||||
} `json:"1fichier"`
|
||||
Vikingfile struct {
|
||||
ID string `json:"id"`
|
||||
Name string `json:"name"`
|
||||
Links []struct {
|
||||
Label string `json:"label"`
|
||||
Link string `json:"link"`
|
||||
} `json:"links"`
|
||||
} `json:"vikingfile"`
|
||||
Pixeldrain struct {
|
||||
ID string `json:"id"`
|
||||
Name string `json:"name"`
|
||||
Links []struct {
|
||||
Label string `json:"label"`
|
||||
Link string `json:"link"`
|
||||
} `json:"links"`
|
||||
} `json:"pixeldrain"`
|
||||
Gofile struct {
|
||||
ID string `json:"id"`
|
||||
Name string `json:"name"`
|
||||
Links []struct {
|
||||
Label string `json:"label"`
|
||||
Link string `json:"link"`
|
||||
} `json:"links"`
|
||||
} `json:"gofile"`
|
||||
Goodie map[string]struct {
|
||||
ID string `json:"id"`
|
||||
Name string `json:"name"`
|
||||
Links []struct {
|
||||
Label string `json:"label"`
|
||||
Link string `json:"link"`
|
||||
} `json:"links"`
|
||||
} `json:"goodie"`
|
||||
Game map[string]struct {
|
||||
ID string `json:"id"`
|
||||
|
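The reworked CrawlByUrl flattens the per-host link groups (game, patch, goodie) into a single map keyed by "Label(host)". A standalone sketch of that flattening, with illustrative types that only mirror the shape of the API response:

```go
package main

import "fmt"

// hostLink and hostEntry are illustrative types, not the project's own: they
// mirror the per-host link groups returned by the gog-games API.
type hostLink struct {
	Label string
	Link  string
}

type hostEntry struct {
	Name  string
	Links []hostLink
}

func main() {
	game := map[string]hostEntry{
		"gofile":     {Name: "gofile", Links: []hostLink{{Label: "Part 1", Link: "https://example.com/g1"}}},
		"pixeldrain": {Name: "pixeldrain", Links: []hostLink{{Label: "Part 1", Link: "https://example.com/p1"}}},
	}

	// Same flattening as in CrawlByUrl: every labelled link from every host
	// ends up in one map keyed by "Label(host)".
	downloads := make(map[string]string)
	for _, v := range game {
		for _, link := range v.Links {
			downloads[fmt.Sprintf("%s(%s)", link.Label, v.Name)] = link.Link
		}
	}
	fmt.Println(downloads)
}
```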
899
crawler/igdb.go
File diff suppressed because it is too large
66
crawler/johncena141.go
Normal file
@ -0,0 +1,66 @@
package crawler

import (
	"regexp"
	"strings"

	"game-crawler/model"

	"go.uber.org/zap"
)

const Johncena141Name string = "johncena141-torrents"

type Johncena141Crawler struct {
	logger *zap.Logger
	crawler s1337xCrawler
}

func NewJohncena141Crawler(logger *zap.Logger) *Johncena141Crawler {
	return &Johncena141Crawler{
		logger: logger,
		crawler: *New1337xCrawler(
			Johncena141Name,
			"linux",
			Johncena141Formatter,
			logger,
		),
	}
}

func (c *Johncena141Crawler) Name() string {
	return "Johncena141Crawler"
}

func (c *Johncena141Crawler) Crawl(page int) ([]*model.GameItem, error) {
	return c.crawler.Crawl(page)
}

func (c *Johncena141Crawler) CrawlByUrl(url string) (*model.GameItem, error) {
	return c.crawler.CrawlByUrl(url)
}

func (c *Johncena141Crawler) CrawlMulti(pages []int) ([]*model.GameItem, error) {
	return c.crawler.CrawlMulti(pages)
}

func (c *Johncena141Crawler) CrawlAll() ([]*model.GameItem, error) {
	return c.crawler.CrawlAll()
}

func (c *Johncena141Crawler) GetTotalPageNum() (int, error) {
	return c.crawler.GetTotalPageNum()
}

var Johncena141Regexps = []*regexp.Regexp{
	regexp.MustCompile(`(?i)\s{2,}`),
	regexp.MustCompile(`(?i)[\-\+]\s?[^:\-]*?\s(Edition|Bundle|Pack|Set|Remake|Collection)`),
}

func Johncena141Formatter(name string) string {
	nameslice := strings.Split(name, " - ")
	name = nameslice[0]
	reg1 := regexp.MustCompile(`(?i)\(.*?\)`)
	name = reg1.ReplaceAllString(name, "")
	return strings.TrimSpace(name)
}
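Johncena141Formatter keeps only the part of the raw torrent title before the first " - " separator and strips parenthesised qualifiers. A small, runnable illustration of the same steps:

```go
package main

import (
	"fmt"
	"regexp"
	"strings"
)

// format applies the same cleanup as Johncena141Formatter above: take the
// segment before the first " - " and drop anything in parentheses.
func format(name string) string {
	name = strings.Split(name, " - ")[0]
	name = regexp.MustCompile(`(?i)\(.*?\)`).ReplaceAllString(name, "")
	return strings.TrimSpace(name)
}

func main() {
	fmt.Println(format("Some Game (v1.2.3) - GOG - johncena141")) // "Some Game"
}
```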
@ -4,7 +4,7 @@ import (
	"regexp"
	"strings"

	"pcgamedb/model"
	"game-crawler/model"

	"go.uber.org/zap"
)
@ -21,6 +21,7 @@ func NewKaOsKrewCrawler(logger *zap.Logger) *KaOsKrewCrawler {
		logger: logger,
		crawler: *New1337xCrawler(
			KaOsKrewName,
			"windows",
			KaOsKrewFormatter,
			logger,
		),
13
crawler/nxbrew.go
Normal file
@ -0,0 +1,13 @@
package crawler

import "go.uber.org/zap"

type NxbrewCrawler struct {
	logger *zap.Logger
}

func NewNxbrewCrawler(logger *zap.Logger) *NxbrewCrawler {
	return &NxbrewCrawler{
		logger: logger,
	}
}
47
crawler/omg_gods.go
Normal file
@ -0,0 +1,47 @@
package crawler

import (
	"game-crawler/model"

	"go.uber.org/zap"
)

type OmgGodsCrawler struct {
	logger *zap.Logger
	crawler RutrackerCrawler
}

func NewOmgGodsCrawler(cfClearanceUrl, username, password string, logger *zap.Logger) *OmgGodsCrawler {
	return &OmgGodsCrawler{
		logger: logger,
		crawler: *NewRutrackerCrawler(
			"https://rutracker.org/forum/tracker.php?rid=8994327&start=%v",
			"OmgGods",
			"switch",
			username,
			password,
			cfClearanceUrl,
			logger,
		),
	}
}

func (c *OmgGodsCrawler) Name() string {
	return "OmgGodsCrawler"
}

func (c *OmgGodsCrawler) CrawlByUrl(URL string) (*model.GameItem, error) {
	return c.crawler.CrawlByUrl(URL)
}

func (c *OmgGodsCrawler) Crawl(page int) ([]*model.GameItem, error) {
	return c.crawler.Crawl(page)
}

func (c *OmgGodsCrawler) CrawlAll() ([]*model.GameItem, error) {
	return c.crawler.CrawlAll()
}

func (c *OmgGodsCrawler) GetTotalPageNum() (int, error) {
	return c.crawler.GetTotalPageNum()
}
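OmgGodsCrawler is a thin wrapper that delegates all work to the shared RutrackerCrawler, configured with the OmgGods tracker URL and "switch" as the platform. A hedged usage sketch; the credentials, the cf_clearance service URL, and the import path are placeholders and assumptions, not values from the project:

```go
package main

import (
	"go.uber.org/zap"

	"game-crawler/crawler" // assumed import path, based on the module name
)

func main() {
	logger, _ := zap.NewProduction()
	defer logger.Sync()

	// Placeholder arguments: a local cf_clearance scraper endpoint and dummy
	// rutracker credentials. The constructor signature matches the code above.
	c := crawler.NewOmgGodsCrawler("http://localhost:3000", "user", "pass", logger)
	items, err := c.Crawl(1)
	if err != nil {
		logger.Fatal("crawl failed", zap.Error(err))
	}
	logger.Info("crawled", zap.Int("count", len(items)))
}
```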
@ -2,34 +2,38 @@ package crawler
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"os"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"pcgamedb/config"
|
||||
"pcgamedb/constant"
|
||||
"pcgamedb/db"
|
||||
"pcgamedb/model"
|
||||
"pcgamedb/utils"
|
||||
"game-crawler/cache"
|
||||
"game-crawler/constant"
|
||||
"game-crawler/db"
|
||||
"game-crawler/model"
|
||||
"game-crawler/utils"
|
||||
|
||||
"github.com/PuerkitoBio/goquery"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
type OnlineFixCrawler struct {
|
||||
logger *zap.Logger
|
||||
cookies map[string]string
|
||||
username string
|
||||
password string
|
||||
logger *zap.Logger
|
||||
}
|
||||
|
||||
func NewOnlineFixCrawler(logger *zap.Logger) *OnlineFixCrawler {
|
||||
func NewOnlineFixCrawler(username, password string, logger *zap.Logger) *OnlineFixCrawler {
|
||||
return &OnlineFixCrawler{
|
||||
logger: logger,
|
||||
cookies: map[string]string{},
|
||||
username: username,
|
||||
password: password,
|
||||
logger: logger,
|
||||
}
|
||||
}
|
||||
|
||||
@ -38,279 +42,273 @@ func (c *OnlineFixCrawler) Name() string {
|
||||
}
|
||||
|
||||
func (c *OnlineFixCrawler) Crawl(page int) ([]*model.GameItem, error) {
|
||||
if !config.Config.OnlineFixAvaliable {
|
||||
c.logger.Error("Need Online Fix account")
|
||||
return nil, errors.New("online Fix is not available")
|
||||
}
|
||||
if len(c.cookies) == 0 {
|
||||
err := c.login()
|
||||
if err != nil {
|
||||
c.logger.Error("Failed to login", zap.Error(err))
|
||||
return nil, err
|
||||
}
|
||||
cookies, err := c.getCookies()
|
||||
if err != nil {
|
||||
c.logger.Error("Failed to get cookies", zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to get cookies: %w", err)
|
||||
}
|
||||
|
||||
requestURL := fmt.Sprintf("%s/page/%d/", constant.OnlineFixURL, page)
|
||||
resp, err := utils.Fetch(utils.FetchConfig{
|
||||
Url: requestURL,
|
||||
Cookies: c.cookies,
|
||||
Headers: map[string]string{
|
||||
"Referer": constant.OnlineFixURL,
|
||||
},
|
||||
})
|
||||
resp, err := utils.Request().SetHeader("Referer", constant.OnlineFixURL).SetCookies(cookies).Get(requestURL)
|
||||
if err != nil {
|
||||
c.logger.Error("Failed to fetch", zap.Error(err))
|
||||
return nil, err
|
||||
c.logger.Error("Failed to fetch page", zap.String("url", requestURL), zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to fetch page %d: %w", page, err)
|
||||
}
|
||||
doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data))
|
||||
|
||||
body := utils.Windows1251ToUTF8(resp.Body())
|
||||
doc, err := goquery.NewDocumentFromReader(bytes.NewReader(body))
|
||||
if err != nil {
|
||||
c.logger.Error("Failed to parse HTML", zap.Error(err))
|
||||
return nil, err
|
||||
c.logger.Error("Failed to parse HTML", zap.String("url", requestURL), zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to parse HTML for page %d: %w", page, err)
|
||||
}
|
||||
|
||||
var urls []string
|
||||
var updateFlags []string //link+date
|
||||
var updateFlags []string
|
||||
doc.Find("article.news").Each(func(i int, s *goquery.Selection) {
|
||||
urls = append(urls, s.Find(".big-link").First().AttrOr("href", ""))
|
||||
updateFlags = append(
|
||||
updateFlags,
|
||||
s.Find(".big-link").First().AttrOr("href", "")+
|
||||
s.Find("time").Text(),
|
||||
)
|
||||
url := s.Find(".big-link").First().AttrOr("href", "")
|
||||
if url != "" {
|
||||
urls = append(urls, url)
|
||||
updateFlags = append(updateFlags, base64.StdEncoding.EncodeToString([]byte(url+s.Find("time").Text())))
|
||||
}
|
||||
})
|
||||
|
||||
var res []*model.GameItem
|
||||
for i, u := range urls {
|
||||
// Skip already crawled links
|
||||
if db.IsOnlineFixCrawled(updateFlags[i]) {
|
||||
c.logger.Info("Skipping already crawled URL", zap.String("url", u))
|
||||
continue
|
||||
}
|
||||
c.logger.Info("Crawling", zap.String("URL", u))
|
||||
|
||||
c.logger.Info("Crawling URL", zap.String("url", u))
|
||||
item, err := c.CrawlByUrl(u)
|
||||
if err != nil {
|
||||
c.logger.Warn("Failed to crawl", zap.Error(err), zap.String("URL", u))
|
||||
c.logger.Warn("Failed to crawl URL", zap.String("url", u), zap.Error(err))
|
||||
continue
|
||||
}
|
||||
|
||||
item.UpdateFlag = updateFlags[i]
|
||||
err = db.SaveGameItem(item)
|
||||
if err != nil {
|
||||
c.logger.Warn("Failed to save", zap.Error(err))
|
||||
if err := db.SaveGameItem(item); err != nil {
|
||||
c.logger.Warn("Failed to save game item", zap.String("url", u), zap.Error(err))
|
||||
continue
|
||||
}
|
||||
res = append(res, item)
|
||||
|
||||
if err := OrganizeGameItem(item); err != nil {
|
||||
c.logger.Warn("Failed to organize", zap.Error(err), zap.String("URL", u))
|
||||
c.logger.Warn("Failed to organize game item", zap.String("url", u), zap.Error(err))
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func (c *OnlineFixCrawler) CrawlByUrl(url string) (*model.GameItem, error) {
|
||||
if len(c.cookies) == 0 {
|
||||
err := c.login()
|
||||
if err != nil {
|
||||
c.logger.Error("Failed to login", zap.Error(err))
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
resp, err := utils.Fetch(utils.FetchConfig{
|
||||
Url: url,
|
||||
Cookies: c.cookies,
|
||||
Headers: map[string]string{
|
||||
"Referer": constant.OnlineFixURL,
|
||||
},
|
||||
})
|
||||
func (c *OnlineFixCrawler) CrawlByUrl(URL string) (*model.GameItem, error) {
|
||||
cookies, err := c.getCookies()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to get cookies", zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to get cookies: %w", err)
|
||||
}
|
||||
|
||||
resp, err := utils.Request().SetHeaders(map[string]string{
|
||||
"Referer": constant.OnlineFixURL,
|
||||
}).SetCookies(cookies).Get(URL)
|
||||
if err != nil {
|
||||
c.logger.Error("Failed to fetch URL", zap.String("url", URL), zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to fetch URL %s: %w", URL, err)
|
||||
}
|
||||
|
||||
body := utils.Windows1251ToUTF8(resp.Body())
|
||||
titleRegex := regexp.MustCompile(`(?i)<h1.*?>(.*?)</h1>`)
|
||||
titleRegexRes := titleRegex.FindAllStringSubmatch(string(resp.Data), -1)
|
||||
if len(titleRegexRes) == 0 {
|
||||
return nil, errors.New("failed to find title")
|
||||
titleMatch := titleRegex.FindStringSubmatch(string(body))
|
||||
if len(titleMatch) == 0 {
|
||||
c.logger.Warn("Failed to find title in HTML", zap.String("url", URL))
|
||||
return nil, errors.New("failed to find title in HTML")
|
||||
}
|
||||
downloadRegex := regexp.MustCompile(`(?i)<a[^>]*\bhref="([^"]+)"[^>]*>(Скачать Torrent|Скачать торрент)</a>`)
|
||||
downloadRegexRes := downloadRegex.FindAllStringSubmatch(string(resp.Data), -1)
|
||||
if len(downloadRegexRes) == 0 {
|
||||
|
||||
downloadRegex := regexp.MustCompile(`(?i)<a[^>]+\bhref="([^"]+)"[^>]+>(Скачать Torrent|Скачать торрент)</a>`)
|
||||
downloadMatch := downloadRegex.FindStringSubmatch(string(body))
|
||||
if len(downloadMatch) == 0 {
|
||||
c.logger.Warn("Failed to find download button", zap.String("url", URL))
|
||||
return nil, errors.New("failed to find download button")
|
||||
}
|
||||
item, err := db.GetGameItemByUrl(url)
|
||||
|
||||
// Retrieve or create game item
|
||||
item, err := db.GetGameItemByUrl(URL)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
item = &model.GameItem{}
|
||||
}
|
||||
item.RawName = titleRegexRes[0][1]
|
||||
|
||||
item.RawName = titleMatch[1]
|
||||
item.Name = OnlineFixFormatter(item.RawName)
|
||||
item.Url = url
|
||||
item.Url = URL
|
||||
item.Author = "OnlineFix"
|
||||
item.Size = "0"
|
||||
resp, err = utils.Fetch(utils.FetchConfig{
|
||||
Url: downloadRegexRes[0][1],
|
||||
Cookies: c.cookies,
|
||||
Headers: map[string]string{
|
||||
"Referer": url,
|
||||
},
|
||||
})
|
||||
|
||||
// Handle download links
|
||||
downloadURL := downloadMatch[1]
|
||||
resp, err = utils.Request().SetHeader("Referer", URL).SetCookies(cookies).Get(downloadURL)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to fetch download link", zap.String("url", downloadURL), zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to fetch download link %s: %w", downloadURL, err)
|
||||
}
|
||||
if strings.Contains(downloadRegexRes[0][1], "uploads.online-fix.me") {
|
||||
|
||||
body = utils.Windows1251ToUTF8(resp.Body())
|
||||
if strings.Contains(downloadURL, "uploads.online-fix.me") {
|
||||
// Handle torrent file
|
||||
magnetRegex := regexp.MustCompile(`(?i)"(.*?).torrent"`)
|
||||
magnetRegexRes := magnetRegex.FindAllStringSubmatch(string(resp.Data), -1)
|
||||
if len(magnetRegexRes) == 0 {
|
||||
return nil, errors.New("failed to find magnet")
|
||||
magnetMatch := magnetRegex.FindStringSubmatch(string(body))
|
||||
if len(magnetMatch) == 0 {
|
||||
c.logger.Warn("Failed to find torrent magnet link", zap.String("url", downloadURL))
|
||||
return nil, errors.New("failed to find torrent magnet link")
|
||||
}
|
||||
resp, err = utils.Fetch(utils.FetchConfig{
|
||||
Url: downloadRegexRes[0][1] + strings.Trim(magnetRegexRes[0][0], "\""),
|
||||
Cookies: c.cookies,
|
||||
Headers: map[string]string{
|
||||
"Referer": url,
|
||||
},
|
||||
})
|
||||
|
||||
torrentURL := downloadURL + strings.Trim(magnetMatch[0], "\"")
|
||||
resp, err = utils.Request().SetHeader("Referer", URL).SetCookies(cookies).Get(torrentURL)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to fetch torrent file", zap.String("url", torrentURL), zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to fetch torrent file %s: %w", torrentURL, err)
|
||||
}
|
||||
item.Download, item.Size, err = utils.ConvertTorrentToMagnet(resp.Data)
|
||||
|
||||
magnet, size, err := utils.ConvertTorrentToMagnet(resp.Body())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to convert torrent to magnet", zap.String("url", torrentURL), zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to convert torrent to magnet: %w", err)
|
||||
}
|
||||
} else if strings.Contains(downloadRegexRes[0][1], "online-fix.me/ext") {
|
||||
if strings.Contains(string(resp.Data), "mega.nz") {
|
||||
if !config.Config.MegaAvaliable {
|
||||
return nil, errors.New("mega is not avaliable")
|
||||
}
|
||||
megaRegex := regexp.MustCompile(`(?i)location.href=\\'([^\\']*)\\'`)
|
||||
megaRegexRes := megaRegex.FindAllStringSubmatch(string(resp.Data), -1)
|
||||
if len(megaRegexRes) == 0 {
|
||||
return nil, errors.New("failed to find download link")
|
||||
}
|
||||
path, files, err := utils.MegaDownload(megaRegexRes[0][1], "torrent")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
torrent := ""
|
||||
for _, file := range files {
|
||||
if strings.HasSuffix(file, ".torrent") {
|
||||
torrent = file
|
||||
break
|
||||
}
|
||||
}
|
||||
dataBytes, err := os.ReadFile(torrent)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
item.Download, item.Size, err = utils.ConvertTorrentToMagnet(dataBytes)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
_ = os.RemoveAll(path)
|
||||
} else {
|
||||
return nil, errors.New("failed to find download link")
|
||||
|
||||
item.Downloads = map[string]string{
|
||||
"magnet": magnet,
|
||||
}
|
||||
item.Size = size
|
||||
} else {
|
||||
return nil, errors.New("failed to find download link")
|
||||
c.logger.Warn("Unsupported download link format", zap.String("url", downloadURL))
|
||||
return nil, errors.New("unsupported download link format")
|
||||
}
|
||||
|
||||
item.Platform = "windows"
|
||||
return item, nil
|
||||
}
|
||||
|
||||
// Crawl multiple pages
|
||||
func (c *OnlineFixCrawler) CrawlMulti(pages []int) ([]*model.GameItem, error) {
|
||||
var res []*model.GameItem
|
||||
for _, page := range pages {
|
||||
items, err := c.Crawl(page)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to crawl page", zap.Int("page", page), zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to crawl page %d: %w", page, err)
|
||||
}
|
||||
res = append(res, items...)
|
||||
}
|
||||
return res, nil
|
||||
}
|
||||
|
||||
// Crawl all pages
|
||||
func (c *OnlineFixCrawler) CrawlAll() ([]*model.GameItem, error) {
|
||||
var res []*model.GameItem
|
||||
totalPageNum, err := c.GetTotalPageNum()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Error("Failed to get total page number", zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to get total page number: %w", err)
|
||||
}
|
||||
|
||||
for i := 1; i <= totalPageNum; i++ {
|
||||
items, err := c.Crawl(i)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
c.logger.Warn("Failed to crawl page", zap.Int("page", i), zap.Error(err))
|
||||
continue
|
||||
}
|
||||
res = append(res, items...)
|
||||
}
|
||||
return res, nil
|
||||
}
|
||||
|
||||
// Get total page number
|
||||
func (c *OnlineFixCrawler) GetTotalPageNum() (int, error) {
|
||||
resp, err := utils.Fetch(utils.FetchConfig{
|
||||
Url: constant.OnlineFixURL,
|
||||
Headers: map[string]string{
|
||||
"Referer": constant.OnlineFixURL,
|
||||
},
|
||||
})
|
||||
resp, err := utils.Request().SetHeader("Referer", constant.OnlineFixURL).Get(constant.OnlineFixURL)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
c.logger.Error("Failed to fetch main page", zap.Error(err))
|
||||
return 0, fmt.Errorf("failed to fetch main page: %w", err)
|
||||
}
|
||||
|
||||
pageRegex := regexp.MustCompile(`(?i)<a href="https://online-fix.me/page/(\d+)/">.*?</a>`)
|
||||
pageRegexRes := pageRegex.FindAllStringSubmatch(string(resp.Data), -1)
|
||||
if len(pageRegexRes) == 0 {
|
||||
return 0, err
|
||||
pageMatches := pageRegex.FindAllStringSubmatch(string(resp.Body()), -1)
|
||||
if len(pageMatches) < 2 {
|
||||
c.logger.Warn("Failed to find total page number in HTML")
|
||||
return 0, errors.New("failed to find total page number")
|
||||
}
|
||||
totalPageNum, err := strconv.Atoi(pageRegexRes[len(pageRegexRes)-2][1])
|
||||
|
||||
totalPageNum, err := strconv.Atoi(pageMatches[len(pageMatches)-2][1])
|
||||
if err != nil {
|
||||
return 0, err
|
||||
c.logger.Error("Failed to parse total page number", zap.Error(err))
|
||||
return 0, fmt.Errorf("failed to parse total page number: %w", err)
|
||||
}
|
||||
|
||||
return totalPageNum, nil
|
||||
}
|
||||
|
||||
type csrf struct {
|
||||
Field string `json:"field"`
|
||||
Value string `json:"value"`
|
||||
}
|
||||
// Get cookies for authentication
|
||||
func (c *OnlineFixCrawler) getCookies() ([]*http.Cookie, error) {
|
||||
if c.username == "" || c.password == "" {
|
||||
return nil, errors.New("username or password is empty")
|
||||
}
|
||||
val, exists := cache.Get("onlinefix_cookies")
|
||||
if exists {
|
||||
var cookies []*http.Cookie
|
||||
if err := json.Unmarshal([]byte(val), &cookies); err != nil {
|
||||
c.logger.Warn("Failed to parse cached cookies", zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to parse cached cookies: %w", err)
|
||||
}
|
||||
return cookies, nil
|
||||
}
|
||||
|
||||
func (c *OnlineFixCrawler) login() error {
|
||||
resp, err := utils.Fetch(utils.FetchConfig{
|
||||
Url: constant.OnlineFixCSRFURL,
|
||||
Headers: map[string]string{
|
||||
"X-Requested-With": "XMLHttpRequest",
|
||||
"Referer": constant.OnlineFixURL,
|
||||
},
|
||||
})
|
||||
resp, err := utils.Request().SetHeaders(map[string]string{
|
||||
"X-Requested-With": "XMLHttpRequest",
|
||||
"Referer": constant.OnlineFixURL,
|
||||
}).Get(constant.OnlineFixCSRFURL)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
var csrf csrf
|
||||
if err = json.Unmarshal(resp.Data, &csrf); err != nil {
|
||||
return err
|
||||
c.logger.Error("Failed to fetch CSRF token", zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to fetch CSRF token: %w", err)
|
||||
}
|
||||
|
||||
for _, cookie := range resp.Cookie {
|
||||
c.cookies[cookie.Name] = cookie.Value
|
||||
type csrf struct {
|
||||
Field string `json:"field"`
|
||||
Value string `json:"value"`
|
||||
}
|
||||
|
||||
var csrfToken csrf
|
||||
if err = json.Unmarshal(resp.Body(), &csrfToken); err != nil {
|
||||
c.logger.Error("Failed to parse CSRF token", zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to parse CSRF token: %w", err)
|
||||
}
|
||||
|
||||
cookies := resp.Cookies()
|
||||
params := url.Values{}
|
||||
params.Add("login_name", config.Config.OnlineFix.User)
|
||||
params.Add("login_password", config.Config.OnlineFix.Password)
|
||||
params.Add(csrf.Field, csrf.Value)
|
||||
params.Add("login_name", c.username)
|
||||
params.Add("login_password", c.password)
|
||||
params.Add(csrfToken.Field, csrfToken.Value)
|
||||
params.Add("login", "submit")
|
||||
resp, err = utils.Fetch(utils.FetchConfig{
|
||||
Url: constant.OnlineFixURL,
|
||||
Method: "POST",
|
||||
Cookies: c.cookies,
|
||||
Headers: map[string]string{
|
||||
"Origin": constant.OnlineFixURL,
|
||||
"Content-Type": "application/x-www-form-urlencoded",
|
||||
"Referer": constant.OnlineFixURL,
|
||||
},
|
||||
Data: params,
|
||||
})
|
||||
|
||||
resp, err = utils.Request().SetHeaders(map[string]string{
|
||||
"Origin": constant.OnlineFixURL,
|
||||
"Content-Type": "application/x-www-form-urlencoded",
|
||||
"Referer": constant.OnlineFixURL,
|
||||
}).SetCookies(cookies).SetBody(params.Encode()).Post(constant.OnlineFixURL)
|
||||
if err != nil {
|
||||
return err
|
||||
c.logger.Error("Failed to log in", zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to log in: %w", err)
|
||||
}
|
||||
for _, cookie := range resp.Cookie {
|
||||
c.cookies[cookie.Name] = cookie.Value
|
||||
}
|
||||
return nil
|
||||
|
||||
cookies = resp.Cookies()
|
||||
cookiesJSON, _ := json.Marshal(cookies)
|
||||
_ = cache.SetWithExpire("onlinefix_cookies", string(cookiesJSON), time.Hour)
|
||||
|
||||
return cookies, nil
|
||||
}
|
||||
|
||||
// Format game name
|
||||
func OnlineFixFormatter(name string) string {
|
||||
name = strings.Replace(name, "по сети", "", -1)
|
||||
reg1 := regexp.MustCompile(`(?i)\(.*?\)`)
|
||||
name = reg1.ReplaceAllString(name, "")
|
||||
return strings.TrimSpace(name)
|
||||
name = strings.ReplaceAll(name, "по сети", "")
|
||||
reg := regexp.MustCompile(`(?i)\(.*?\)`)
|
||||
return strings.TrimSpace(reg.ReplaceAllString(name, ""))
|
||||
}
|
||||
|
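The OnlineFix pages are served as Windows-1251 and are converted with utils.Windows1251ToUTF8 before parsing. Assuming that helper wraps a standard charset decoder, an equivalent conversion with golang.org/x/text looks like this:

```go
package main

import (
	"fmt"

	"golang.org/x/text/encoding/charmap"
)

// windows1251ToUTF8 does what utils.Windows1251ToUTF8 presumably does:
// decode a Windows-1251 byte slice into UTF-8 so goquery and regexp can
// operate on it safely.
func windows1251ToUTF8(b []byte) ([]byte, error) {
	return charmap.Windows1251.NewDecoder().Bytes(b)
}

func main() {
	raw := []byte{0xCF, 0xF0, 0xE8, 0xE2, 0xE5, 0xF2} // "Привет" in Windows-1251
	utf8, err := windows1251ToUTF8(raw)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(utf8))
}
```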
320
crawler/rutracker.go
Normal file
@ -0,0 +1,320 @@
|
||||
package crawler
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"game-crawler/cache"
|
||||
"game-crawler/constant"
|
||||
"game-crawler/db"
|
||||
"game-crawler/model"
|
||||
"game-crawler/utils"
|
||||
"net/url"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
"unicode"
|
||||
|
||||
"git.nite07.com/nite/ccs"
|
||||
"github.com/Danny-Dasilva/CycleTLS/cycletls"
|
||||
http "github.com/Danny-Dasilva/fhttp"
|
||||
"github.com/PuerkitoBio/goquery"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
type RutrackerCrawler struct {
|
||||
source string
|
||||
url string
|
||||
platform string
|
||||
username string
|
||||
password string
|
||||
logger *zap.Logger
|
||||
cfClearanceUrl string
|
||||
}
|
||||
|
||||
func NewRutrackerCrawler(url, source, platform, username, password, cfClearanceUrl string, logger *zap.Logger) *RutrackerCrawler {
|
||||
return &RutrackerCrawler{
|
||||
source: source,
|
||||
url: url,
|
||||
logger: logger,
|
||||
platform: platform,
|
||||
username: username,
|
||||
password: password,
|
||||
cfClearanceUrl: cfClearanceUrl,
|
||||
}
|
||||
}
|
||||
|
||||
func (r *RutrackerCrawler) getSession() (*ccs.Session, error) {
|
||||
if r.username == "" || r.password == "" {
|
||||
r.logger.Error("Username or password is empty")
|
||||
return nil, fmt.Errorf("username or password is empty")
|
||||
}
|
||||
|
||||
cacheKey := "rutracker_session"
|
||||
var session ccs.Session
|
||||
if val, exist := cache.Get(cacheKey); exist {
|
||||
err := json.Unmarshal([]byte(val), &session)
|
||||
if err != nil {
|
||||
r.logger.Error("Failed to unmarshal cached session", zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to unmarshal cached session: %w", err)
|
||||
}
|
||||
} else {
|
||||
var err error
|
||||
session, err = ccs.WAFSession(r.cfClearanceUrl, constant.RutrackerURL)
|
||||
if err != nil {
|
||||
r.logger.Error("Failed to create WAF session", zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to create WAF session: %w", err)
|
||||
}
|
||||
|
||||
// login
|
||||
params := url.Values{}
|
||||
params.Add("login_username", r.username)
|
||||
params.Add("login_password", r.password)
|
||||
params.Add("login", "Вход")
|
||||
resp, err := ccs.RequestWithWAFSession(http.MethodPost, constant.RutrackerLoginURL, session, &cycletls.Options{
|
||||
Headers: map[string]string{
|
||||
"Content-Type": "application/x-www-form-urlencoded",
|
||||
},
|
||||
Body: params.Encode(),
|
||||
UserAgent: "Mozilla/5.0 (X11; Linux x86_64; rv:133.0) Gecko/20100101 Firefox/133.0",
|
||||
DisableRedirect: true,
|
||||
})
|
||||
if err != nil {
|
||||
r.logger.Error("Failed to login", zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to login: %w", err)
|
||||
}
|
||||
|
||||
if len(resp.Cookies) == 0 {
|
||||
r.logger.Error("Failed to login, no cookies found")
|
||||
return nil, fmt.Errorf("failed to login, no cookies found")
|
||||
}
|
||||
|
||||
success := false
|
||||
loginCookies := make([]ccs.Cookie, 0)
|
||||
for _, cookie := range resp.Cookies {
|
||||
if cookie.Name == "bb_session" {
|
||||
success = true
|
||||
}
|
||||
loginCookies = append(loginCookies, ccs.Cookie{
|
||||
Name: cookie.Name,
|
||||
Value: cookie.Value,
|
||||
})
|
||||
}
|
||||
|
||||
if !success {
|
||||
r.logger.Error("Failed to login, no bb_session cookie found")
|
||||
return nil, fmt.Errorf("failed to login, no bb_session cookie found")
|
||||
}
|
||||
|
||||
session.Cookies = append(session.Cookies, loginCookies...)
|
||||
|
||||
jsonBytes, err := json.Marshal(session)
|
||||
if err == nil {
|
||||
_ = cache.SetWithExpire(cacheKey, jsonBytes, 24*time.Hour)
|
||||
}
|
||||
}
|
||||
return &session, nil
|
||||
}
|
||||
|
||||
var regexps = []*regexp.Regexp{
|
||||
regexp.MustCompile(`\(.*?\)`),
|
||||
regexp.MustCompile(`\[.*?\]`),
|
||||
}
|
||||
|
||||
func (r *RutrackerCrawler) CrawlByUrl(URL string) (*model.GameItem, error) {
|
||||
r.logger.Info("Crawling game", zap.String("URL", URL))
|
||||
session, err := r.getSession()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get session: %w", err)
|
||||
}
|
||||
|
||||
resp, err := ccs.RequestWithWAFSession(http.MethodGet, URL, *session, nil)
|
||||
if err != nil {
|
||||
r.logger.Error("Failed to request URL", zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to request URL: %w", err)
|
||||
}
|
||||
|
||||
doc, err := goquery.NewDocumentFromReader(bytes.NewReader(utils.Windows1251ToUTF8([]byte(resp.Body))))
|
||||
if err != nil {
|
||||
r.logger.Error("Failed to parse HTML", zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to parse HTML: %w", err)
|
||||
}
|
||||
|
||||
item, err := db.GetGameItemByUrl(URL)
|
||||
if err != nil {
|
||||
r.logger.Error("Failed to get game item by url", zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to get game item by url: %w", err)
|
||||
}
|
||||
|
||||
name := doc.Find(".post_body").First().Find("span").First().Text()
|
||||
if strings.TrimSpace(name) == "" {
|
||||
name = doc.Find(".post_body").First().Find("span").Eq(1).Text()
|
||||
}
|
||||
name = strings.TrimSpace(name)
|
||||
if strings.TrimSpace(name) == "" {
|
||||
name = doc.Find("#topic-title").Text()
|
||||
for _, regex := range regexps {
|
||||
name = regex.ReplaceAllString(name, "")
|
||||
}
|
||||
name = strings.TrimSpace(name)
|
||||
if name == "" {
|
||||
r.logger.Error("Failed to find name")
|
||||
return nil, fmt.Errorf("failed to find name")
|
||||
}
|
||||
}
|
||||
|
||||
item.RawName = doc.Find("#topic-title").Text()
|
||||
item.Name = name
|
||||
item.Author = r.source
|
||||
item.Platform = r.platform
|
||||
item.Url = URL
|
||||
|
||||
magnet := doc.Find(".magnet-link").AttrOr("href", "")
|
||||
if magnet == "" {
|
||||
r.logger.Error("Failed to find magnet link")
|
||||
return nil, fmt.Errorf("failed to find magnet link")
|
||||
}
|
||||
item.Downloads = map[string]string{
|
||||
"magnet": magnet,
|
||||
}
|
||||
sizeStr := doc.Find("#tor-size-humn").AttrOr("title", "")
|
||||
if sizeStr == "" {
|
||||
r.logger.Warn("Failed to find size")
|
||||
item.Size = "unknown"
|
||||
} else {
|
||||
size, err := strconv.ParseUint(sizeStr, 10, 64)
|
||||
if err != nil {
|
||||
r.logger.Error("Failed to parse size", zap.Error(err))
|
||||
} else {
|
||||
item.Size = utils.BytesToSize(size)
|
||||
}
|
||||
}
|
||||
return item, nil
|
||||
}
|
||||
|
||||
func (r *RutrackerCrawler) Crawl(page int) ([]*model.GameItem, error) {
|
||||
session, err := r.getSession()
|
||||
if err != nil {
|
||||
r.logger.Error("Failed to get session", zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to get session: %w", err)
|
||||
}
|
||||
|
||||
URL := fmt.Sprintf(r.url, (page-1)*50)
|
||||
resp, err := ccs.RequestWithWAFSession(http.MethodGet, URL, *session, nil)
|
||||
if err != nil {
|
||||
r.logger.Error("Failed to request URL", zap.String("url", URL), zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to request URL: %w", err)
|
||||
}
|
||||
|
||||
doc, err := goquery.NewDocumentFromReader(bytes.NewReader(utils.Windows1251ToUTF8([]byte(resp.Body))))
|
||||
if err != nil {
|
||||
r.logger.Error("Failed to parse HTML", zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to parse HTML: %w", err)
|
||||
}
|
||||
var urls []string
|
||||
var updateFlags []string
|
||||
doc.Find("[id^='trs-tr']").Each(func(i int, s *goquery.Selection) {
|
||||
a := s.Find(".t-title a")
|
||||
datetime := s.Find("td").Last().Text()
|
||||
url, exists := a.Attr("href")
|
||||
if !exists {
|
||||
r.logger.Error("Failed to find URL")
|
||||
return
|
||||
}
|
||||
fullURL := fmt.Sprintf(constant.RutrackerTopicURL, url)
|
||||
urls = append(urls, fullURL)
|
||||
updateFlags = append(updateFlags, base64.StdEncoding.EncodeToString([]byte(fullURL+datetime)))
|
||||
})
|
||||
|
||||
var res []*model.GameItem
|
||||
for i, URL := range urls {
|
||||
if db.IsGameCrawled(updateFlags[i], r.source) {
|
||||
r.logger.Info("Skipping already crawled URL", zap.String("URL", URL))
|
||||
continue
|
||||
}
|
||||
r.logger.Info("Crawling URL", zap.String("URL", URL))
|
||||
item, err := r.CrawlByUrl(URL)
|
||||
if err != nil {
|
||||
r.logger.Error("Failed to crawl URL", zap.String("URL", URL), zap.Error(err))
|
||||
continue
|
||||
}
|
||||
item.UpdateFlag = updateFlags[i]
|
||||
err = db.SaveGameItem(item)
|
||||
if err != nil {
|
||||
r.logger.Error("Failed to save game item to database", zap.String("URL", URL), zap.Error(err))
|
||||
continue
|
||||
}
|
||||
res = append(res, item)
|
||||
if err := OrganizeGameItem(item); err != nil {
|
||||
r.logger.Warn("Failed to organize game item", zap.String("URL", URL), zap.Error(err))
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
r.logger.Info("Finished Crawl", zap.Int("Page", page), zap.Int("ItemsCrawled", len(res)))
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func (r *RutrackerCrawler) CrawlAll() (res []*model.GameItem, err error) {
|
||||
r.logger.Info("Starting CrawlAll", zap.String("Source", r.source))
|
||||
totalPage, err := r.GetTotalPageNum()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get total page number: %w", err)
|
||||
}
|
||||
for i := 1; i <= totalPage; i++ {
|
||||
items, err := r.Crawl(i)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to crawl page %d: %w", i, err)
|
||||
}
|
||||
res = append(res, items...)
|
||||
}
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func (r *RutrackerCrawler) GetTotalPageNum() (int, error) {
|
||||
session, err := r.getSession()
|
||||
if err != nil {
|
||||
return 0, fmt.Errorf("failed to get session: %w", err)
|
||||
}
|
||||
|
||||
URL := fmt.Sprintf(r.url, 0)
|
||||
resp, err := ccs.RequestWithWAFSession(http.MethodGet, URL, *session, nil)
|
||||
if err != nil {
|
||||
r.logger.Error("Failed to request URL", zap.String("url", URL), zap.Error(err))
|
||||
return 0, fmt.Errorf("failed to request URL: %w", err)
|
||||
}
|
||||
|
||||
doc, err := goquery.NewDocumentFromReader(bytes.NewReader(utils.Windows1251ToUTF8([]byte(resp.Body))))
|
||||
if err != nil {
|
||||
r.logger.Error("Failed to parse HTML", zap.Error(err))
|
||||
return 0, fmt.Errorf("failed to parse HTML: %w", err)
|
||||
}
|
||||
|
||||
var pg []string
|
||||
doc.Find(".pg").Each(func(i int, s *goquery.Selection) {
|
||||
pg = append(pg, s.Text())
|
||||
})
|
||||
|
||||
if len(pg) == 0 {
|
||||
r.logger.Error("Failed to find page number")
|
||||
return 0, fmt.Errorf("failed to find page number")
|
||||
}
|
||||
|
||||
totalPage := 0
|
||||
for _, c := range pg[len(pg)-1] {
|
||||
if unicode.IsDigit(c) {
|
||||
totalPage, err = strconv.Atoi(pg[len(pg)-1])
|
||||
break
|
||||
} else {
|
||||
totalPage, err = strconv.Atoi(pg[len(pg)-2])
|
||||
break
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
r.logger.Error("Failed to parse page number", zap.Error(err))
|
||||
return 0, fmt.Errorf("failed to parse page number: %w", err)
|
||||
}
|
||||
return totalPage, nil
|
||||
}
|
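CrawlByUrl above relies on a few fixed selectors on the topic page: #topic-title for the raw name, .magnet-link for the magnet URL, and the title attribute of #tor-size-humn for the byte size. A self-contained goquery sketch against a minimal stand-in page:

```go
package main

import (
	"fmt"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

func main() {
	// Minimal stand-in for a rutracker topic page, just enough to exercise
	// the selectors used by CrawlByUrl above.
	page := `<html><body>
		<a id="topic-title">Some Game [Native] (v1.0)</a>
		<a class="magnet-link" href="magnet:?xt=urn:btih:abcdef">magnet</a>
		<span id="tor-size-humn" title="1073741824">1 GB</span>
	</body></html>`

	doc, err := goquery.NewDocumentFromReader(strings.NewReader(page))
	if err != nil {
		panic(err)
	}
	fmt.Println("title: ", doc.Find("#topic-title").Text())
	fmt.Println("magnet:", doc.Find(".magnet-link").AttrOr("href", ""))
	fmt.Println("bytes: ", doc.Find("#tor-size-humn").AttrOr("title", ""))
}
```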
47
crawler/rutracker_linux_game.go
Normal file
@ -0,0 +1,47 @@
package crawler

import (
	"game-crawler/model"

	"go.uber.org/zap"
)

type RutrackerLinuxGameCrawler struct {
	logger *zap.Logger
	crawler RutrackerCrawler
}

func NewRutrackerLinuxGameCrawler(cfClearanceUrl, username, password string, logger *zap.Logger) *RutrackerLinuxGameCrawler {
	return &RutrackerLinuxGameCrawler{
		logger: logger,
		crawler: *NewRutrackerCrawler(
			"https://rutracker.org/forum/tracker.php?f=1992&start=%v",
			"rutracker-linux-game",
			"linux",
			username,
			password,
			cfClearanceUrl,
			logger,
		),
	}
}

func (c *RutrackerLinuxGameCrawler) Name() string {
	return "RutrackerLinuxGameCrawler"
}

func (c *RutrackerLinuxGameCrawler) CrawlByUrl(URL string) (*model.GameItem, error) {
	return c.crawler.CrawlByUrl(URL)
}

func (c *RutrackerLinuxGameCrawler) Crawl(page int) ([]*model.GameItem, error) {
	return c.crawler.Crawl(page)
}

func (c *RutrackerLinuxGameCrawler) CrawlAll() ([]*model.GameItem, error) {
	return c.crawler.CrawlAll()
}

func (c *RutrackerLinuxGameCrawler) GetTotalPageNum() (int, error) {
	return c.crawler.GetTotalPageNum()
}
287
crawler/steam.go
@ -9,266 +9,123 @@ import (
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"pcgamedb/cache"
|
||||
"pcgamedb/config"
|
||||
"pcgamedb/constant"
|
||||
"pcgamedb/db"
|
||||
"pcgamedb/model"
|
||||
"pcgamedb/utils"
|
||||
"game-crawler/cache"
|
||||
"game-crawler/constant"
|
||||
"game-crawler/model"
|
||||
"game-crawler/utils"
|
||||
)
|
||||
|
||||
func getSteamID(name string) (int, error) {
|
||||
baseURL, _ := url.Parse(constant.SteamSearchURL)
|
||||
params := url.Values{}
|
||||
params.Add("term", name)
|
||||
baseURL.RawQuery = params.Encode()
|
||||
|
||||
resp, err := utils.Fetch(utils.FetchConfig{
|
||||
Url: baseURL.String(),
|
||||
})
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
idRegex := regexp.MustCompile(`data-ds-appid="(.*?)"`)
|
||||
nameRegex := regexp.MustCompile(`<span class="title">(.*?)</span>`)
|
||||
idRegexRes := idRegex.FindAllStringSubmatch(string(resp.Data), -1)
|
||||
nameRegexRes := nameRegex.FindAllStringSubmatch(string(resp.Data), -1)
|
||||
|
||||
if len(idRegexRes) == 0 {
|
||||
return 0, fmt.Errorf("steam ID not found: %s", name)
|
||||
}
|
||||
|
||||
maxSim := 0.0
|
||||
maxSimID := 0
|
||||
for i, id := range idRegexRes {
|
||||
idStr := id[1]
|
||||
nameStr := nameRegexRes[i][1]
|
||||
if index := strings.Index(idStr, ","); index != -1 {
|
||||
idStr = idStr[:index]
|
||||
}
|
||||
if strings.EqualFold(strings.TrimSpace(nameStr), strings.TrimSpace(name)) {
|
||||
return strconv.Atoi(idStr)
|
||||
} else {
|
||||
sim := utils.Similarity(nameStr, name)
|
||||
if sim >= 0.8 && sim > maxSim {
|
||||
maxSim = sim
|
||||
maxSimID, _ = strconv.Atoi(idStr)
|
||||
}
|
||||
}
|
||||
}
|
||||
if maxSimID != 0 {
|
||||
return maxSimID, nil
|
||||
}
|
||||
return 0, fmt.Errorf("steam ID not found: %s", name)
|
||||
}
|
||||
|
||||
func GetSteamID(name string) (int, error) {
|
||||
name1 := name
|
||||
name2 := FormatName(name)
|
||||
names := []string{name1}
|
||||
if name1 != name2 {
|
||||
names = append(names, name2)
|
||||
}
|
||||
for _, n := range names {
|
||||
id, err := getSteamID(n)
|
||||
if err == nil {
|
||||
return id, nil
|
||||
}
|
||||
}
|
||||
return 0, errors.New("steam ID not found")
|
||||
}
|
||||
|
||||
func GetSteamIDCache(name string) (int, error) {
|
||||
if config.Config.RedisAvaliable {
|
||||
key := fmt.Sprintf("steam_id:%s", name)
|
||||
val, exist := cache.Get(key)
|
||||
if exist {
|
||||
id, err := strconv.Atoi(val)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return id, nil
|
||||
} else {
|
||||
id, err := GetSteamID(name)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
_ = cache.Add(key, id)
|
||||
return id, nil
|
||||
}
|
||||
} else {
|
||||
return GetSteamID(name)
|
||||
}
|
||||
}
|
||||
|
||||
// GetSteamAppDetail fetches the details of a Steam app by its ID.
|
||||
func GetSteamAppDetail(id int) (*model.SteamAppDetail, error) {
|
||||
key := fmt.Sprintf("steam_game:%d", id)
|
||||
if val, exist := cache.Get(key); exist {
|
||||
var detail model.SteamAppDetail
|
||||
if err := json.Unmarshal([]byte(val), &detail); err != nil {
|
||||
return nil, fmt.Errorf("failed to unmarshal cached Steam app detail for ID %d: %w", id, err)
|
||||
}
|
||||
return &detail, nil
|
||||
}
|
||||
|
||||
baseURL, _ := url.Parse(constant.SteamAppDetailURL)
|
||||
params := url.Values{}
|
||||
params.Add("appids", strconv.Itoa(id))
|
||||
// params.Add("l", "schinese")
|
||||
baseURL.RawQuery = params.Encode()
|
||||
resp, err := utils.Fetch(utils.FetchConfig{
|
||||
Url: baseURL.String(),
|
||||
Headers: map[string]string{
|
||||
"User-Agent": "",
|
||||
},
|
||||
})
|
||||
|
||||
resp, err := utils.Request().SetHeaders(map[string]string{
|
||||
"User-Agent": "",
|
||||
}).Get(baseURL.String())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return nil, fmt.Errorf("failed to fetch Steam app detail for ID %d: %w", id, err)
|
||||
}
|
||||
|
||||
var detail map[string]*model.SteamAppDetail
|
||||
if err = json.Unmarshal(resp.Data, &detail); err != nil {
|
||||
return nil, err
|
||||
if err := json.Unmarshal(resp.Body(), &detail); err != nil {
|
||||
return nil, fmt.Errorf("failed to unmarshal Steam app detail for ID %d: %w", id, err)
|
||||
}
|
||||
if _, ok := detail[strconv.Itoa(id)]; !ok {
|
||||
return nil, fmt.Errorf("steam App not found: %d", id)
|
||||
}
|
||||
if detail[strconv.Itoa(id)] == nil {
|
||||
return nil, fmt.Errorf("steam App not found: %d", id)
|
||||
}
|
||||
return detail[strconv.Itoa(id)], nil
|
||||
}
|
||||
|
||||
func GetSteamAppDetailCache(id int) (*model.SteamAppDetail, error) {
|
||||
if config.Config.RedisAvaliable {
|
||||
key := fmt.Sprintf("steam_game:%d", id)
|
||||
val, exist := cache.Get(key)
|
||||
if exist {
|
||||
var detail model.SteamAppDetail
|
||||
if err := json.Unmarshal([]byte(val), &detail); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &detail, nil
|
||||
} else {
|
||||
data, err := GetSteamAppDetail(id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
dataBytes, err := json.Marshal(data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
_ = cache.Add(key, dataBytes)
|
||||
return data, nil
|
||||
}
|
||||
if appDetail, ok := detail[strconv.Itoa(id)]; !ok || appDetail == nil {
|
||||
return nil, fmt.Errorf("steam app not found: %d", id)
|
||||
} else {
|
||||
return GetSteamAppDetail(id)
|
||||
// Cache the result
|
||||
jsonBytes, err := json.Marshal(appDetail)
|
||||
if err == nil {
|
||||
_ = cache.Set(key, string(jsonBytes))
|
||||
}
|
||||
return appDetail, nil
|
||||
}
|
||||
}
|
||||
|
||||
// GenerateSteamGameInfo generates detailed game information based on a Steam App ID.
|
||||
func GenerateSteamGameInfo(id int) (*model.GameInfo, error) {
|
||||
item := &model.GameInfo{}
|
||||
detail, err := GetSteamAppDetailCache(id)
|
||||
detail, err := GetSteamAppDetail(id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return nil, fmt.Errorf("failed to fetch Steam app detail for ID %d: %w", id, err)
|
||||
}
|
||||
item.SteamID = id
|
||||
item.Name = detail.Data.Name
|
||||
item.Description = detail.Data.ShortDescription
|
||||
item.Cover = fmt.Sprintf("https://shared.cloudflare.steamstatic.com/store_item_assets/steam/apps/%v/library_600x900_2x.jpg", id)
|
||||
item.Developers = detail.Data.Developers
|
||||
item.Publishers = detail.Data.Publishers
|
||||
var screenshots []string
|
||||
|
||||
item := &model.GameInfo{
|
||||
SteamID: id,
|
||||
Name: detail.Data.Name,
|
||||
Description: detail.Data.ShortDescription,
|
||||
Cover: fmt.Sprintf("https://shared.cloudflare.steamstatic.com/store_item_assets/steam/apps/%v/library_600x900_2x.jpg", id),
|
||||
Developers: detail.Data.Developers,
|
||||
Publishers: detail.Data.Publishers,
|
||||
Screenshots: make([]string, 0, len(detail.Data.Screenshots)),
|
||||
}
|
||||
|
||||
for _, screenshot := range detail.Data.Screenshots {
|
||||
screenshots = append(screenshots, screenshot.PathFull)
|
||||
item.Screenshots = append(item.Screenshots, screenshot.PathFull)
|
||||
}
|
||||
item.Screenshots = screenshots
|
||||
|
||||
return item, nil
|
||||
}
|
||||
|
||||
// OrganizeGameItemWithSteam Will add GameItem.ID to the newly added GameInfo.GameIDs
|
||||
func OrganizeGameItemWithSteam(id int, game *model.GameItem) (*model.GameInfo, error) {
|
||||
var err error
|
||||
if id == 0 {
|
||||
id, err = GetSteamIDCache(game.Name)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
d, err := db.GetGameInfoByPlatformID("steam", id)
|
||||
if err == nil {
|
||||
d.GameIDs = append(d.GameIDs, game.ID)
|
||||
d.GameIDs = utils.Unique(d.GameIDs)
|
||||
return d, nil
|
||||
}
|
||||
detail, err := GenerateGameInfo("steam", id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
detail.GameIDs = append(detail.GameIDs, game.ID)
|
||||
detail.GameIDs = utils.Unique(detail.GameIDs)
|
||||
return detail, nil
|
||||
}
|
||||
|
||||
// GetSteamIDByIGDBID retrieves the Steam App ID associated with a given IGDB ID.
|
||||
func GetSteamIDByIGDBID(IGDBID int) (int, error) {
|
||||
var err error
|
||||
if TwitchToken == "" {
|
||||
TwitchToken, err = LoginTwitch()
|
||||
key := fmt.Sprintf("steam_game:%d", IGDBID)
|
||||
if val, exist := cache.Get(key); exist {
|
||||
id, err := strconv.Atoi(val)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
return 0, fmt.Errorf("failed to parse cached Steam ID for IGDB ID %d: %w", IGDBID, err)
|
||||
}
|
||||
return id, nil
|
||||
}
|
||||
resp, err := utils.Fetch(utils.FetchConfig{
|
||||
Url: constant.IGDBWebsitesURL,
|
||||
Method: "POST",
|
||||
Headers: map[string]string{
|
||||
"Client-ID": config.Config.Twitch.ClientID,
|
||||
"Authorization": "Bearer " + TwitchToken,
|
||||
"User-Agent": "",
|
||||
"Content-Type": "text/plain",
|
||||
},
|
||||
Data: fmt.Sprintf(`where game = %v; fields *; limit 500;`, IGDBID),
|
||||
})
|
||||
|
||||
query := fmt.Sprintf(`where game = %v; fields *; limit 500;`, IGDBID)
|
||||
resp, err := igdbRequest(constant.IGDBWebsitesURL, query)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
return 0, fmt.Errorf("failed to fetch IGDB websites for IGDB ID %d: %w", IGDBID, err)
|
||||
}
|
||||
|
||||
var data []struct {
|
||||
Game int `json:"game"`
|
||||
Url string `json:"url"`
|
||||
}
|
||||
if err = json.Unmarshal(resp.Data, &data); err != nil {
|
||||
return 0, err
|
||||
if err := json.Unmarshal(resp.Body(), &data); err != nil {
|
||||
return 0, fmt.Errorf("failed to unmarshal IGDB websites response for IGDB ID %d: %w", IGDBID, err)
|
||||
}
|
||||
|
||||
if len(data) == 0 {
|
||||
return 0, errors.New("not found")
|
||||
return 0, errors.New("steam ID not found")
|
||||
}
|
||||
|
||||
for _, v := range data {
|
||||
if strings.HasPrefix(v.Url, "https://store.steampowered.com/app/") {
|
||||
regex := regexp.MustCompile(`https://store.steampowered.com/app/(\d+)/?`)
|
||||
idStr := regex.FindStringSubmatch(v.Url)
|
||||
if len(idStr) < 2 {
|
||||
return 0, errors.New("failed parse")
|
||||
idMatch := regex.FindStringSubmatch(v.Url)
|
||||
if len(idMatch) < 2 {
|
||||
return 0, errors.New("failed to parse Steam ID from URL")
|
||||
}
|
||||
steamID, err := strconv.Atoi(idStr[1])
|
||||
|
||||
steamID, err := strconv.Atoi(idMatch[1])
|
||||
if err != nil {
|
||||
return 0, err
|
||||
return 0, fmt.Errorf("failed to convert Steam ID from URL %s: %w", v.Url, err)
|
||||
}
|
||||
|
||||
// Cache the result
|
||||
_ = cache.Set(key, strconv.Itoa(steamID))
|
||||
return steamID, nil
|
||||
}
|
||||
}
|
||||
return 0, errors.New("not found")
|
||||
}
|
||||
|
||||
func GetSteamIDByIGDBIDCache(IGDBID int) (int, error) {
|
||||
if config.Config.RedisAvaliable {
|
||||
key := fmt.Sprintf("steam_game:%d", IGDBID)
|
||||
val, exist := cache.Get(key)
|
||||
if exist {
|
||||
id, err := strconv.Atoi(val)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return id, nil
|
||||
} else {
|
||||
id, err := GetSteamIDByIGDBID(IGDBID)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
dataBytes := strconv.Itoa(id)
|
||||
_ = cache.Add(key, dataBytes)
|
||||
return id, nil
|
||||
}
|
||||
} else {
|
||||
return GetSteamIDByIGDBID(IGDBID)
|
||||
}
|
||||
return 0, fmt.Errorf("no valid Steam ID found for IGDB ID %d", IGDBID)
|
||||
}
|
||||
|
@ -2,63 +2,116 @@ package crawler

import (
    "bytes"
    "encoding/json"
    "fmt"
    "pcgamedb/db"
    "game-crawler/cache"
    "game-crawler/db"
    "net/url"
    "regexp"
    "strconv"
    "time"

    "pcgamedb/constant"
    "pcgamedb/model"
    "pcgamedb/utils"
    "game-crawler/constant"
    "game-crawler/model"
    "game-crawler/utils"

    "github.com/PuerkitoBio/goquery"
)

func GetSteam250(url string) ([]*model.GameInfo, error) {
    resp, err := utils.Fetch(utils.FetchConfig{
        Url: url,
    })
    if err != nil {
        return nil, err
// GetSteam250 fetches Steam250 game rankings from the given URL.
func GetSteam250(URL string) ([]*model.GameInfo, error) {
    key := "steam250:" + url.QueryEscape(URL)
    if val, ok := cache.Get(key); ok {
        var infos []*model.GameInfo
        if err := json.Unmarshal([]byte(val), &infos); err != nil {
            return nil, fmt.Errorf("failed to unmarshal cached Steam250 data for URL %s: %w", URL, err)
        }
        return infos, nil
    }
    doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data))

    resp, err := utils.Request().Get(URL)
    if err != nil {
        return nil, err
        return nil, fmt.Errorf("failed to fetch Steam250 rankings from URL %s: %w", URL, err)
    }

    doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Body()))
    if err != nil {
        return nil, fmt.Errorf("failed to parse Steam250 HTML document for URL %s: %w", URL, err)
    }

    var rank []model.Steam250Item
    var item model.Steam250Item
    steamIDs := make([]int, 0)

    doc.Find(".appline").Each(func(i int, s *goquery.Selection) {
        // Extract game name
        item.Name = s.Find(".title>a").First().Text()
        if item.Name == "" {
            return
        }

        // Extract Steam ID
        idStr := s.Find(".store").AttrOr("href", "")
        idSlice := regexp.MustCompile(`app/(\d+)/`).FindStringSubmatch(idStr)
        if len(idSlice) < 2 {
            return
        }
        item.SteamID, _ = strconv.Atoi(idSlice[1])

        steamID, err := strconv.Atoi(idSlice[1])
        if err != nil {
            return
        }

        item.SteamID = steamID
        rank = append(rank, item)
        steamIDs = append(steamIDs, item.SteamID)
        steamIDs = append(steamIDs, steamID)
    })

    if len(steamIDs) == 0 {
        return nil, fmt.Errorf("no valid Steam IDs found in Steam250 rankings for URL %s", URL)
    }

    infos, err := db.GetGameInfosByPlatformIDs("steam", steamIDs)
    if err != nil {
        return nil, err
        return nil, fmt.Errorf("failed to fetch game info for Steam IDs %v: %w", steamIDs, err)
    }
    return infos[:10], nil

    // Limit the result to 10 entries (if applicable)
    if len(infos) > 10 {
        infos = infos[:10]
    }

    // Cache the result
    jsonBytes, err := json.Marshal(infos)
    if err == nil {
        _ = cache.SetWithExpire(key, string(jsonBytes), 24*time.Hour)
    }

    return infos, nil
}

// GetSteam250Top250 retrieves the top 250 games from Steam250.
func GetSteam250Top250() ([]*model.GameInfo, error) {
    return GetSteam250(constant.Steam250Top250URL)
}

// GetSteam250BestOfTheYear retrieves the best games of the current year from Steam250.
func GetSteam250BestOfTheYear() ([]*model.GameInfo, error) {
    return GetSteam250(fmt.Sprintf(constant.Steam250BestOfTheYearURL, time.Now().UTC().Year()))
    year := time.Now().UTC().Year()
    return GetSteam250(fmt.Sprintf(constant.Steam250BestOfTheYearURL, year))
}

// GetSteam250WeekTop50 retrieves the top 50 games of the week from Steam250.
func GetSteam250WeekTop50() ([]*model.GameInfo, error) {
    return GetSteam250(constant.Steam250WeekTop50URL)
}

// GetSteam250MonthTop50 retrieves the top 50 games of the month from Steam250.
func GetSteam250MonthTop50() ([]*model.GameInfo, error) {
    return GetSteam250(constant.Steam250MonthTop50URL)
}

// GetSteam250MostPlayed retrieves the most played games from Steam250.
func GetSteam250MostPlayed() ([]*model.GameInfo, error) {
    return GetSteam250(constant.Steam250MostPlayedURL)
}
@ -2,141 +2,174 @@ package crawler

import (
    "bytes"
    "encoding/base64"
    "errors"
    "fmt"
    "net/url"
    "regexp"
    "strings"

    "pcgamedb/constant"
    "pcgamedb/db"
    "pcgamedb/model"
    "pcgamedb/utils"
    "game-crawler/constant"
    "game-crawler/db"
    "game-crawler/model"
    "game-crawler/utils"

    "github.com/PuerkitoBio/goquery"
    "go.uber.org/zap"
)

// SteamRIPCrawler defines a crawler for the SteamRIP website.
type SteamRIPCrawler struct {
    logger *zap.Logger
}

// NewSteamRIPCrawler creates a new instance of SteamRIPCrawler.
func NewSteamRIPCrawler(logger *zap.Logger) *SteamRIPCrawler {
    return &SteamRIPCrawler{
        logger: logger,
    }
}

// Name returns the name of the crawler.
func (c *SteamRIPCrawler) Name() string {
    return "SteamRIPCrawler"
}

func (c *SteamRIPCrawler) CrawlByUrl(url string) (*model.GameItem, error) {
    resp, err := utils.Fetch(utils.FetchConfig{
        Url: url,
    })
// CrawlByUrl crawls a single game page from SteamRIP by URL.
func (c *SteamRIPCrawler) CrawlByUrl(URL string) (*model.GameItem, error) {
    c.logger.Info("Crawling game", zap.String("URL", URL))

    // Fetch the page content
    resp, err := utils.Request().Get(URL)
    if err != nil {
        return nil, err
        c.logger.Error("Failed to fetch URL", zap.String("URL", URL), zap.Error(err))
        return nil, fmt.Errorf("failed to fetch URL %s: %w", URL, err)
    }
    doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data))

    doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Body()))
    if err != nil {
        return nil, err
        c.logger.Error("Failed to parse HTML document", zap.String("URL", URL), zap.Error(err))
        return nil, fmt.Errorf("failed to parse HTML document for URL %s: %w", URL, err)
    }
    item, err := db.GetGameItemByUrl(url)

    // Retrieve or create game item
    item, err := db.GetGameItemByUrl(URL)
    if err != nil {
        return nil, err
        c.logger.Warn("Game item not found in database, creating a new one", zap.String("URL", URL), zap.Error(err))
        item = &model.GameItem{}
    }

    // Extract game details
    item.RawName = strings.TrimSpace(doc.Find(".entry-title").First().Text())
    if item.RawName == "" {
        c.logger.Warn("Game title not found", zap.String("URL", URL))
        return nil, errors.New("game title not found")
    }
    item.Name = SteamRIPFormatter(item.RawName)
    item.Url = url
    item.Url = URL
    item.Author = "SteamRIP"
    item.Platform = "windows"

    // Extract game size
    sizeRegex := regexp.MustCompile(`(?i)<li><strong>Game Size:\s?</strong>(.*?)</li>`)
    sizeRegexRes := sizeRegex.FindStringSubmatch(string(resp.Data))
    if len(sizeRegexRes) != 0 {
        item.Size = strings.TrimSpace(sizeRegexRes[1])
    sizeMatch := sizeRegex.FindStringSubmatch(string(resp.Body()))
    if len(sizeMatch) > 1 {
        item.Size = strings.TrimSpace(sizeMatch[1])
    } else {
        item.Size = "unknown"
    }
    megadbRegex := regexp.MustCompile(`(?i)(?:https?:)?(//megadb\.net/[^"]+)`)
    megadbRegexRes := megadbRegex.FindStringSubmatch(string(resp.Data))
    if len(megadbRegexRes) != 0 {
        item.Download = fmt.Sprintf("https:%s", megadbRegexRes[1])
    }
    if item.Download == "" {
        gofileRegex := regexp.MustCompile(`(?i)(?:https?:)?(//gofile\.io/d/[^"]+)`)
        gofileRegexRes := gofileRegex.FindStringSubmatch(string(resp.Data))
        if len(gofileRegexRes) != 0 {
            item.Download = fmt.Sprintf("https:%s", gofileRegexRes[1])
        }
    }
    if item.Download == "" {
        filecryptRegex := regexp.MustCompile(`(?i)(?:https?:)?(//filecrypt\.co/Container/[^"]+)`)
        filecryptRegexRes := filecryptRegex.FindStringSubmatch(string(resp.Data))
        if len(filecryptRegexRes) != 0 {
            item.Download = fmt.Sprintf("https:%s", filecryptRegexRes[1])
        }
    }
    if item.Download == "" {

    downloadLinks := map[string]string{}
    doc.Find(".shortc-button").Each(func(i int, s *goquery.Selection) {
        downloadLink, _ := s.Attr("href")
        u, _ := url.Parse(downloadLink)
        downloadLinks[u.Host] = downloadLink
    })
    item.Downloads = downloadLinks

    if len(item.Downloads) == 0 {
        c.logger.Warn("No download links found", zap.String("URL", URL))
        return nil, errors.New("failed to find download link")
    }

    return item, nil
}

// Crawl crawls a limited number of games from the SteamRIP game list.
func (c *SteamRIPCrawler) Crawl(num int) ([]*model.GameItem, error) {
    count := 0
    resp, err := utils.Fetch(utils.FetchConfig{
        Url: constant.SteamRIPGameListURL,
    })
    c.logger.Info("Starting SteamRIP crawl", zap.Int("limit", num))

    // Fetch the game list page
    resp, err := utils.Request().Get(constant.SteamRIPGameListURL)
    if err != nil {
        return nil, err
        c.logger.Error("Failed to fetch game list", zap.String("URL", constant.SteamRIPGameListURL), zap.Error(err))
        return nil, fmt.Errorf("failed to fetch game list: %w", err)
    }
    doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data))

    doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Body()))
    if err != nil {
        return nil, err
        c.logger.Error("Failed to parse game list HTML document", zap.String("URL", constant.SteamRIPGameListURL), zap.Error(err))
        return nil, fmt.Errorf("failed to parse game list HTML document: %w", err)
    }

    var items []*model.GameItem
    var urls []string
    var updateFlags []string // title
    var updateFlags []string

    // Extract game URLs
    doc.Find(".az-list-item>a").Each(func(i int, s *goquery.Selection) {
        u, exist := s.Attr("href")
        if !exist {
        u, exists := s.Attr("href")
        if !exists {
            return
        }
        urls = append(urls, fmt.Sprintf("%s%s", constant.SteamRIPBaseURL, u))
        updateFlags = append(updateFlags, s.Text())
        updateFlags = append(updateFlags, base64.StdEncoding.EncodeToString([]byte(s.Text())))
    })

    // Crawl games
    count := 0
    for i, u := range urls {
        if count == num {
        if num > 0 && count == num {
            break
        }
        if db.IsSteamRIPCrawled(updateFlags[i]) {
            c.logger.Info("Skipping already crawled game", zap.String("URL", u))
            continue
        }
        c.logger.Info("Crawling", zap.String("URL", u))

        c.logger.Info("Crawling game", zap.String("URL", u))
        item, err := c.CrawlByUrl(u)
        if err != nil {
            c.logger.Warn("Failed to crawl", zap.Error(err), zap.String("URL", u))
            c.logger.Warn("Failed to crawl game", zap.String("URL", u), zap.Error(err))
            continue
        }

        item.UpdateFlag = updateFlags[i]
        if err := db.SaveGameItem(item); err != nil {
            c.logger.Warn("Failed to save item", zap.Error(err))
            c.logger.Warn("Failed to save game item", zap.String("URL", u), zap.Error(err))
            continue
        }

        items = append(items, item)
        count++

        if err := OrganizeGameItem(item); err != nil {
            c.logger.Warn("Failed to organize", zap.Error(err), zap.String("URL", u))
            c.logger.Warn("Failed to organize game item", zap.String("URL", u), zap.Error(err))
            continue
        }
    }

    c.logger.Info("SteamRIP crawl completed", zap.Int("gamesCrawled", len(items)))
    return items, nil
}

// CrawlAll crawls all games from the SteamRIP game list.
func (c *SteamRIPCrawler) CrawlAll() ([]*model.GameItem, error) {
    c.logger.Info("Starting full crawl of SteamRIP")
    return c.Crawl(-1)
}

// SteamRIPFormatter formats the game name by removing unnecessary text.
func SteamRIPFormatter(name string) string {
    name = regexp.MustCompile(`\([^\)]+\)`).ReplaceAllString(name, "")
    name = strings.Replace(name, "Free Download", "", -1)
184
crawler/xatab.go
@ -2,16 +2,17 @@ package crawler

import (
    "bytes"
    "encoding/base64"
    "errors"
    "fmt"
    "regexp"
    "strconv"
    "strings"

    "pcgamedb/constant"
    "pcgamedb/db"
    "pcgamedb/model"
    "pcgamedb/utils"
    "game-crawler/constant"
    "game-crawler/db"
    "game-crawler/model"
    "game-crawler/utils"

    "github.com/PuerkitoBio/goquery"
    "go.uber.org/zap"
@ -31,153 +32,215 @@ func (c *XatabCrawler) Name() string {
    return "XatabCrawler"
}

// Crawl crawls a single page of the Xatab website.
func (c *XatabCrawler) Crawl(page int) ([]*model.GameItem, error) {
    requestURL := fmt.Sprintf("%s/page/%v", constant.XatabBaseURL, page)
    resp, err := utils.Fetch(utils.FetchConfig{
        Url: requestURL,
    })
    c.logger.Info("Fetching page", zap.String("URL", requestURL))

    // Fetch the page content
    resp, err := utils.Request().Get(requestURL)
    if err != nil {
        c.logger.Error("Failed to fetch", zap.Error(err))
        return nil, err
        c.logger.Error("Failed to fetch page", zap.String("URL", requestURL), zap.Error(err))
        return nil, fmt.Errorf("failed to fetch page %d: %w", page, err)
    }
    doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data))

    doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Body()))
    if err != nil {
        c.logger.Error("Failed to parse HTML", zap.Error(err))
        return nil, err
        c.logger.Error("Failed to parse HTML", zap.String("URL", requestURL), zap.Error(err))
        return nil, fmt.Errorf("failed to parse HTML for page %d: %w", page, err)
    }

    var urls []string
    var updateFlags []string // title
    var updateFlags []string

    // Extract game URLs and titles
    doc.Find(".entry").Each(func(i int, s *goquery.Selection) {
        u, exist := s.Find(".entry__title.h2 a").Attr("href")
        if !exist {
        u, exists := s.Find(".entry__title.h2 a").Attr("href")
        if !exists {
            return
        }
        urls = append(urls, u)
        updateFlags = append(updateFlags, s.Find(".entry__title.h2 a").Text())
        updateFlags = append(updateFlags, base64.StdEncoding.EncodeToString([]byte(s.Find(".entry__title.h2 a").Text())))
    })

    // Process each game URL
    var res []*model.GameItem
    for i, u := range urls {
        if db.IsXatabCrawled(updateFlags[i]) {
            c.logger.Info("Skipping already crawled game", zap.String("URL", u))
            continue
        }
        c.logger.Info("Crawling", zap.String("URL", u))

        c.logger.Info("Crawling game", zap.String("URL", u))
        item, err := c.CrawlByUrl(u)
        if err != nil {
            c.logger.Warn("Failed to crawl", zap.Error(err), zap.String("URL", u))
            c.logger.Warn("Failed to crawl game", zap.String("URL", u), zap.Error(err))
            continue
        }
        err = db.SaveGameItem(item)
        if err != nil {
            c.logger.Warn("Failed to save", zap.Error(err))

        item.UpdateFlag = updateFlags[i]
        if err := db.SaveGameItem(item); err != nil {
            c.logger.Warn("Failed to save game item", zap.String("URL", u), zap.Error(err))
            continue
        }

        res = append(res, item)

        if err := OrganizeGameItem(item); err != nil {
            c.logger.Warn("Failed to organize", zap.Error(err), zap.String("URL", u))
            c.logger.Warn("Failed to organize game item", zap.String("URL", u), zap.Error(err))
            continue
        }
    }

    c.logger.Info("Crawled page successfully", zap.Int("gamesCrawled", len(res)), zap.Int("page", page))
    return res, nil
}

func (c *XatabCrawler) CrawlByUrl(url string) (*model.GameItem, error) {
    resp, err := utils.Fetch(utils.FetchConfig{
        Url: url,
    })
// CrawlByUrl crawls a single game page from Xatab by URL.
func (c *XatabCrawler) CrawlByUrl(URL string) (*model.GameItem, error) {
    c.logger.Info("Crawling game", zap.String("URL", URL))

    // Fetch the game page
    resp, err := utils.Request().Get(URL)
    if err != nil {
        return nil, err
        c.logger.Error("Failed to fetch game page", zap.String("URL", URL), zap.Error(err))
        return nil, fmt.Errorf("failed to fetch game page %s: %w", URL, err)
    }
    doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data))

    doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Body()))
    if err != nil {
        return nil, err
        c.logger.Error("Failed to parse game HTML", zap.String("URL", URL), zap.Error(err))
        return nil, fmt.Errorf("failed to parse game HTML for URL %s: %w", URL, err)
    }
    item, err := db.GetGameItemByUrl(url)

    // Retrieve or create game item
    item, err := db.GetGameItemByUrl(URL)
    if err != nil {
        return nil, err
        c.logger.Warn("Failed to fetch game item from database, creating new", zap.String("URL", URL), zap.Error(err))
        item = &model.GameItem{}
    }

    item.Url = URL
    item.RawName = strings.TrimSpace(doc.Find(".inner-entry__title").First().Text())
    if item.RawName == "" {
        c.logger.Warn("Game title not found", zap.String("URL", URL))
        return nil, errors.New("game title not found")
    }
    item.Url = url
    item.RawName = doc.Find(".inner-entry__title").First().Text()
    item.Name = XatabFormatter(item.RawName)
    item.Author = "Xatab"
    item.UpdateFlag = item.RawName
    item.Platform = "windows"

    // Extract download URL
    downloadURL := doc.Find("#download>a").First().AttrOr("href", "")
    if downloadURL == "" {
        c.logger.Warn("Download URL not found", zap.String("URL", URL))
        return nil, errors.New("failed to find download URL")
    }
    resp, err = utils.Fetch(utils.FetchConfig{
        Headers: map[string]string{"Referer": url},
        Url:     downloadURL,
    })

    // Fetch torrent file and convert to magnet link
    resp, err = utils.Request().SetHeaders(map[string]string{"Referer": URL}).Get(downloadURL)
    if err != nil {
        return nil, err
        c.logger.Error("Failed to fetch download link", zap.String("URL", downloadURL), zap.Error(err))
        return nil, fmt.Errorf("failed to fetch download link %s: %w", downloadURL, err)
    }
    magnet, size, err := utils.ConvertTorrentToMagnet(resp.Data)

    magnet, size, err := utils.ConvertTorrentToMagnet(resp.Body())
    if err != nil {
        return nil, err
        c.logger.Error("Failed to convert torrent to magnet", zap.String("URL", downloadURL), zap.Error(err))
        return nil, fmt.Errorf("failed to convert torrent to magnet: %w", err)
    }

    item.Size = size
    item.Download = magnet
    item.Downloads = map[string]string{
        "magnet": magnet,
    }

    return item, nil
}

// CrawlMulti crawls multiple pages from Xatab.
func (c *XatabCrawler) CrawlMulti(pages []int) ([]*model.GameItem, error) {
    c.logger.Info("Starting multi-page crawl", zap.Ints("pages", pages))

    totalPageNum, err := c.GetTotalPageNum()
    if err != nil {
        return nil, err
        c.logger.Error("Failed to get total page number", zap.Error(err))
        return nil, fmt.Errorf("failed to get total page number: %w", err)
    }

    var res []*model.GameItem
    for _, page := range pages {
        if page > totalPageNum {
            c.logger.Warn("Skipping page out of range", zap.Int("page", page), zap.Int("totalPages", totalPageNum))
            continue
        }

        items, err := c.Crawl(page)
        if err != nil {
            return nil, err
            c.logger.Warn("Failed to crawl page", zap.Int("page", page), zap.Error(err))
            continue
        }

        res = append(res, items...)
    }

    c.logger.Info("Multi-page crawl completed", zap.Int("gamesCrawled", len(res)))
    return res, nil
}

// CrawlAll crawls all pages from Xatab.
func (c *XatabCrawler) CrawlAll() ([]*model.GameItem, error) {
    c.logger.Info("Starting full crawl of Xatab")

    totalPageNum, err := c.GetTotalPageNum()
    if err != nil {
        return nil, err
        c.logger.Error("Failed to get total page number", zap.Error(err))
        return nil, fmt.Errorf("failed to get total page number: %w", err)
    }

    var res []*model.GameItem
    for i := 1; i <= totalPageNum; i++ {
        items, err := c.Crawl(i)
        if err != nil {
            return nil, err
            c.logger.Warn("Failed to crawl page", zap.Int("page", i), zap.Error(err))
            continue
        }

        res = append(res, items...)
    }

    c.logger.Info("Full crawl completed", zap.Int("gamesCrawled", len(res)))
    return res, nil
}

// GetTotalPageNum retrieves the total number of pages from Xatab.
func (c *XatabCrawler) GetTotalPageNum() (int, error) {
    resp, err := utils.Fetch(utils.FetchConfig{
        Url: constant.XatabBaseURL,
    })
    c.logger.Info("Fetching total page number")

    resp, err := utils.Request().Get(constant.XatabBaseURL)
    if err != nil {
        return 0, err
        c.logger.Error("Failed to fetch base URL", zap.String("URL", constant.XatabBaseURL), zap.Error(err))
        return 0, fmt.Errorf("failed to fetch base URL: %w", err)
    }
    doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data))

    doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Body()))
    if err != nil {
        return 0, err
        c.logger.Error("Failed to parse base HTML", zap.String("URL", constant.XatabBaseURL), zap.Error(err))
        return 0, fmt.Errorf("failed to parse base HTML: %w", err)
    }

    pageStr := doc.Find(".pagination>a").Last().Text()
    totalPageNum, err := strconv.Atoi(pageStr)
    if err != nil {
        return 0, err
        c.logger.Error("Failed to parse total page number", zap.String("pageStr", pageStr), zap.Error(err))
        return 0, fmt.Errorf("failed to parse total page number: %w", err)
    }

    c.logger.Info("Fetched total page number", zap.Int("totalPages", totalPageNum))
    return totalPageNum, nil
}

var xatabRegexps = []*regexp.Regexp{
    regexp.MustCompile(`(?i)\sPC$`),
}

// XatabFormatter formats the game name by removing unnecessary text.
func XatabFormatter(name string) string {
    reg1 := regexp.MustCompile(`(?i)v(er)?\s?(\.)?\d+(\.\d+)*`)
    if index := reg1.FindIndex([]byte(name)); index != nil {
@ -196,10 +259,13 @@ func XatabFormatter(name string) string {
        name = name[:index]
    }
    name = strings.TrimSpace(name)

    // Remove specific patterns
    for _, re := range xatabRegexps {
        name = re.ReplaceAllString(name, "")
    }

    // Handle names separated by "/"
    if index := strings.Index(name, "/"); index != -1 {
        names := strings.Split(name, "/")
        longestLength := 0
@ -215,3 +281,7 @@ func XatabFormatter(name string) string {

    return strings.TrimSpace(name)
}

var xatabRegexps = []*regexp.Regexp{
    regexp.MustCompile(`(?i)\sPC$`),
}
@ -1,7 +1,7 @@
package db

import (
    "pcgamedb/model"
    "game-crawler/model"
)

func GetDODIGameItems() ([]*model.GameItem, error) {
@ -1,6 +1,6 @@
package db

import "pcgamedb/model"
import "game-crawler/model"

func IsARMGDDNCrawled(flag string) bool {
    return IsGameCrawled(flag, "armgddn")
@ -3,7 +3,7 @@ package db

import (
    "context"

    "pcgamedb/config"
    "game-crawler/config"

    "go.mongodb.org/mongo-driver/mongo"
    "go.mongodb.org/mongo-driver/mongo/options"
8
db/db.go
@ -6,8 +6,8 @@ import (
    "sync"
    "time"

    "pcgamedb/config"
    "pcgamedb/log"
    "game-crawler/config"
    "game-crawler/log"

    "go.mongodb.org/mongo-driver/bson"
    "go.mongodb.org/mongo-driver/mongo"
@ -32,10 +32,6 @@ var (
)

func connect() {
    if !config.Config.DatabaseAvaliable {
        log.Logger.Panic("Missing database configuration information")
        return
    }
    ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
    defer cancel()
    clientOptions := options.Client().ApplyURI(fmt.Sprintf(
@ -5,8 +5,8 @@ import (
    "encoding/json"
    "time"

    "game-crawler/model"
    "go.mongodb.org/mongo-driver/bson"
    "pcgamedb/model"
)

func Export() ([]byte, []byte, error) {
@ -1,6 +1,6 @@
package db

import "pcgamedb/model"
import "game-crawler/model"

func GetFitgirlAllGameItems() ([]*model.GameItem, error) {
    return GetGameItemsByAuthor("fitgirl")
@ -1,7 +1,7 @@
package db

import (
    "pcgamedb/model"
    "game-crawler/model"
)

func GetFreeGOGGameItems() ([]*model.GameItem, error) {
160
db/game.go
@ -5,14 +5,13 @@ import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"pcgamedb/utils"
|
||||
"game-crawler/utils"
|
||||
"regexp"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"pcgamedb/cache"
|
||||
"pcgamedb/config"
|
||||
"pcgamedb/model"
|
||||
"game-crawler/cache"
|
||||
"game-crawler/model"
|
||||
|
||||
"go.mongodb.org/mongo-driver/bson"
|
||||
"go.mongodb.org/mongo-driver/bson/primitive"
|
||||
@ -132,6 +131,34 @@ func SaveGameItem(item *model.GameItem) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func SaveGameItems(items []*model.GameItem) error {
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 120*time.Second)
|
||||
defer cancel()
|
||||
operations := make([]mongo.WriteModel, len(items))
|
||||
for i, item := range items {
|
||||
if item.ID.IsZero() {
|
||||
item.ID = primitive.NewObjectID()
|
||||
}
|
||||
if item.CreatedAt.IsZero() {
|
||||
item.CreatedAt = time.Now()
|
||||
}
|
||||
item.UpdatedAt = time.Now()
|
||||
item.Size = strings.Replace(item.Size, "gb", "GB", -1)
|
||||
item.Size = strings.Replace(item.Size, "mb", "MB", -1)
|
||||
operations[i] = mongo.NewUpdateOneModel().
|
||||
SetFilter(bson.D{{Key: "_id", Value: item.ID}}).
|
||||
SetUpdate(bson.D{{Key: "$set", Value: item}}).
|
||||
SetUpsert(true)
|
||||
}
|
||||
|
||||
opts := options.BulkWrite().SetOrdered(false)
|
||||
_, err := GameItemCollection.BulkWrite(ctx, operations, opts)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func SaveGameInfo(item *model.GameInfo) error {
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
|
||||
defer cancel()
|
||||
@ -153,7 +180,7 @@ func SaveGameInfo(item *model.GameInfo) error {
|
||||
}
|
||||
|
||||
func SaveGameInfos(items []*model.GameInfo) error {
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second)
|
||||
defer cancel()
|
||||
|
||||
operations := make([]mongo.WriteModel, len(items))
|
||||
@ -181,7 +208,7 @@ func SaveGameInfos(items []*model.GameInfo) error {
|
||||
|
||||
func GetAllGameItems() ([]*model.GameItem, error) {
|
||||
var items []*model.GameItem
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second)
|
||||
defer cancel()
|
||||
cursor, err := GameItemCollection.Find(ctx, bson.D{})
|
||||
if err != nil {
|
||||
@ -231,6 +258,9 @@ func GetGameItemByID(id primitive.ObjectID) (*model.GameItem, error) {
|
||||
}
|
||||
|
||||
func GetGameItemsByIDs(ids []primitive.ObjectID) ([]*model.GameItem, error) {
|
||||
if len(ids) == 0 {
|
||||
return nil, nil
|
||||
}
|
||||
var items []*model.GameItem
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
|
||||
defer cancel()
|
||||
@ -254,6 +284,7 @@ func GetGameItemsByIDs(ids []primitive.ObjectID) ([]*model.GameItem, error) {
|
||||
return items, err
|
||||
}
|
||||
|
||||
// SearchGameItems page start from 1, return (items, totalPage, error)
|
||||
func SearchGameInfos(name string, page int, pageSize int) ([]*model.GameInfo, int, error) {
|
||||
var items []*model.GameInfo
|
||||
name = removeNoneAlphaNumeric.ReplaceAllString(name, " ")
|
||||
@ -261,6 +292,21 @@ func SearchGameInfos(name string, page int, pageSize int) ([]*model.GameInfo, in
|
||||
name = strings.TrimSpace(name)
|
||||
name = strings.Replace(name, " ", ".*", -1)
|
||||
name = fmt.Sprintf("%s.*", name)
|
||||
|
||||
key := fmt.Sprintf("searchGameDetails:%s:%d:%d", name, page, pageSize)
|
||||
val, exist := cache.Get(key)
|
||||
if exist {
|
||||
var data struct {
|
||||
Items []*model.GameInfo
|
||||
TotalPage int
|
||||
}
|
||||
err := json.Unmarshal([]byte(val), &data)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
return data.Items, data.TotalPage, nil
|
||||
}
|
||||
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
|
||||
defer cancel()
|
||||
|
||||
@ -296,40 +342,19 @@ func SearchGameInfos(name string, page int, pageSize int) ([]*model.GameInfo, in
|
||||
if err := cursor.Err(); err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
return items, int(totalPage), nil
|
||||
}
|
||||
|
||||
func SearchGameInfosCache(name string, page int, pageSize int) ([]*model.GameInfo, int, error) {
|
||||
type res struct {
|
||||
jsonBytes, err := json.Marshal(struct {
|
||||
Items []*model.GameInfo
|
||||
TotalPage int
|
||||
}{
|
||||
Items: items,
|
||||
TotalPage: int(totalPage),
|
||||
})
|
||||
if err == nil {
|
||||
_ = cache.SetWithExpire(key, string(jsonBytes), time.Minute*5)
|
||||
}
|
||||
name = strings.ToLower(name)
|
||||
if config.Config.RedisAvaliable {
|
||||
key := fmt.Sprintf("searchGameDetails:%s:%d:%d", name, page, pageSize)
|
||||
val, exist := cache.Get(key)
|
||||
if exist {
|
||||
var data res
|
||||
err := json.Unmarshal([]byte(val), &data)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
return data.Items, data.TotalPage, nil
|
||||
} else {
|
||||
data, totalPage, err := SearchGameInfos(name, page, pageSize)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
dataBytes, err := json.Marshal(res{Items: data, TotalPage: totalPage})
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
_ = cache.AddWithExpire(key, string(dataBytes), 5*time.Minute)
|
||||
return data, totalPage, nil
|
||||
}
|
||||
} else {
|
||||
return SearchGameInfos(name, page, pageSize)
|
||||
}
|
||||
|
||||
return items, int(totalPage), nil
|
||||
}
|
||||
|
||||
func GetGameInfoByPlatformID(platform string, id int) (*model.GameInfo, error) {
|
||||
@ -371,22 +396,22 @@ func GetGameInfosByPlatformIDs(platform string, ids []int) ([]*model.GameInfo, e
|
||||
return games, nil
|
||||
}
|
||||
|
||||
func HasGameItemOrganized(id primitive.ObjectID) (bool, []*model.GameInfo) {
|
||||
func HasGameItemOrganized(id primitive.ObjectID) bool {
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
|
||||
defer cancel()
|
||||
filter := bson.M{"games": id}
|
||||
var res []*model.GameInfo
|
||||
cursor, err := GameInfoCollection.Find(ctx, filter)
|
||||
if err != nil {
|
||||
return false, nil
|
||||
return false
|
||||
}
|
||||
if err = cursor.All(ctx, &res); err != nil {
|
||||
return false, nil
|
||||
return false
|
||||
}
|
||||
if len(res) == 0 {
|
||||
return false, nil
|
||||
return false
|
||||
}
|
||||
return true, res
|
||||
return true
|
||||
}
|
||||
|
||||
func GetUnorganizedGameItems(num int) ([]*model.GameItem, error) {
|
||||
@ -458,17 +483,16 @@ func DeduplicateGameItems() ([]primitive.ObjectID, error) {
|
||||
var res []primitive.ObjectID
|
||||
|
||||
pipeline := mongo.Pipeline{
|
||||
bson.D{{Key: "$group", Value: bson.D{
|
||||
{Key: "_id", Value: bson.D{
|
||||
{Key: "raw_name", Value: "$raw_name"},
|
||||
{Key: "download", Value: "$download"},
|
||||
}},
|
||||
{Key: "count", Value: bson.D{{Key: "$sum", Value: 1}}},
|
||||
{Key: "ids", Value: bson.D{{Key: "$push", Value: "$_id"}}},
|
||||
}}},
|
||||
bson.D{{Key: "$match", Value: bson.D{
|
||||
{Key: "count", Value: bson.D{{Key: "$gt", Value: 1}}},
|
||||
}}},
|
||||
bson.D{
|
||||
{Key: "$group",
|
||||
Value: bson.D{
|
||||
{Key: "_id", Value: bson.D{{Key: "url", Value: "$url"}}},
|
||||
{Key: "count", Value: bson.D{{Key: "$sum", Value: 1}}},
|
||||
{Key: "ids", Value: bson.D{{Key: "$push", Value: "$_id"}}},
|
||||
},
|
||||
},
|
||||
},
|
||||
bson.D{{Key: "$match", Value: bson.D{{Key: "count", Value: bson.D{{Key: "$gt", Value: 1}}}}}},
|
||||
}
|
||||
|
||||
var qres []queryRes
|
||||
@ -886,3 +910,35 @@ func GetGameInfoByGameItemID(id primitive.ObjectID) (*model.GameInfo, error) {
|
||||
}
|
||||
return res[0], nil
|
||||
}
|
||||
|
||||
func GetOutdatedGameInfos(maxNum int) ([]*model.GameInfo, error) {
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
|
||||
defer cancel()
|
||||
opts := options.Find().SetLimit(int64(maxNum))
|
||||
filter := bson.M{
|
||||
"info_updated_at": bson.M{"$lt": time.Now().Add(-24 * time.Hour * 30)},
|
||||
}
|
||||
cursor, err := GameInfoCollection.Find(ctx, filter, opts)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var res []*model.GameInfo
|
||||
if err = cursor.All(ctx, &res); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func MergeGameInfo(oldInfo *model.GameInfo, newInfo *model.GameInfo) {
|
||||
newInfo.ID = oldInfo.ID
|
||||
newInfo.UpdatedAt = time.Now()
|
||||
newInfo.GameIDs = utils.Unique(append(newInfo.GameIDs, oldInfo.GameIDs...))
|
||||
newInfo.IGDBID = oldInfo.IGDBID
|
||||
newInfo.SteamID = oldInfo.SteamID
|
||||
newInfo.CreatedAt = oldInfo.CreatedAt
|
||||
}
|
||||
|
||||
func MergeGameItem(oldItem *model.GameItem, newItem *model.GameItem) {
|
||||
newItem.ID = oldItem.ID
|
||||
newItem.UpdatedAt = time.Now()
|
||||
}
|
||||
|
@ -5,7 +5,7 @@ import (
    "encoding/json"
    "os"

    "pcgamedb/model"
    "game-crawler/model"
)

func ImportGameInfo(filePath string) error {
@ -1,7 +1,7 @@
package db

import (
    "pcgamedb/model"
    "game-crawler/model"
)

func GetOnlineFixGameItems() ([]*model.GameItem, error) {
@ -1,7 +1,7 @@
package db

import (
    "pcgamedb/model"
    "game-crawler/model"
)

func GetXatabGameItems() ([]*model.GameItem, error) {
@ -1,22 +1,22 @@
services:
  pcgamedb:
    image: nite07/pcgamedb
  game-crawler:
    image: nite07/game-crawler
    restart: unless-stopped
    ports:
      - "8080:8080"
    environment:
      - LOG_LEVEL=info
      - SERVER_PORT=8080
      - DATABASE_HOST=pcgamedb-mongodb
      - DATABASE_HOST=game-crawler-mongodb
      - DATABASE_PORT=27017
      - DATABASE_USER=root
      - DATABASE_PASSWORD=password
      - DATABASE_NAME=pcgamedb
      - REDIS_HOST=pcgamedb-redis
      - DATABASE_NAME=game-crawler
      - REDIS_HOST=game-crawler-redis
      - REDIS_PORT=6379
      - REDIS_DB=0
      # Read more about environment variables: config/config.go
  pcgamedb-mongodb:
  game-crawler-mongodb:
    image: mongo:latest
    restart: unless-stopped
    environment:
@ -24,7 +24,7 @@ services:
      MONGO_INITDB_ROOT_PASSWORD: password
    volumes:
      - ./mongodb:/data/db
  pcgamedb-redis:
  game-crawler-redis:
    image: redis:latest
    volumes:
      - ./redis:/data
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
14
go.mod
@ -1,6 +1,6 @@
module pcgamedb
module game-crawler

go 1.23
go 1.23.3

require (
    github.com/PuerkitoBio/goquery v1.10.0
@ -9,6 +9,7 @@ require (
    github.com/bogdanfinn/tls-client v1.7.8
    github.com/btcsuite/btcutil v1.0.2
    github.com/gin-contrib/cors v1.7.2
    github.com/gin-contrib/multitemplate v1.0.1
    github.com/gin-gonic/gin v1.10.0
    github.com/redis/go-redis/v9 v9.7.0
    github.com/robfig/cron/v3 v3.0.1
@ -24,6 +25,9 @@ require (
)

require (
    git.nite07.com/nite/ccs v0.0.0-20241218080306-1c4bbc6520c3 // indirect
    github.com/Danny-Dasilva/CycleTLS/cycletls v1.0.26 // indirect
    github.com/Danny-Dasilva/fhttp v0.0.0-20240217042913-eeeb0b347ce1 // indirect
    github.com/KyleBanks/depth v1.2.1 // indirect
    github.com/anacrolix/generics v0.0.3-0.20240902042256-7fb2702ef0ca // indirect
    github.com/anacrolix/missinggo v1.3.0 // indirect
@ -48,8 +52,10 @@ require (
    github.com/go-playground/locales v0.14.1 // indirect
    github.com/go-playground/universal-translator v0.18.1 // indirect
    github.com/go-playground/validator/v10 v10.23.0 // indirect
    github.com/go-resty/resty/v2 v2.16.2 // indirect
    github.com/goccy/go-json v0.10.3 // indirect
    github.com/golang/snappy v0.0.4 // indirect
    github.com/gorilla/websocket v1.5.3 // indirect
    github.com/huandu/xstrings v1.5.0 // indirect
    github.com/inconshreveable/mousetrap v1.1.0 // indirect
    github.com/josharian/intern v1.0.0 // indirect
@ -67,7 +73,8 @@ require (
    github.com/multiformats/go-multihash v0.2.3 // indirect
    github.com/multiformats/go-varint v0.0.7 // indirect
    github.com/pelletier/go-toml/v2 v2.2.3 // indirect
    github.com/quic-go/quic-go v0.48.1 // indirect
    github.com/quic-go/quic-go v0.48.2 // indirect
    github.com/refraction-networking/utls v1.6.7 // indirect
    github.com/spaolacci/murmur3 v1.1.0 // indirect
    github.com/spf13/pflag v1.0.5 // indirect
    github.com/tam7t/hpkp v0.0.0-20160821193359-2b70b4024ed5 // indirect
@ -86,5 +93,6 @@ require (
    golang.org/x/tools v0.27.0 // indirect
    google.golang.org/protobuf v1.35.2 // indirect
    gopkg.in/yaml.v3 v3.0.1 // indirect
    h12.io/socks v1.0.3 // indirect
    lukechampine.com/blake3 v1.3.0 // indirect
)
280
go.sum
@ -1,8 +1,37 @@
|
||||
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
|
||||
cloud.google.com/go v0.31.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
|
||||
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
|
||||
cloud.google.com/go v0.37.0/go.mod h1:TS1dMSSfndXH133OKGwekG838Om/cQT0BUHV3HcBgoo=
|
||||
crawshaw.io/iox v0.0.0-20181124134642-c51c3df30797/go.mod h1:sXBiorCo8c46JlQV3oXPKINnZ8mcqnye1EkVkqsectk=
|
||||
crawshaw.io/sqlite v0.3.2/go.mod h1:igAO5JulrQ1DbdZdtVq48mnZUBAPOeFzer7VhDWNtW4=
|
||||
dmitri.shuralyov.com/app/changes v0.0.0-20180602232624-0a106ad413e3/go.mod h1:Yl+fi1br7+Rr3LqpNJf1/uxUdtRUV+Tnj0o93V2B9MU=
|
||||
dmitri.shuralyov.com/html/belt v0.0.0-20180602232347-f7d459c86be0/go.mod h1:JLBrvjyP0v+ecvNYvCpyZgu5/xkfAUhi6wJj28eUfSU=
|
||||
dmitri.shuralyov.com/service/change v0.0.0-20181023043359-a85b471d5412/go.mod h1:a1inKt/atXimZ4Mv927x+r7UpyzRUf4emIoiiSC2TN4=
|
||||
dmitri.shuralyov.com/state v0.0.0-20180228185332-28bcc343414c/go.mod h1:0PRwlb0D6DFvNNtx+9ybjezNCa8XF0xaYcETyp6rHWU=
|
||||
git.apache.org/thrift.git v0.0.0-20180902110319-2566ecd5d999/go.mod h1:fPE2ZNJGynbRyZ4dJvy6G277gSllfV2HJqblrnkyeyg=
|
||||
git.nite07.com/nite/ccs v0.0.0-20241202053845-550f69c19ba2 h1:hsnRzTS+6Dd+ZFPe486Dg59X0azNOHviDabChc8dKKk=
|
||||
git.nite07.com/nite/ccs v0.0.0-20241202053845-550f69c19ba2/go.mod h1:+kZxYKbZJ3igYXdgCStq+SocI2Wy0fE0RaGqW6YD71w=
|
||||
git.nite07.com/nite/ccs v0.0.0-20241203153655-eff00a6afd7e h1:SJVt97JqleQiRNP6HpH5OWSneXhBs8bhaQ0Em20KDl4=
|
||||
git.nite07.com/nite/ccs v0.0.0-20241203153655-eff00a6afd7e/go.mod h1:+kZxYKbZJ3igYXdgCStq+SocI2Wy0fE0RaGqW6YD71w=
|
||||
git.nite07.com/nite/ccs v0.0.0-20241203154730-1ecdcb99a31b h1:5g2GTuFxIfba8Tm8liHW6FJQxbjiIu3NmYdr5u7OaiM=
|
||||
git.nite07.com/nite/ccs v0.0.0-20241203154730-1ecdcb99a31b/go.mod h1:+kZxYKbZJ3igYXdgCStq+SocI2Wy0fE0RaGqW6YD71w=
|
||||
git.nite07.com/nite/ccs v0.0.0-20241203155017-60608c26cf61 h1:BjsC5e1NuReTuThi8BT4a4e6SFBQJ3C3R6XchhDF01s=
|
||||
git.nite07.com/nite/ccs v0.0.0-20241203155017-60608c26cf61/go.mod h1:+kZxYKbZJ3igYXdgCStq+SocI2Wy0fE0RaGqW6YD71w=
|
||||
git.nite07.com/nite/ccs v0.0.0-20241203155427-10e314ae7eff h1:f5OEMRc/zhMxSfHdTJt3dW73NELywBlRxShenb/uaUM=
|
||||
git.nite07.com/nite/ccs v0.0.0-20241203155427-10e314ae7eff/go.mod h1:+kZxYKbZJ3igYXdgCStq+SocI2Wy0fE0RaGqW6YD71w=
|
||||
git.nite07.com/nite/ccs v0.0.0-20241203155655-662e1dc6e580 h1:B2ewPM44DgyrkycIrUfyTRLM7mXggA0JE8pNbvxjFKw=
|
||||
git.nite07.com/nite/ccs v0.0.0-20241203155655-662e1dc6e580/go.mod h1:+kZxYKbZJ3igYXdgCStq+SocI2Wy0fE0RaGqW6YD71w=
|
||||
git.nite07.com/nite/ccs v0.0.0-20241204132531-f6469471bb6c h1:UcIxgKmcQGZqjTJWsQf9MVDviUQFlU+ZK6HJjpW+nAU=
|
||||
git.nite07.com/nite/ccs v0.0.0-20241204132531-f6469471bb6c/go.mod h1:+kZxYKbZJ3igYXdgCStq+SocI2Wy0fE0RaGqW6YD71w=
|
||||
git.nite07.com/nite/ccs v0.0.0-20241204135023-d34ae7399760 h1:ZCMgQt2ILohQ3MSk6RVhnRY4fbQTJVLREbWZjLtVs+Y=
|
||||
git.nite07.com/nite/ccs v0.0.0-20241204135023-d34ae7399760/go.mod h1:+kZxYKbZJ3igYXdgCStq+SocI2Wy0fE0RaGqW6YD71w=
|
||||
git.nite07.com/nite/ccs v0.0.0-20241218080306-1c4bbc6520c3 h1:AgG6cZYX7CSzBnJ2jZ9T0ATbutHWbKfrJSPbWRHUW14=
|
||||
git.nite07.com/nite/ccs v0.0.0-20241218080306-1c4bbc6520c3/go.mod h1:+kZxYKbZJ3igYXdgCStq+SocI2Wy0fE0RaGqW6YD71w=
|
||||
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||
github.com/Danny-Dasilva/CycleTLS/cycletls v1.0.26 h1:6fexoGmvzoXMSk14BZ0AirapVm5c3KUsEjE0jLlVKi8=
|
||||
github.com/Danny-Dasilva/CycleTLS/cycletls v1.0.26/go.mod h1:QFi/EVO7qqru3Ftxz1LR+96jIc91Tifv0DnskF/gWQ8=
|
||||
github.com/Danny-Dasilva/fhttp v0.0.0-20240217042913-eeeb0b347ce1 h1:/lqhaiz7xdPr6kuaW1tQ/8DdpWdxkdyd9W/6EHz4oRw=
|
||||
github.com/Danny-Dasilva/fhttp v0.0.0-20240217042913-eeeb0b347ce1/go.mod h1:Hvab/V/YKCDXsEpKYKHjAXH5IFOmoq9FsfxjztEqvDc=
|
||||
github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc=
|
||||
github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE=
|
||||
github.com/PuerkitoBio/goquery v1.10.0 h1:6fiXdLuUvYs2OJSvNRqlNPoBm6YABE226xrbavY5Wv4=
|
||||
@ -42,10 +71,12 @@ github.com/anacrolix/tagflag v1.0.0/go.mod h1:1m2U/K6ZT+JZG0+bdMK6qauP49QT4wE5pm
|
||||
github.com/anacrolix/tagflag v1.1.0/go.mod h1:Scxs9CV10NQatSmbyjqmqmeQNwGzlNe0CMUMIxqHIG8=
|
||||
github.com/anacrolix/torrent v1.57.1 h1:CS8rYfC2Oe15NPBhwCNs/3WBY6HiBCPDFpY+s9aFHbA=
|
||||
github.com/anacrolix/torrent v1.57.1/go.mod h1:NNBg4lP2/us9Hp5+cLNcZRILM69cNoKIkqMGqr9AuR0=
|
||||
github.com/andybalholm/brotli v1.0.5/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
|
||||
github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7XdTA=
|
||||
github.com/andybalholm/brotli v1.1.1/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA=
|
||||
github.com/andybalholm/cascadia v1.3.2 h1:3Xi6Dw5lHF15JtdcmAHD3i1+T8plmv7BQ/nsViSLyss=
|
||||
github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU=
|
||||
github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c=
|
||||
github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
|
||||
github.com/benbjohnson/immutable v0.2.0/go.mod h1:uc6OHo6PN2++n98KHLxW8ef4W42ylHiQSENghE1ezxI=
|
||||
github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
|
||||
@ -57,6 +88,7 @@ github.com/bogdanfinn/tls-client v1.7.8 h1:oAr+Ox0syJjgcY5ibKLb+r1ofabB1WDysNTY5
|
||||
github.com/bogdanfinn/tls-client v1.7.8/go.mod h1:pQwF0eqfL0gf0mu8hikvu6deZ3ijSPruJDzEKEnnXjU=
|
||||
github.com/bogdanfinn/utls v1.6.1 h1:dKDYAcXEyFFJ3GaWaN89DEyjyRraD1qb4osdEK89ass=
|
||||
github.com/bogdanfinn/utls v1.6.1/go.mod h1:VXIbRZaiY/wHZc6Hu+DZ4O2CgTzjhjCg/Ou3V4r/39Y=
|
||||
github.com/bradfitz/go-smtpd v0.0.0-20170404230938-deb6d6237625/go.mod h1:HYsPBTaaSFSlLx/70C2HPIMNZpVV8+vt/A+FMnYP11g=
|
||||
github.com/bradfitz/iter v0.0.0-20140124041915-454541ec3da2/go.mod h1:PyRFw1Lt2wKX4ZVSQ2mk+PeDa1rxyObEDlApuIsUKuo=
|
||||
github.com/bradfitz/iter v0.0.0-20190303215204-33e6a9893b0c/go.mod h1:PyRFw1Lt2wKX4ZVSQ2mk+PeDa1rxyObEDlApuIsUKuo=
|
||||
github.com/bradfitz/iter v0.0.0-20191230175014-e8f45d346db8 h1:GKTyiRCL6zVf5wWaqKnf+7Qs6GbEPfd4iMOitWzXJx8=
|
||||
@ -75,6 +107,8 @@ github.com/btcsuite/goleveldb v0.0.0-20160330041536-7834afc9e8cd/go.mod h1:F+uVa
|
||||
github.com/btcsuite/snappy-go v0.0.0-20151229074030-0bdef8d06723/go.mod h1:8woku9dyThutzjeg+3xrA5iCpBRH8XEEg3lh6TiUghc=
|
||||
github.com/btcsuite/websocket v0.0.0-20150119174127-31079b680792/go.mod h1:ghJtEyQwv5/p4Mg4C0fgbePVuGr935/5ddU9Z3TmDRY=
|
||||
github.com/btcsuite/winsvc v1.0.0/go.mod h1:jsenWakMcC0zFBFurPLEAyrnc/teJEM1O46fmI40EZs=
|
||||
github.com/buger/jsonparser v0.0.0-20181115193947-bf1c66bbce23/go.mod h1:bbYlZJ7hK1yFx9hf58LP0zeX7UjIGs20ufpu3evjr+s=
|
||||
github.com/bwesterb/go-ristretto v1.2.3/go.mod h1:fUIoIZaG73pV5biE2Blr2xEzDoMj7NFEuV9ekS419A0=
|
||||
github.com/bytedance/sonic v1.12.4 h1:9Csb3c9ZJhfUWeMtpCDCq6BUoH5ogfDFLUgQ/jG+R0k=
|
||||
github.com/bytedance/sonic v1.12.4/go.mod h1:B8Gt/XvtZ3Fqj+iSKMypzymZxw/FVwgIGKzMzT9r/rk=
|
||||
github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
|
||||
@ -83,13 +117,18 @@ github.com/bytedance/sonic/loader v0.2.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4
|
||||
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
|
||||
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
|
||||
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
|
||||
github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
|
||||
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
|
||||
github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
|
||||
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
|
||||
github.com/cloudflare/circl v1.3.3/go.mod h1:5XYMA4rFBvNIrhs50XuiBJ15vF2pZn4nnUKZrLbUZFA=
|
||||
github.com/cloudflare/circl v1.5.0 h1:hxIWksrX6XN5a1L2TI/h53AGPhNHoUBo+TD1ms9+pys=
|
||||
github.com/cloudflare/circl v1.5.0/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs=
|
||||
github.com/cloudwego/base64x v0.1.4 h1:jwCgWpFanWmN8xoIUHa2rtzmkd5J2plF/dnLS6Xd/0Y=
|
||||
github.com/cloudwego/base64x v0.1.4/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w=
|
||||
github.com/cloudwego/iasm v0.2.0 h1:1KNIy1I1H9hNNFEEH3DVnI4UujN+1zjpuk6gwHLTssg=
|
||||
github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY=
|
||||
github.com/coreos/go-systemd v0.0.0-20181012123002-c6f51f82210d/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
|
||||
github.com/davecgh/go-spew v0.0.0-20171005155431-ecdeabc65495/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
@ -103,29 +142,40 @@ github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25Kn
|
||||
github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs=
|
||||
github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU=
|
||||
github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I=
|
||||
github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc=
|
||||
github.com/francoispqt/gojay v1.2.13/go.mod h1:ehT5mTG4ua4581f1++1WLG0vPdaA9HaiDsoyrBGkyDY=
|
||||
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
|
||||
github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
|
||||
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
|
||||
github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
|
||||
github.com/gabriel-vasile/mimetype v1.4.6 h1:3+PzJTKLkvgjeTbts6msPJt4DixhT4YtFNf1gtGe3zc=
|
||||
github.com/gabriel-vasile/mimetype v1.4.6/go.mod h1:JX1qVKqZd40hUPpAfiNTe0Sne7hdfKSbOqqmkq8GCXc=
|
||||
github.com/gaukas/godicttls v0.0.4/go.mod h1:l6EenT4TLWgTdwslVb4sEMOCf7Bv0JAK67deKr9/NCI=
|
||||
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
|
||||
github.com/gin-contrib/cors v1.7.2 h1:oLDHxdg8W/XDoN/8zamqk/Drgt4oVZDvaV0YmvVICQw=
|
||||
github.com/gin-contrib/cors v1.7.2/go.mod h1:SUJVARKgQ40dmrzgXEVxj2m7Ig1v1qIboQkPDTQ9t2E=
|
||||
github.com/gin-contrib/gzip v0.0.6 h1:NjcunTcGAj5CO1gn4N8jHOSIeRFHIbn51z6K+xaN4d4=
|
||||
github.com/gin-contrib/gzip v0.0.6/go.mod h1:QOJlmV2xmayAjkNS2Y8NQsMneuRShOU/kjovCXNuzzk=
|
||||
github.com/gin-contrib/multitemplate v1.0.1 h1:Asi8boB7NctSoQzbWDosLObon0cYMP5OM+ihQMjlW5M=
|
||||
github.com/gin-contrib/multitemplate v1.0.1/go.mod h1:uU+PnuKoiEHWqB9Zvco+Kqv9KNrsHi6IZOUUgTctMPA=
|
||||
github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE=
|
||||
github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI=
|
||||
github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU=
|
||||
github.com/gin-gonic/gin v1.10.0/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y=
|
||||
github.com/gliderlabs/ssh v0.1.1/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0=
|
||||
github.com/glycerine/go-unsnap-stream v0.0.0-20180323001048-9f0cb55181dd/go.mod h1:/20jfyN9Y5QPEAprSgKAUr+glWDY39ZiUEAYOEv5dsE=
|
||||
github.com/glycerine/go-unsnap-stream v0.0.0-20181221182339-f9677308dec2/go.mod h1:/20jfyN9Y5QPEAprSgKAUr+glWDY39ZiUEAYOEv5dsE=
|
||||
github.com/glycerine/go-unsnap-stream v0.0.0-20190901134440-81cf024a9e0a/go.mod h1:/20jfyN9Y5QPEAprSgKAUr+glWDY39ZiUEAYOEv5dsE=
|
||||
github.com/glycerine/goconvey v0.0.0-20180728074245-46e3a41ad493/go.mod h1:Ogl1Tioa0aV7gstGFO7KhffUsb9M4ydbEbbxpcEDc24=
|
||||
github.com/glycerine/goconvey v0.0.0-20190315024820-982ee783a72e/go.mod h1:Ogl1Tioa0aV7gstGFO7KhffUsb9M4ydbEbbxpcEDc24=
|
||||
github.com/glycerine/goconvey v0.0.0-20190410193231-58a59202ab31/go.mod h1:Ogl1Tioa0aV7gstGFO7KhffUsb9M4ydbEbbxpcEDc24=
|
||||
github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q=
|
||||
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
|
||||
github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
|
||||
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
|
||||
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
|
||||
github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
|
||||
github.com/go-logr/logr v1.2.4/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
|
||||
github.com/go-openapi/jsonpointer v0.21.0 h1:YgdVicSA9vH5RiHs9TZW5oyafXZFc6+2Vc1rr/O9oNQ=
|
||||
github.com/go-openapi/jsonpointer v0.21.0/go.mod h1:IUyH9l/+uyhIYQ/PXVA41Rexl+kOkAPDdXEYns6fzUY=
|
||||
github.com/go-openapi/jsonreference v0.21.0 h1:Rs+Y7hSXT83Jacb7kFyjn4ijOuVGSvOdF2+tg1TRrwQ=
|
||||
@ -142,7 +192,11 @@ github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJn
|
||||
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
|
||||
github.com/go-playground/validator/v10 v10.23.0 h1:/PwmTwZhS0dPkav3cdK9kV1FsAmrL8sThn8IHr/sO+o=
|
||||
github.com/go-playground/validator/v10 v10.23.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
|
||||
github.com/go-resty/resty/v2 v2.16.2 h1:CpRqTjIzq/rweXUt9+GxzzQdlkqMdt8Lm/fuK/CAbAg=
|
||||
github.com/go-resty/resty/v2 v2.16.2/go.mod h1:0fHAoK7JoBy/Ch36N8VFeMsK7xQOHhvWaC3iOktwmIU=
|
||||
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
|
||||
github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE=
|
||||
github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572/go.mod h1:9Pwr4B2jHnOSGXyyzV8ROjYa2ojvAY6HCGYYfMoC3Ls=
|
||||
github.com/goccy/go-json v0.10.3 h1:KZ5WoDbxAIgm2HNbYckL0se1fHD6rz5j4ywS6ebzDqA=
|
||||
github.com/goccy/go-json v0.10.3/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
|
||||
github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
|
||||
@ -150,7 +204,10 @@ github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7a
|
||||
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
|
||||
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
|
||||
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
|
||||
github.com/golang/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:tluoj9z5200jBnyusfRPU2LqT6J+DAorxEvtC7LHB+E=
|
||||
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
|
||||
github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
|
||||
github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs=
|
||||
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
|
||||
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
|
||||
github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
|
||||
@ -159,25 +216,45 @@ github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:x
|
||||
github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs=
|
||||
github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w=
|
||||
github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0=
|
||||
github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
|
||||
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
|
||||
github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
|
||||
github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
|
||||
github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||
github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||
github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
|
||||
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||
github.com/google/btree v0.0.0-20180124185431-e89373fe6b4a/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
|
||||
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
|
||||
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
|
||||
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
|
||||
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
|
||||
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
|
||||
github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
|
||||
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/google/go-github v17.0.0+incompatible/go.mod h1:zLgOLi98H3fifZn+44m+umXrS52loVEgC2AApnigrVQ=
|
||||
github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck=
|
||||
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
|
||||
github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
|
||||
github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
|
||||
github.com/googleapis/gax-go v2.0.0+incompatible/go.mod h1:SFVmujtThgffbyetf+mdk2eWhX2bMyUtNHzFKcPA9HY=
|
||||
github.com/googleapis/gax-go/v2 v2.0.3/go.mod h1:LLvjysVCY1JZeum8Z6l8qUty8fiNwE08qbEPm1M08qg=
|
||||
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
|
||||
github.com/gopherjs/gopherjs v0.0.0-20181103185306-d547d1d9531e/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
|
||||
github.com/gopherjs/gopherjs v0.0.0-20190309154008-847fc94819f9/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
|
||||
github.com/gopherjs/gopherjs v0.0.0-20190910122728-9d188e94fb99/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
|
||||
github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg=
|
||||
github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
|
||||
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
|
||||
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
|
||||
github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA=
|
||||
github.com/grpc-ecosystem/grpc-gateway v1.5.0/go.mod h1:RSKVYQBd5MCa4OVpNdGskqpgL2+G+NZTnrVHpWWfpdw=
|
||||
github.com/h12w/go-socks5 v0.0.0-20200522160539-76189e178364/go.mod h1:eDJQioIyy4Yn3MVivT7rv/39gAJTrA7lgmYr8EW950c=
|
||||
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
|
||||
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
|
||||
github.com/huandu/xstrings v1.0.0/go.mod h1:4qWG/gcEcfX4z/mBDHJ++3ReCw9ibxbsNJbcucJdbSo=
|
||||
@ -185,8 +262,10 @@ github.com/huandu/xstrings v1.2.0/go.mod h1:DvyZB1rfVYsBIigL8HwpZgxHwXozlTgGqn63
|
||||
github.com/huandu/xstrings v1.3.1/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE=
|
||||
github.com/huandu/xstrings v1.5.0 h1:2ag3IFq9ZDANvthTwTiqSSZLjDc+BedvHPAp5tJy2TI=
|
||||
github.com/huandu/xstrings v1.5.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE=
|
||||
github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
|
||||
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
|
||||
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
|
||||
github.com/jellevandenhooff/dkim v0.0.0-20150330215556-f50fe3d243e1/go.mod h1:E0B/fFc00Y+Rasa88328GlI/XbtyysCtTHZS8h7IrBU=
|
||||
github.com/jessevdk/go-flags v0.0.0-20141203071132-1679536dcc89/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
|
||||
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
|
||||
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
|
||||
@ -195,11 +274,13 @@ github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCV
|
||||
github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
|
||||
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
|
||||
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
|
||||
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
|
||||
github.com/jtolds/gls v4.2.1+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
|
||||
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
|
||||
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
|
||||
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
|
||||
github.com/kkdai/bstream v0.0.0-20161212061736-f391b8402d23/go.mod h1:J+Gs4SYgM6CZQHDETBtE9HaSEkGmuNXF86RwHhHUvq4=
|
||||
github.com/klauspost/compress v1.16.7/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
|
||||
github.com/klauspost/compress v1.17.11 h1:In6xLpyWOi1+C7tXUUWv2ot1QvBjxevKAaI6IXrJmUc=
|
||||
github.com/klauspost/compress v1.17.11/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0=
|
||||
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
|
||||
@ -212,16 +293,20 @@ github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORN
|
||||
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
|
||||
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
|
||||
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||
github.com/kr/pty v1.1.3/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
||||
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
||||
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
||||
github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
|
||||
github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
|
||||
github.com/lunixbochs/vtclean v1.0.0/go.mod h1:pHhQNgMf3btfWnGBVipUOjRYhoOsdGqdm/+2c2E2WMI=
|
||||
github.com/mailru/easyjson v0.0.0-20190312143242-1de009706dbe/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
|
||||
github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
|
||||
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
|
||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
|
||||
github.com/microcosm-cc/bluemonday v1.0.1/go.mod h1:hsXNsILzKxV+sX77C5b8FSuKF00vh2OMYv+xgHpAMF4=
|
||||
github.com/minio/sha256-simd v1.0.1 h1:6kaan5IFmwTNynnKKpDHe6FWHohJOHhCPchzK49dzMM=
|
||||
github.com/minio/sha256-simd v1.0.1/go.mod h1:Pz6AKMiUdngCLpeTL/RJY1M9rUuPMYujV5xJjtbRSN8=
|
||||
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
@ -242,12 +327,47 @@ github.com/multiformats/go-multihash v0.2.3/go.mod h1:dXgKXCXjBzdscBLk9JkjINiEsC
|
||||
github.com/multiformats/go-varint v0.0.7 h1:sWSGR+f/eu5ABZA2ZpYKBILXTTs9JWpdEM/nEGOHFS8=
|
||||
github.com/multiformats/go-varint v0.0.7/go.mod h1:r8PUYw/fD/SjBCiKOoDlGF6QawOELpZAu9eioSos/OU=
|
||||
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
|
||||
github.com/neelance/astrewrite v0.0.0-20160511093645-99348263ae86/go.mod h1:kHJEU3ofeGjhHklVoIGuVj85JJwZ6kWPaJwCIxgnFmo=
|
||||
github.com/neelance/sourcemap v0.0.0-20151028013722-8c68805598ab/go.mod h1:Qr6/a/Q4r9LP1IltGz7tA7iOK1WonHEYhu1HRBA7ZiM=
|
||||
github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A=
|
||||
github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU=
|
||||
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
||||
github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
||||
github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk=
|
||||
github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0=
|
||||
github.com/onsi/ginkgo/v2 v2.1.3/go.mod h1:vw5CSIxN1JObi/U8gcbwft7ZxR2dgaR70JSE3/PpL4c=
|
||||
github.com/onsi/ginkgo/v2 v2.1.4/go.mod h1:um6tUpWM/cxCK3/FK8BXqEiUMUwRgSM4JXG47RKZmLU=
|
||||
github.com/onsi/ginkgo/v2 v2.1.6/go.mod h1:MEH45j8TBi6u9BMogfbp0stKC5cdGjumZj5Y7AG4VIk=
|
||||
github.com/onsi/ginkgo/v2 v2.2.0/go.mod h1:MEH45j8TBi6u9BMogfbp0stKC5cdGjumZj5Y7AG4VIk=
|
||||
github.com/onsi/ginkgo/v2 v2.3.0/go.mod h1:Eew0uilEqZmIEZr8JrvYlvOM7Rr6xzTmMV8AyFNU9d0=
|
||||
github.com/onsi/ginkgo/v2 v2.4.0/go.mod h1:iHkDK1fKGcBoEHT5W7YBq4RFWaQulw+caOMkAt4OrFo=
|
||||
github.com/onsi/ginkgo/v2 v2.5.0/go.mod h1:Luc4sArBICYCS8THh8v3i3i5CuSZO+RaQRaJoeNwomw=
|
||||
github.com/onsi/ginkgo/v2 v2.7.0/go.mod h1:yjiuMwPokqY1XauOgju45q3sJt6VzQ/Fict1LFVcsAo=
|
||||
github.com/onsi/ginkgo/v2 v2.8.1/go.mod h1:N1/NbDngAFcSLdyZ+/aYTYGSlq9qMCS/cNKGJjy+csc=
|
||||
github.com/onsi/ginkgo/v2 v2.9.0/go.mod h1:4xkjoL/tZv4SMWeww56BU5kAt19mVB47gTWxmrTcxyk=
|
||||
github.com/onsi/ginkgo/v2 v2.9.1/go.mod h1:FEcmzVcCHl+4o9bQZVab+4dC9+j+91t2FHSzmGAPfuo=
|
||||
github.com/onsi/ginkgo/v2 v2.9.2/go.mod h1:WHcJJG2dIlcCqVfBAwUCrJxSPFb6v4azBwgxeMeDuts=
|
||||
github.com/onsi/ginkgo/v2 v2.9.5/go.mod h1:tvAoo1QUJwNEU2ITftXTpR7R1RbCzoZUOs3RonqW57k=
|
||||
github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
|
||||
github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY=
|
||||
github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo=
|
||||
github.com/onsi/gomega v1.17.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY=
|
||||
github.com/onsi/gomega v1.19.0/go.mod h1:LY+I3pBVzYsTBU1AnDwOSxaYi9WoWiqgwooUqq9yPro=
|
||||
github.com/onsi/gomega v1.20.1/go.mod h1:DtrZpjmvpn2mPm4YWQa0/ALMDj9v4YxLgojwPeREyVo=
|
||||
github.com/onsi/gomega v1.21.1/go.mod h1:iYAIXgPSaDHak0LCMA+AWBpIKBr8WZicMxnE8luStNc=
|
||||
github.com/onsi/gomega v1.22.1/go.mod h1:x6n7VNe4hw0vkyYUM4mjIXx3JbLiPaBPNgB7PRQ1tuM=
|
||||
github.com/onsi/gomega v1.24.0/go.mod h1:Z/NWtiqwBrwUt4/2loMmHL63EDLnYHmVbuBpDr2vQAg=
|
||||
github.com/onsi/gomega v1.24.1/go.mod h1:3AOiACssS3/MajrniINInwbfOOtfZvplPzuRSmvt1jM=
|
||||
github.com/onsi/gomega v1.26.0/go.mod h1:r+zV744Re+DiYCIPRlYOTxn0YkOLcAnW8k1xXdMPGhM=
|
||||
github.com/onsi/gomega v1.27.1/go.mod h1:aHX5xOykVYzWOV4WqQy0sy8BQptgukenXpCXfadcIAw=
|
||||
github.com/onsi/gomega v1.27.3/go.mod h1:5vG284IBtfDAmDyrK+eGyZmUgUlmi+Wngqo557cZ6Gw=
|
||||
github.com/onsi/gomega v1.27.4/go.mod h1:riYq/GJKh8hhoM01HN6Vmuy93AarCXCBGpvFDK3q3fQ=
|
||||
github.com/onsi/gomega v1.27.6/go.mod h1:PIQNjfQwkP3aQAH7lf7j87O/5FiNr+ZR8+ipb+qQlhg=
|
||||
github.com/openzipkin/zipkin-go v0.1.1/go.mod h1:NtoC/o8u3JlF1lSlyPNswIbeQH9bJTmOf0Erfk+hxe8=
|
||||
github.com/openzipkin/zipkin-go v0.1.6/go.mod h1:QgAqvLzwWbR/WpD4A3cGpPtJrZXNIiJc5AZX7/PBEpw=
|
||||
github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M=
|
||||
github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc=
|
||||
github.com/phayes/freeport v0.0.0-20180830031419-95f893ade6f2/go.mod h1:iIss55rKnNBTvrwdmkUpLnDpZoAHvWaiq5+iMmen4AE=
|
||||
github.com/philhofer/fwd v1.0.0/go.mod h1:gk3iGcWd9+svBvR0sR+KPcfE+RNWozjowpeBVG3ZVNU=
|
||||
github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
|
||||
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
@ -255,6 +375,7 @@ github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINE
|
||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/prometheus/client_golang v0.8.0/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
|
||||
github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
|
||||
github.com/prometheus/client_golang v0.9.3-0.20190127221311-3c4408c8b829/go.mod h1:p2iRAGwDERtqlqzRXnrOVns+ignqQo//hLXqYxZYVNs=
|
||||
github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo=
|
||||
@ -263,31 +384,67 @@ github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:
|
||||
github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
|
||||
github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
|
||||
github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
|
||||
github.com/prometheus/common v0.0.0-20180801064454-c7de2306084e/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro=
|
||||
github.com/prometheus/common v0.2.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
|
||||
github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
|
||||
github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4=
|
||||
github.com/prometheus/procfs v0.0.0-20180725123919-05ee40e3a273/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
|
||||
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
|
||||
github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
|
||||
github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
|
||||
github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A=
|
||||
github.com/prometheus/procfs v0.0.11/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU=
|
||||
github.com/quic-go/qpack v0.4.0/go.mod h1:UZVnYIfi5GRk+zI9UMaCPsmZ2xKJP7XBUvVyT1Knj9A=
|
||||
github.com/quic-go/qtls-go1-20 v0.3.1/go.mod h1:X9Nh97ZL80Z+bX/gUXMbipO6OxdiDi58b/fMC9mAL+k=
|
||||
github.com/quic-go/quic-go v0.37.4/go.mod h1:YsbH1r4mSHPJcLF4k4zruUkLBqctEMBDR6VPvcYjIsU=
|
||||
github.com/quic-go/quic-go v0.48.1 h1:y/8xmfWI9qmGTc+lBr4jKRUWLGSlSigv847ULJ4hYXA=
|
||||
github.com/quic-go/quic-go v0.48.1/go.mod h1:yBgs3rWBOADpga7F+jJsb6Ybg1LSYiQvwWlLX+/6HMs=
|
||||
github.com/quic-go/quic-go v0.48.2 h1:wsKXZPeGWpMpCGSWqOcqpW2wZYic/8T3aqiOID0/KWE=
|
||||
github.com/quic-go/quic-go v0.48.2/go.mod h1:yBgs3rWBOADpga7F+jJsb6Ybg1LSYiQvwWlLX+/6HMs=
|
||||
github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
|
||||
github.com/redis/go-redis/v9 v9.7.0 h1:HhLSs+B6O021gwzl+locl0zEDnyNkxMtf/Z3NNBMa9E=
|
||||
github.com/redis/go-redis/v9 v9.7.0/go.mod h1:f6zhXITC7JUJIlPEiBOTXxJgPLdZcA93GewI7inzyWw=
|
||||
github.com/refraction-networking/utls v1.5.4/go.mod h1:SPuDbBmgLGp8s+HLNc83FuavwZCFoMmExj+ltUHiHUw=
|
||||
github.com/refraction-networking/utls v1.6.7 h1:zVJ7sP1dJx/WtVuITug3qYUq034cDq9B2MR1K67ULZM=
|
||||
github.com/refraction-networking/utls v1.6.7/go.mod h1:BC3O4vQzye5hqpmDTWUqi4P5DDhzJfkV1tdqtawQIH0=
|
||||
github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs=
|
||||
github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro=
|
||||
github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M=
|
||||
github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA=
|
||||
github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
|
||||
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
github.com/ryszard/goskiplist v0.0.0-20150312221310-2dfbae5fcf46/go.mod h1:uAQ5PCi+MFsC7HjREoAz1BU+Mq60+05gifQSsHSDG/8=
|
||||
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
|
||||
github.com/shurcooL/component v0.0.0-20170202220835-f88ec8f54cc4/go.mod h1:XhFIlyj5a1fBNx5aJTbKoIq0mNaPvOagO+HjB3EtxrY=
|
||||
github.com/shurcooL/events v0.0.0-20181021180414-410e4ca65f48/go.mod h1:5u70Mqkb5O5cxEA8nxTsgrgLehJeAw6Oc4Ab1c/P1HM=
|
||||
github.com/shurcooL/github_flavored_markdown v0.0.0-20181002035957-2122de532470/go.mod h1:2dOwnU2uBioM+SGy2aZoq1f/Sd1l9OkAeAUvjSyvgU0=
|
||||
github.com/shurcooL/go v0.0.0-20180423040247-9e1955d9fb6e/go.mod h1:TDJrrUr11Vxrven61rcy3hJMUqaf/CLWYhHNPmT14Lk=
|
||||
github.com/shurcooL/go-goon v0.0.0-20170922171312-37c2f522c041/go.mod h1:N5mDOmsrJOB+vfqUK+7DmDyjhSLIIBnXo9lvZJj3MWQ=
|
||||
github.com/shurcooL/gofontwoff v0.0.0-20180329035133-29b52fc0a18d/go.mod h1:05UtEgK5zq39gLST6uB0cf3NEHjETfB4Fgr3Gx5R9Vw=
|
||||
github.com/shurcooL/gopherjslib v0.0.0-20160914041154-feb6d3990c2c/go.mod h1:8d3azKNyqcHP1GaQE/c6dDgjkgSx2BZ4IoEi4F1reUI=
|
||||
github.com/shurcooL/highlight_diff v0.0.0-20170515013008-09bb4053de1b/go.mod h1:ZpfEhSmds4ytuByIcDnOLkTHGUI6KNqRNPDLHDk+mUU=
|
||||
github.com/shurcooL/highlight_go v0.0.0-20181028180052-98c3abbbae20/go.mod h1:UDKB5a1T23gOMUJrI+uSuH0VRDStOiUVSjBTRDVBVag=
|
||||
github.com/shurcooL/home v0.0.0-20181020052607-80b7ffcb30f9/go.mod h1:+rgNQw2P9ARFAs37qieuu7ohDNQ3gds9msbT2yn85sg=
|
||||
github.com/shurcooL/htmlg v0.0.0-20170918183704-d01228ac9e50/go.mod h1:zPn1wHpTIePGnXSHpsVPWEktKXHr6+SS6x/IKRb7cpw=
|
||||
github.com/shurcooL/httperror v0.0.0-20170206035902-86b7830d14cc/go.mod h1:aYMfkZ6DWSJPJ6c4Wwz3QtW22G7mf/PEgaB9k/ik5+Y=
|
||||
github.com/shurcooL/httpfs v0.0.0-20171119174359-809beceb2371/go.mod h1:ZY1cvUeJuFPAdZ/B6v7RHavJWZn2YPVFQ1OSXhCGOkg=
|
||||
github.com/shurcooL/httpgzip v0.0.0-20180522190206-b1c53ac65af9/go.mod h1:919LwcH0M7/W4fcZ0/jy0qGght1GIhqyS/EgWGH2j5Q=
|
||||
github.com/shurcooL/issues v0.0.0-20181008053335-6292fdc1e191/go.mod h1:e2qWDig5bLteJ4fwvDAc2NHzqFEthkqn7aOZAOpj+PQ=
|
||||
github.com/shurcooL/issuesapp v0.0.0-20180602232740-048589ce2241/go.mod h1:NPpHK2TI7iSaM0buivtFUc9offApnI0Alt/K8hcHy0I=
|
||||
github.com/shurcooL/notifications v0.0.0-20181007000457-627ab5aea122/go.mod h1:b5uSkrEVM1jQUspwbixRBhaIjIzL2xazXp6kntxYle0=
|
||||
github.com/shurcooL/octicon v0.0.0-20181028054416-fa4f57f9efb2/go.mod h1:eWdoE5JD4R5UVWDucdOPg1g2fqQRq78IQa9zlOV1vpQ=
|
||||
github.com/shurcooL/reactions v0.0.0-20181006231557-f2e0b4ca5b82/go.mod h1:TCR1lToEk4d2s07G3XGfz2QrgHXg4RJBvjrOozvoWfk=
|
||||
github.com/shurcooL/sanitized_anchor_name v0.0.0-20170918181015-86672fcb3f95/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
|
||||
github.com/shurcooL/users v0.0.0-20180125191416-49c67e49c537/go.mod h1:QJTqeLYEDaXHZDBsXlPCDqdhQuJkuw4NOtaxYe3xii4=
|
||||
github.com/shurcooL/webdavfs v0.0.0-20170829043945-18c3829fa133/go.mod h1:hKmq5kWdCj2z2KEozexVbfEZIWiTjhE0+UjmZgPqehw=
|
||||
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
|
||||
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
|
||||
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
|
||||
github.com/smartystreets/assertions v0.0.0-20190215210624-980c5ac6f3ac/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
|
||||
github.com/smartystreets/goconvey v0.0.0-20181108003508-044398e4856c/go.mod h1:XDJAKZRPZ1CvBcN2aX5YOUTYGHki24fSF0Iv48Ibg0s=
|
||||
github.com/smartystreets/goconvey v0.0.0-20190306220146-200a235640ff/go.mod h1:KSQcGKpxUMHk3nbYzs/tIBAM2iDooCn0BmttHOJEbLs=
|
||||
github.com/sourcegraph/annotate v0.0.0-20160123013949-f4cad6c6324d/go.mod h1:UdhH50NIW0fCiwBSr0co2m7BnFLdv4fQTgdqdJTHFeE=
|
||||
github.com/sourcegraph/syntaxhighlight v0.0.0-20170531221838-bd320f5d308e/go.mod h1:HuIsMU8RRBOtsCgI77wP899iHVBQpCmg4ErYMZB+2IA=
|
||||
github.com/spaolacci/murmur3 v1.1.0 h1:7c1g84S4BPRrfL5Xrdp6fOJ206sU9y293DDHaoy0bLI=
|
||||
github.com/spaolacci/murmur3 v1.1.0/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
|
||||
github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM=
|
||||
@ -302,6 +459,8 @@ github.com/stretchr/testify v1.2.1/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXf
|
||||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
|
||||
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
|
||||
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||
@ -316,6 +475,7 @@ github.com/swaggo/swag v1.16.4 h1:clWJtd9LStiG3VeijiCfOVODP6VpHtKdQy9ELFG3s1A=
|
||||
github.com/swaggo/swag v1.16.4/go.mod h1:VBsHJRsDvfYvqoiMKnsdwhNV9LEMHgEDZcyVYX0sxPg=
|
||||
github.com/tam7t/hpkp v0.0.0-20160821193359-2b70b4024ed5 h1:YqAladjX7xpA6BM04leXMWAEjS0mTZ5kUU9KRBriQJc=
|
||||
github.com/tam7t/hpkp v0.0.0-20160821193359-2b70b4024ed5/go.mod h1:2JjD2zLQYH5HO74y5+aE3remJQvl6q4Sn6aWA2wD1Ng=
|
||||
github.com/tarm/serial v0.0.0-20180830185346-98f6abe2eb07/go.mod h1:kDXzergiv9cbyO7IOYJZWg1U88JhDg3PB6klq9Hg2pA=
|
||||
github.com/tinylib/msgp v1.0.2/go.mod h1:+d+yLhGm8mzTaHzB+wgMYrodPfmZrzkirds8fDWklFE=
|
||||
github.com/tinylib/msgp v1.1.0/go.mod h1:+d+yLhGm8mzTaHzB+wgMYrodPfmZrzkirds8fDWklFE=
|
||||
github.com/tinylib/msgp v1.1.2/go.mod h1:+d+yLhGm8mzTaHzB+wgMYrodPfmZrzkirds8fDWklFE=
|
||||
@ -323,6 +483,8 @@ github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS
|
||||
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
|
||||
github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE=
|
||||
github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
|
||||
github.com/viant/assertly v0.4.8/go.mod h1:aGifi++jvCrUaklKEKT0BU95igDNaqkvz+49uaYMPRU=
|
||||
github.com/viant/toolbox v0.24.0/go.mod h1:OxMCG57V0PXuIP2HNQrtJf2CjqdmbrOx5EkMILuUhzM=
|
||||
github.com/willf/bitset v1.1.9/go.mod h1:RjeCKbqT1RxIR/KWY6phxZiaY1IyutSBfGjNPySAYV4=
|
||||
github.com/willf/bitset v1.1.10/go.mod h1:RjeCKbqT1RxIR/KWY6phxZiaY1IyutSBfGjNPySAYV4=
|
||||
github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c=
|
||||
@ -335,9 +497,13 @@ github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZ
|
||||
github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E=
|
||||
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 h1:ilQV1hzziu+LLM3zUTJ0trRztfwgjqKnBWNtSRkbmwM=
|
||||
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78/go.mod h1:aL8wCCfTfSfmXjznFBSZNN13rSJjlIOI1fUNAtF7rmI=
|
||||
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||
github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
|
||||
github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
|
||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||
go.mongodb.org/mongo-driver v1.17.1 h1:Wic5cJIwJgSpBhe3lx3+/RybR5PiYRMpVFgO7cOHyIM=
|
||||
go.mongodb.org/mongo-driver v1.17.1/go.mod h1:wwWm/+BuOddhcq3n68LKRmgk2wXzmF6s0SFOa0GINL4=
|
||||
go.opencensus.io v0.18.0/go.mod h1:vKdFvxhtzZ9onBp9VKHK8z/sRpBMnKAsufL7wlDrCOA=
|
||||
go.opencensus.io v0.20.1/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk=
|
||||
go.opencensus.io v0.20.2/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk=
|
||||
go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
|
||||
@ -347,109 +513,210 @@ go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0=
|
||||
go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
|
||||
go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8=
|
||||
go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E=
|
||||
go4.org v0.0.0-20180809161055-417644f6feb5/go.mod h1:MkTOUMDaeVYJUOUsaDXIhWPZYa1yOyC1qaOBpL57BhE=
|
||||
golang.org/x/arch v0.12.0 h1:UsYJhbzPYGsT0HbEdmYcqtCv8UNGvnaL561NnIUvaKg=
|
||||
golang.org/x/arch v0.12.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
|
||||
golang.org/x/build v0.0.0-20190111050920-041ab4dc3f9d/go.mod h1:OWs+y06UdEOHN4y+MfF/py+xQ/tYqIWW03b70/CG9Rw=
|
||||
golang.org/x/crypto v0.0.0-20170930174604-9419663f5a44/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
||||
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
||||
golang.org/x/crypto v0.0.0-20181030102418-4d3f4d9ffa16/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20190313024323-a1f597ede03a/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20200115085410-6d4e4cb37c7d/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/crypto v0.1.0/go.mod h1:RecgLatLF4+eUMCP1PoPZQb+cVrJcOPbHkTkbkB9sbw=
|
||||
golang.org/x/crypto v0.3.1-0.20221117191849-2c476679df9a/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4=
|
||||
golang.org/x/crypto v0.4.0/go.mod h1:3quD/ATkf6oY+rnes5c3ExXTbLc8mueNue5/DoinL80=
|
||||
golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw=
|
||||
golang.org/x/crypto v0.29.0 h1:L5SG1JTTXupVV3n6sUqMTeWbjAyfPwoda2DLX8J8FrQ=
|
||||
golang.org/x/crypto v0.29.0/go.mod h1:+F4F4N5hv6v38hfeYwTdx20oUvLLc+QfrE9Ax9HtgRg=
|
||||
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
golang.org/x/exp v0.0.0-20221205204356-47842c84f3db/go.mod h1:CxIveKay+FTh1D0yPZemJVgC/95VzuuOLq5Qi4xnoYc=
|
||||
golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f h1:XdNn9LlyWAhLVp6P/i8QYBW+hlyhrhei9uErw2B5GJo=
|
||||
golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f/go.mod h1:D5SMRVC3C2/4+F/DB1wZsLRnSNimn2Sp/NPsCrsv8ak=
|
||||
golang.org/x/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
|
||||
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
|
||||
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
|
||||
golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
|
||||
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
|
||||
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/mod v0.6.0/go.mod h1:4mET923SAdbXp2ki8ey+zGs1SLqsuM2Y0uvdZR/fUNI=
|
||||
golang.org/x/mod v0.7.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/mod v0.10.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4=
|
||||
golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY=
|
||||
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20181029044818-c44066c5c816/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20181106065722-10aee1819953/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20190125091013-d26f9f9a57f3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190313220215-9f648a60d977/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
|
||||
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
|
||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
|
||||
golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk=
|
||||
golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
|
||||
golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
|
||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||
golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco=
|
||||
golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY=
|
||||
golang.org/x/net v0.3.0/go.mod h1:MBQ8lrhLObU/6UmLb4fmbmk5OcyYmqtbGd/9yIeKjEE=
|
||||
golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws=
|
||||
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||
golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||
golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc=
|
||||
golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns=
|
||||
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
|
||||
golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI=
|
||||
golang.org/x/net v0.31.0 h1:68CPQngjLL0r2AlUKiSxtQFKvzRVbnzLwMUn5SzcLHo=
|
||||
golang.org/x/net v0.31.0/go.mod h1:P4fl1q7dY2hnZFxEk4pPSkDHF+QqjitcnDjUQyMM+pM=
|
||||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||
golang.org/x/oauth2 v0.0.0-20181017192945-9dcd33a902f4/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||
golang.org/x/oauth2 v0.0.0-20181203162652-d668ce993890/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
golang.org/x/perf v0.0.0-20180704124530-6e6d33e29852/go.mod h1:JLpeXjPJfIyPr5TlbXLkXWLhP8nz10XfvxElABhCtcw=
|
||||
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.2.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ=
|
||||
golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20181029174526-d69651ed3497/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190316082340-a2f829d7f35f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220319134239-a9b59b0215f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220422013727-9388b58f7150/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s=
|
||||
golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc=
|
||||
golang.org/x/term v0.3.0/go.mod h1:q750SLmJuPmVoN1blW3UFBPREJfb1KmY3vwxfr+nFDA=
|
||||
golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ=
|
||||
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||
golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U=
|
||||
golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY=
|
||||
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
|
||||
golang.org/x/term v0.11.0/go.mod h1:zC9APTIj3jG3FdV/Ons+XE1riIZXG4aZ4GTHiPZJPIU=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
|
||||
golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||
golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||
golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||
golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
|
||||
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
|
||||
golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
|
||||
golang.org/x/text v0.20.0 h1:gK/Kv2otX8gz+wn7Rmb3vT96ZwuoxnQlY+HlJVj7Qug=
|
||||
golang.org/x/text v0.20.0/go.mod h1:D4IsuqiFMhST5bX19pQ9ikHC2GsaKyk/oF+pn3ducp4=
|
||||
golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20181030000716-a0a13e073c7b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
|
||||
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
||||
golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
|
||||
golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
||||
golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E=
|
||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||
golang.org/x/tools v0.2.0/go.mod h1:y4OqIKeOV/fWJetJ8bXPU1sEVniLMIyDAZWeHdV+NTA=
|
||||
golang.org/x/tools v0.4.0/go.mod h1:UE5sM2OK9E/d67R0ANs2xJizIymRP5gJU295PvKXxjQ=
|
||||
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
||||
golang.org/x/tools v0.7.0/go.mod h1:4pg6aUX35JBAogB10C9AtvVL+qowtN4pT3CGSQex14s=
|
||||
golang.org/x/tools v0.9.1/go.mod h1:owI94Op576fPu3cIGQeHs3joujW/2Oc6MtlxbF5dfNc=
|
||||
golang.org/x/tools v0.27.0 h1:qEKojBykQkQ4EynWy4S8Weg69NumxKdn40Fce3uc/8o=
|
||||
golang.org/x/tools v0.27.0/go.mod h1:sUi0ZgbwW9ZPAq26Ekut+weQPR5eIM6GQLQ1Yjm1H0Q=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
google.golang.org/api v0.0.0-20180910000450-7ca32eb868bf/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0=
|
||||
google.golang.org/api v0.0.0-20181030000543-1d582fd0359e/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0=
|
||||
google.golang.org/api v0.1.0/go.mod h1:UGEZY7KEX120AnNLIHFMKIo4obdJhkp2tPbaPlQx13Y=
|
||||
google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk=
|
||||
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
|
||||
google.golang.org/appengine v1.2.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
|
||||
google.golang.org/appengine v1.3.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
|
||||
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
|
||||
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
|
||||
google.golang.org/genproto v0.0.0-20180831171423-11092d34479b/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
|
||||
google.golang.org/genproto v0.0.0-20181029155118-b69ba1387ce2/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
|
||||
google.golang.org/genproto v0.0.0-20181202183823-bd91e49a0898/go.mod h1:7Ep/1NZk928CDR8SjdVbjWNpdIf6nzjE3BTgJDr2Atg=
|
||||
google.golang.org/genproto v0.0.0-20190306203927-b5d61aea6440/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
|
||||
google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
|
||||
google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
|
||||
google.golang.org/grpc v1.14.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw=
|
||||
google.golang.org/grpc v1.16.0/go.mod h1:0JHn/cJsOMiMfNA9+DeHDlAU7KAAB5GDlYFpa9MZMio=
|
||||
google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs=
|
||||
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
|
||||
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
|
||||
@ -458,6 +725,10 @@ google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ
|
||||
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
|
||||
google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE=
|
||||
google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo=
|
||||
google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
|
||||
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
|
||||
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
|
||||
google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
|
||||
google.golang.org/protobuf v1.35.2 h1:8Ar7bF+apOIoThw1EdZl0p1oWvMqTHmpA2fRTyZO8io=
|
||||
google.golang.org/protobuf v1.35.2/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
|
||||
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
|
||||
@ -467,6 +738,7 @@ gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8
|
||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
|
||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
|
||||
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
|
||||
gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw=
|
||||
gopkg.in/natefinch/lumberjack.v2 v2.2.1 h1:bBRl1b0OH9s/DuPhuXpNl+VtCaJXFZ5/uEFST95x9zc=
|
||||
gopkg.in/natefinch/lumberjack.v2 v2.2.1/go.mod h1:YD8tP3GAjkrDg1eZH7EGmyESg/lsYskCTPBJVb9jqSc=
|
||||
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
|
||||
@ -474,11 +746,19 @@ gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
grpc.go4.org v0.0.0-20170609214715-11d0a25b4919/go.mod h1:77eQGdRu53HpSqPFJFmuJdjuHRquDANNeA4x7B8WQ9o=
|
||||
h12.io/socks v1.0.3 h1:Ka3qaQewws4j4/eDQnOdpr4wXsC//dXtWvftlIcCQUo=
|
||||
h12.io/socks v1.0.3/go.mod h1:AIhxy1jOId/XCz9BO+EIgNL2rQiPTBNnOfnVnQ+3Eck=
|
||||
honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||
lukechampine.com/blake3 v1.3.0 h1:sJ3XhFINmHSrYCgl958hscfIa3bw8x4DqMP3u1YvoYE=
|
||||
lukechampine.com/blake3 v1.3.0/go.mod h1:0OFRp7fBtAylGVCO40o87sbupkyIGgbpv1+M1k1LM6k=
|
||||
nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50=
|
||||
sourcegraph.com/sourcegraph/go-diff v0.5.0/go.mod h1:kuch7UrkMzY0X+p9CRK03kfuPQ2zzQcaEFbx8wA8rck=
|
||||
sourcegraph.com/sqs/pbtypes v0.0.0-20180604144634-d3ebe8f20ae4/go.mod h1:ketZ/q3QxT9HOBeFhu6RdvsftgpsbFHBF5Cas6cDKZ0=
|
||||
|
10
log/log.go
@ -4,7 +4,7 @@ import (
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"pcgamedb/config"
|
||||
"game-crawler/config"
|
||||
|
||||
"go.uber.org/zap"
|
||||
"go.uber.org/zap/zapcore"
|
||||
@ -12,16 +12,14 @@ import (
|
||||
)
|
||||
|
||||
var Logger *zap.Logger
|
||||
var ConsoleLogger *zap.Logger
|
||||
var FileLogger *zap.Logger
|
||||
var TaskLogger *zap.Logger
|
||||
|
||||
func init() {
|
||||
fileCore, consoleCore, combinedCore, taskCore := buildZapCore(getZapLogLevel(config.Config.LogLevel))
|
||||
FileLogger = zap.New(fileCore, zap.AddCaller())
|
||||
ConsoleLogger = zap.New(consoleCore, zap.AddCaller())
|
||||
_, _, combinedCore, taskCore := buildZapCore(getZapLogLevel(config.Config.LogLevel))
|
||||
Logger = zap.New(combinedCore, zap.AddCaller())
|
||||
TaskLogger = zap.New(taskCore, zap.AddCaller())
|
||||
|
||||
zap.ReplaceGlobals(Logger)
|
||||
}
|
||||
|
||||
func buildZapCore(logLevel zapcore.Level) (fileCore zapcore.Core, consoleCore zapcore.Core, combinedCore zapcore.Core, taskCore zapcore.Core) {
|
||||
|
4
main.go
@ -3,8 +3,8 @@ package main
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"pcgamedb/cmd"
|
||||
"pcgamedb/log"
|
||||
"game-crawler/cmd"
|
||||
"game-crawler/log"
|
||||
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
@ -7,33 +7,50 @@ import (
|
||||
)
|
||||
|
||||
type GameInfo struct {
|
||||
ID primitive.ObjectID `json:"id" bson:"_id"`
|
||||
Name string `json:"name" bson:"name"`
|
||||
Description string `json:"description" bson:"description"`
|
||||
Aliases []string `json:"aliases" bson:"aliases"`
|
||||
Developers []string `json:"developers" bson:"developers"`
|
||||
Publishers []string `json:"publishers" bson:"publishers"`
|
||||
IGDBID int `json:"igdb_id" bson:"igdb_id"`
|
||||
SteamID int `json:"steam_id" bson:"steam_id"`
|
||||
Cover string `json:"cover" bson:"cover"`
|
||||
Languages []string `json:"languages" bson:"languages"`
|
||||
Screenshots []string `json:"screenshots" bson:"screenshots"`
|
||||
GameIDs []primitive.ObjectID `json:"game_ids" bson:"games"`
|
||||
Games []*GameItem `json:"game_downloads" bson:"-"`
|
||||
CreatedAt time.Time `json:"created_at" bson:"created_at"`
|
||||
UpdatedAt time.Time `json:"updated_at" bson:"updated_at"`
|
||||
ID primitive.ObjectID `json:"id" bson:"_id"`
|
||||
Name string `json:"name" bson:"name"`
|
||||
Description string `json:"description" bson:"description"`
|
||||
Aliases []string `json:"aliases" bson:"aliases"`
|
||||
Developers []string `json:"developers" bson:"developers"`
|
||||
Publishers []string `json:"publishers" bson:"publishers"`
|
||||
IGDBID int `json:"igdb_id" bson:"igdb_id"`
|
||||
SteamID int `json:"steam_id" bson:"steam_id"`
|
||||
Cover string `json:"cover" bson:"cover"`
|
||||
Languages []string `json:"languages" bson:"languages"`
|
||||
Screenshots []string `json:"screenshots" bson:"screenshots"`
|
||||
GameIDs []primitive.ObjectID `json:"game_ids" bson:"games"`
|
||||
Games []*GameItem `json:"games" bson:"-"`
|
||||
CreatedAt time.Time `json:"created_at" bson:"created_at"`
|
||||
UpdatedAt time.Time `json:"updated_at" bson:"updated_at"`
|
||||
FirstReleaseDate time.Time `json:"first_release_date" bson:"first_release_date"`
|
||||
GameEngines []string `json:"game_engines" bson:"game_engines"`
|
||||
GameModes []string `json:"game_modes" bson:"game_modes"`
|
||||
Genres []string `json:"genres" bson:"genres"`
|
||||
Themes []string `json:"themes" bson:"themes"`
|
||||
Platforms []string `json:"platforms" bson:"platforms"`
|
||||
PlayerPerspectives []string `json:"player_perspectives" bson:"player_perspectives"`
|
||||
SimilarGames []int `json:"similar_games" bson:"similar_games"`
|
||||
Videos []string `json:"videos" bson:"videos"`
|
||||
Websites []string `json:"websites" bson:"websites"`
|
||||
Collections []GameCollection `json:"collections" bson:"collections"`
|
||||
}
|
||||
|
||||
type GameCollection struct {
|
||||
Games []int `json:"games"`
|
||||
Name string `json:"name"`
|
||||
}
|
||||
|
||||
type GameItem struct {
|
||||
ID primitive.ObjectID `json:"id" bson:"_id"`
|
||||
Name string `json:"speculative_name" bson:"name"`
|
||||
RawName string `json:"raw_name,omitempty" bson:"raw_name"`
|
||||
Download string `json:"download_link,omitempty" bson:"download"`
|
||||
Downloads map[string]string `json:"downloads,omitempty" bson:"downloads"`
|
||||
Size string `json:"size,omitempty" bson:"size"`
|
||||
Url string `json:"url" bson:"url"`
|
||||
Password string `json:"password,omitempty" bson:"password"`
|
||||
Author string `json:"author,omitempty" bson:"author"`
|
||||
UpdateFlag string `json:"-" bson:"update_flag,omitempty"`
|
||||
Platform string `json:"platform,omitempty" bson:"platform"`
|
||||
UpdateFlag string `json:"update_flag,omitempty" bson:"update_flag"`
|
||||
CreatedAt time.Time `json:"created_at" bson:"created_at"`
|
||||
UpdatedAt time.Time `json:"updated_at" bson:"updated_at"`
|
||||
}
|
||||
|
232
model/igdb.go
@ -1,66 +1,184 @@
|
||||
package model
|
||||
|
||||
type IGDBGameDetail struct {
|
||||
ID int `json:"id,omitempty"`
|
||||
ParentGame int `json:"parent_game,omitempty"`
|
||||
AgeRatings []int `json:"age_ratings,omitempty"`
|
||||
ID int `json:"id"`
|
||||
AgeRatings []int `json:"age_ratings"`
|
||||
AggregatedRating float64 `json:"aggregated_rating"`
|
||||
AggregatedRatingCount int `json:"aggregated_rating_count"`
|
||||
Artworks []int `json:"artworks"`
|
||||
Category int `json:"category"`
|
||||
Cover struct {
|
||||
ID int `json:"id"`
|
||||
AlphaChannel bool `json:"alpha_channel"`
|
||||
Animated bool `json:"animated"`
|
||||
Game int `json:"game"`
|
||||
Height int `json:"height"`
|
||||
ImageID string `json:"image_id"`
|
||||
URL string `json:"url"`
|
||||
Width int `json:"width"`
|
||||
Checksum string `json:"checksum"`
|
||||
} `json:"cover"`
|
||||
CreatedAt int `json:"created_at"`
|
||||
Dlcs []int `json:"dlcs"`
|
||||
ExternalGames []int `json:"external_games"`
|
||||
FirstReleaseDate int `json:"first_release_date"`
|
||||
GameEngines []struct {
|
||||
ID int `json:"id"`
|
||||
Companies []int `json:"companies"`
|
||||
CreatedAt int `json:"created_at"`
|
||||
Logo int `json:"logo"`
|
||||
Name string `json:"name"`
|
||||
Platforms []int `json:"platforms"`
|
||||
Slug string `json:"slug"`
|
||||
UpdatedAt int `json:"updated_at"`
|
||||
URL string `json:"url"`
|
||||
Checksum string `json:"checksum"`
|
||||
} `json:"game_engines"`
|
||||
GameModes []struct {
|
||||
ID int `json:"id"`
|
||||
CreatedAt int `json:"created_at"`
|
||||
Name string `json:"name"`
|
||||
Slug string `json:"slug"`
|
||||
UpdatedAt int `json:"updated_at"`
|
||||
URL string `json:"url"`
|
||||
Checksum string `json:"checksum"`
|
||||
} `json:"game_modes"`
|
||||
Genres []struct {
|
||||
ID int `json:"id"`
|
||||
CreatedAt int `json:"created_at"`
|
||||
Name string `json:"name"`
|
||||
Slug string `json:"slug"`
|
||||
UpdatedAt int `json:"updated_at"`
|
||||
URL string `json:"url"`
|
||||
Checksum string `json:"checksum"`
|
||||
} `json:"genres"`
|
||||
Hypes int `json:"hypes"`
|
||||
InvolvedCompanies []struct {
|
||||
ID int `json:"id"`
|
||||
Company int `json:"company"`
|
||||
CreatedAt int `json:"created_at"`
|
||||
Developer bool `json:"developer"`
|
||||
Game int `json:"game"`
|
||||
Porting bool `json:"porting"`
|
||||
Publisher bool `json:"publisher"`
|
||||
Supporting bool `json:"supporting"`
|
||||
UpdatedAt int `json:"updated_at"`
|
||||
Checksum string `json:"checksum"`
|
||||
} `json:"involved_companies"`
|
||||
Name string `json:"name"`
|
||||
ParentGame int `json:"parent_game"`
|
||||
Platforms []struct {
|
||||
ID int `json:"id"`
|
||||
Abbreviation string `json:"abbreviation"`
|
||||
AlternativeName string `json:"alternative_name"`
|
||||
Category int `json:"category"`
|
||||
CreatedAt int `json:"created_at"`
|
||||
Name string `json:"name"`
|
||||
PlatformLogo int `json:"platform_logo"`
|
||||
Slug string `json:"slug"`
|
||||
UpdatedAt int `json:"updated_at"`
|
||||
URL string `json:"url"`
|
||||
Versions []int `json:"versions"`
|
||||
Websites []int `json:"websites"`
|
||||
Checksum string `json:"checksum"`
|
||||
Generation int `json:"generation,omitempty"`
|
||||
PlatformFamily int `json:"platform_family,omitempty"`
|
||||
Summary string `json:"summary,omitempty"`
|
||||
} `json:"platforms"`
|
||||
PlayerPerspectives []struct {
|
||||
ID int `json:"id"`
|
||||
CreatedAt int `json:"created_at"`
|
||||
Name string `json:"name"`
|
||||
Slug string `json:"slug"`
|
||||
UpdatedAt int `json:"updated_at"`
|
||||
URL string `json:"url"`
|
||||
Checksum string `json:"checksum"`
|
||||
} `json:"player_perspectives"`
|
||||
Rating float64 `json:"rating"`
|
||||
RatingCount int `json:"rating_count"`
|
||||
ReleaseDates []struct {
|
||||
ID int `json:"id"`
|
||||
Category int `json:"category"`
|
||||
CreatedAt int `json:"created_at"`
|
||||
Date int `json:"date"`
|
||||
Game int `json:"game"`
|
||||
Human string `json:"human"`
|
||||
M int `json:"m"`
|
||||
Platform int `json:"platform"`
|
||||
Region int `json:"region"`
|
||||
UpdatedAt int `json:"updated_at"`
|
||||
Y int `json:"y"`
|
||||
Checksum string `json:"checksum"`
|
||||
} `json:"release_dates"`
|
||||
Screenshots []struct {
|
||||
ID int `json:"id"`
|
||||
Game int `json:"game"`
|
||||
Height int `json:"height"`
|
||||
ImageID string `json:"image_id"`
|
||||
URL string `json:"url"`
|
||||
Width int `json:"width"`
|
||||
Checksum string `json:"checksum"`
|
||||
AlphaChannel bool `json:"alpha_channel,omitempty"`
|
||||
Animated bool `json:"animated,omitempty"`
|
||||
} `json:"screenshots"`
|
||||
SimilarGames []int `json:"similar_games"`
|
||||
Slug string `json:"slug"`
|
||||
Summary string `json:"summary"`
|
||||
Tags []int `json:"tags"`
|
||||
Themes []struct {
|
||||
ID int `json:"id"`
|
||||
CreatedAt int `json:"created_at"`
|
||||
Name string `json:"name"`
|
||||
Slug string `json:"slug"`
|
||||
UpdatedAt int `json:"updated_at"`
|
||||
URL string `json:"url"`
|
||||
Checksum string `json:"checksum"`
|
||||
} `json:"themes"`
|
||||
TotalRating float64 `json:"total_rating"`
|
||||
TotalRatingCount int `json:"total_rating_count"`
|
||||
UpdatedAt int `json:"updated_at"`
|
||||
URL string `json:"url"`
|
||||
Videos []struct {
|
||||
ID int `json:"id"`
|
||||
Game int `json:"game"`
|
||||
Name string `json:"name"`
|
||||
VideoID string `json:"video_id"`
|
||||
Checksum string `json:"checksum"`
|
||||
} `json:"videos"`
|
||||
Websites []struct {
|
||||
ID int `json:"id"`
|
||||
Category int `json:"category"`
|
||||
Game int `json:"game"`
|
||||
Trusted bool `json:"trusted"`
|
||||
URL string `json:"url"`
|
||||
Checksum string `json:"checksum"`
|
||||
} `json:"websites"`
|
||||
Checksum string `json:"checksum"`
|
||||
LanguageSupports []struct {
|
||||
ID int `json:"id"`
|
||||
Game int `json:"game"`
|
||||
Language int `json:"language"`
|
||||
LanguageSupportType int `json:"language_support_type"`
|
||||
CreatedAt int `json:"created_at"`
|
||||
UpdatedAt int `json:"updated_at"`
|
||||
Checksum string `json:"checksum"`
|
||||
} `json:"language_supports"`
|
||||
Collections []struct {
|
||||
ID int `json:"id"`
|
||||
CreatedAt int `json:"created_at"`
|
||||
Games []int `json:"games"`
|
||||
Name string `json:"name"`
|
||||
Slug string `json:"slug"`
|
||||
UpdatedAt int `json:"updated_at"`
|
||||
URL string `json:"url"`
|
||||
Checksum string `json:"checksum"`
|
||||
Type int `json:"type"`
|
||||
} `json:"collections"`
|
||||
VersionParent int `json:"version_parent,omitempty"`
|
||||
VersionTitle string `json:"version_title,omitempty"`
|
||||
AlternativeNames []struct {
|
||||
Name string `json:"name,omitempty"`
|
||||
} `json:"alternative_names,omitempty"`
|
||||
Category int `json:"category,omitempty"`
|
||||
Cover struct {
|
||||
URL string `json:"url,omitempty"`
|
||||
} `json:"cover,omitempty"`
|
||||
CreatedAt int `json:"created_at,omitempty"`
|
||||
ExternalGames []int `json:"external_games,omitempty"`
|
||||
FirstReleaseDate int `json:"first_release_date,omitempty"`
|
||||
Franchises []int `json:"franchises,omitempty"`
|
||||
GameModes []int `json:"game_modes,omitempty"`
|
||||
Genres []int `json:"genres,omitempty"`
|
||||
InvolvedCompanies []struct {
|
||||
Company int `json:"company,omitempty"`
|
||||
Developer bool `json:"developer,omitempty"`
|
||||
Publisher bool `json:"publisher,omitempty"`
|
||||
} `json:"involved_companies,omitempty"`
|
||||
Name string `json:"name,omitempty"`
|
||||
Platforms []int `json:"platforms,omitempty"`
|
||||
PlayerPerspectives []int `json:"player_perspectives,omitempty"`
|
||||
Rating float64 `json:"rating,omitempty"`
|
||||
RatingCount int `json:"rating_count,omitempty"`
|
||||
ReleaseDates []int `json:"release_dates,omitempty"`
|
||||
Screenshots []struct {
|
||||
URL string `json:"url,omitempty"`
|
||||
} `json:"screenshots,omitempty"`
|
||||
SimilarGames []int `json:"similar_games,omitempty"`
|
||||
Slug string `json:"slug,omitempty"`
|
||||
Summary string `json:"summary,omitempty"`
|
||||
Tags []int `json:"tags,omitempty"`
|
||||
Themes []int `json:"themes,omitempty"`
|
||||
TotalRating float64 `json:"total_rating,omitempty"`
|
||||
TotalRatingCount int `json:"total_rating_count,omitempty"`
|
||||
UpdatedAt int `json:"updated_at,omitempty"`
|
||||
URL string `json:"url,omitempty"`
|
||||
VersionParent int `json:"version_parent,omitempty"`
|
||||
VersionTitle string `json:"version_title,omitempty"`
|
||||
Checksum string `json:"checksum,omitempty"`
|
||||
Websites []int `json:"websites,omitempty"`
|
||||
GameLocalizations []int `json:"game_localizations,omitempty"`
|
||||
AggregatedRating float64 `json:"aggregated_rating,omitempty"`
|
||||
AggregatedRatingCount int `json:"aggregated_rating_count,omitempty"`
|
||||
Artworks []int `json:"artworks,omitempty"`
|
||||
Bundles []int `json:"bundles,omitempty"`
|
||||
Collection int `json:"collection,omitempty"`
|
||||
GameEngines []int `json:"game_engines,omitempty"`
|
||||
Keywords []int `json:"keywords,omitempty"`
|
||||
MultiplayerModes []int `json:"multiplayer_modes,omitempty"`
|
||||
StandaloneExpansions []int `json:"standalone_expansions,omitempty"`
|
||||
Storyline string `json:"storyline,omitempty"`
|
||||
Videos []int `json:"videos,omitempty"`
|
||||
LanguageSupports []struct {
|
||||
Language int `json:"language,omitempty"`
|
||||
LanguageSupportType int `json:"language_support_type,omitempty"`
|
||||
} `json:"language_supports,omitempty"`
|
||||
Collections []int `json:"collections,omitempty"`
|
||||
}
|
||||
|
||||
type IGDBGameDetails []*IGDBGameDetail
|
||||
|
@ -3,8 +3,8 @@ package handler
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"pcgamedb/log"
|
||||
"pcgamedb/task"
|
||||
"game-crawler/log"
|
||||
"game-crawler/task"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
)
|
||||
|
@ -3,7 +3,7 @@ package handler
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"pcgamedb/db"
|
||||
"game-crawler/db"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
"go.mongodb.org/mongo-driver/bson/primitive"
|
||||
|
@ -3,7 +3,7 @@ package handler
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"pcgamedb/db"
|
||||
"game-crawler/db"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
)
|
||||
|
@ -3,8 +3,8 @@ package handler
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"pcgamedb/db"
|
||||
"pcgamedb/model"
|
||||
"game-crawler/db"
|
||||
"game-crawler/model"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
)
|
||||
|
@ -3,8 +3,8 @@ package handler
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"pcgamedb/db"
|
||||
"pcgamedb/model"
|
||||
"game-crawler/db"
|
||||
"game-crawler/model"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
"go.mongodb.org/mongo-driver/bson/primitive"
|
||||
|
@ -3,8 +3,8 @@ package handler
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"pcgamedb/db"
|
||||
"pcgamedb/model"
|
||||
"game-crawler/db"
|
||||
"game-crawler/model"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
"go.mongodb.org/mongo-driver/mongo"
|
||||
@ -17,7 +17,7 @@ type GetGameItemByRawNameRequest struct {
|
||||
type GetGameItemByRawNameResponse struct {
|
||||
Status string `json:"status"`
|
||||
Message string `json:"message,omitempty"`
|
||||
GameItem []*model.GameItem `json:"game_downloads,omitempty"`
|
||||
GameItem []*model.GameItem `json:"games,omitempty"`
|
||||
}
|
||||
|
||||
// GetGameItemByRawName retrieves game download details by raw name.
|
||||
@ -26,7 +26,7 @@ type GetGameItemByRawNameResponse struct {
|
||||
// @Tags game
|
||||
// @Accept json
|
||||
// @Produce json
|
||||
// @Param name path string true "Game Download Raw Name"
|
||||
// @Param name path string true "Game Raw Name"
|
||||
// @Success 200 {object} GetGameItemByRawNameResponse
|
||||
// @Failure 400 {object} GetGameItemByRawNameResponse
|
||||
// @Failure 500 {object} GetGameItemByRawNameResponse
|
||||
|
@ -3,8 +3,8 @@ package handler
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"pcgamedb/db"
|
||||
"pcgamedb/model"
|
||||
"game-crawler/db"
|
||||
"game-crawler/model"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
"go.mongodb.org/mongo-driver/bson/primitive"
|
||||
|
@ -3,8 +3,8 @@ package handler
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"pcgamedb/db"
|
||||
"pcgamedb/model"
|
||||
"game-crawler/db"
|
||||
"game-crawler/model"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
"go.mongodb.org/mongo-driver/mongo"
|
||||
|
@ -3,8 +3,8 @@ package handler
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"pcgamedb/db"
|
||||
"pcgamedb/model"
|
||||
"game-crawler/db"
|
||||
"game-crawler/model"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
)
|
||||
@ -19,10 +19,10 @@ type GetGameItemsByAuthorResponse struct {
|
||||
Status string `json:"status"`
|
||||
Message string `json:"message,omitempty"`
|
||||
TotalPage int `json:"total_page"`
|
||||
GameItems []*model.GameItem `json:"game_downloads,omitempty"`
|
||||
GameItems []*model.GameItem `json:"games,omitempty"`
|
||||
}
|
||||
|
||||
// GetGameItemsByAuthorHandler returns all game downloads by author
|
||||
// GetGameItemsByAuthorHandler returns games by author
|
||||
// @Summary Get game downloads by author
|
||||
// @Description Get game downloads by author
|
||||
// @Tags game
|
||||
|
@ -1,11 +1,12 @@
|
||||
package handler
|
||||
|
||||
import (
|
||||
"github.com/gin-gonic/gin"
|
||||
"game-crawler/crawler"
|
||||
"game-crawler/db"
|
||||
"game-crawler/model"
|
||||
"net/http"
|
||||
"pcgamedb/crawler"
|
||||
"pcgamedb/db"
|
||||
"pcgamedb/model"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
)
|
||||
|
||||
type GetPopularGamesResponse struct {
|
||||
@ -20,7 +21,7 @@ type GetPopularGamesResponse struct {
|
||||
// @Tags popular
|
||||
// @Accept json
|
||||
// @Produce json
|
||||
// @Param type path string true "Type(igdb-most-visited, igdb-most-wanted-to-play, igdb-most-playing, igdb-most-played, steam-top, v, steam-best-of-the-year, steam-most-played)"
|
||||
// @Param type path string true "Type(igdb-most-visited, igdb-most-wanted-to-play, igdb-most-playing, igdb-most-played, steam-top, steam-week-top, steam-month-top, steam-best-of-the-year, steam-most-played)"
|
||||
// @Success 200 {object} GetPopularGamesResponse
|
||||
// @Failure 400 {object} GetPopularGamesResponse
|
||||
// @Failure 500 {object} GetPopularGamesResponse
|
||||
@ -52,6 +53,8 @@ func GetPopularGameInfosHandler(c *gin.Context) {
|
||||
steam250Func = crawler.GetSteam250BestOfTheYear
|
||||
case "steam-most-played":
|
||||
steam250Func = crawler.GetSteam250MostPlayed
|
||||
case "steam-month-top":
|
||||
steam250Func = crawler.GetSteam250MonthTop50
|
||||
default:
|
||||
c.JSON(http.StatusBadRequest, GetPopularGamesResponse{
|
||||
Status: "error",
|
||||
@ -68,7 +71,9 @@ func GetPopularGameInfosHandler(c *gin.Context) {
|
||||
Message: err.Error(),
|
||||
})
|
||||
}
|
||||
infos = infos[:10]
|
||||
if len(infos) > 10 {
|
||||
infos = infos[:10]
|
||||
}
|
||||
} else {
|
||||
offset := 0
|
||||
for len(infos) < 10 {
|
||||
@ -82,7 +87,7 @@ func GetPopularGameInfosHandler(c *gin.Context) {
|
||||
offset += 20
|
||||
pids := make([]int, 20)
|
||||
for _, id := range ids {
|
||||
pid, err := crawler.GetIGDBAppParentCache(id)
|
||||
pid, err := crawler.GetIGDBAppParent(id)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
@ -97,7 +102,9 @@ func GetPopularGameInfosHandler(c *gin.Context) {
|
||||
}
|
||||
infos = append(infos, newInfos...)
|
||||
}
|
||||
infos = infos[:10]
|
||||
if len(infos) > 10 {
|
||||
infos = infos[:10]
|
||||
}
|
||||
}
|
||||
c.JSON(http.StatusOK, GetPopularGamesResponse{
|
||||
Status: "ok",
|
||||
|
@ -3,8 +3,8 @@ package handler
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"pcgamedb/db"
|
||||
"pcgamedb/model"
|
||||
"game-crawler/db"
|
||||
"game-crawler/model"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
)
|
||||
@ -17,16 +17,16 @@ type GetUnorganizedGameItemsResponse struct {
|
||||
Status string `json:"status"`
|
||||
Message string `json:"message,omitempty"`
|
||||
Size int `json:"size,omitempty"`
|
||||
GameItems []*model.GameItem `json:"game_downloads,omitempty"`
|
||||
GameItems []*model.GameItem `json:"games,omitempty"`
|
||||
}
|
||||
|
||||
// GetUnorganizedGameItems retrieves a list of unorganized game downloads.
|
||||
// @Summary List unorganized game downloads
|
||||
// @Description Retrieves game downloads that have not been organized
|
||||
// GetUnorganizedGameItems retrieves a list of unorganized games.
|
||||
// @Summary List unorganized games
|
||||
// @Description Retrieves games that have not been organized
|
||||
// @Tags game
|
||||
// @Accept json
|
||||
// @Produce json
|
||||
// @Param num query int false "Number of game downloads to retrieve"
|
||||
// @Param num query int false "Number of games to retrieve"
|
||||
// @Success 200 {object} GetUnorganizedGameItemsResponse
|
||||
// @Failure 400 {object} GetUnorganizedGameItemsResponse
|
||||
// @Failure 500 {object} GetUnorganizedGameItemsResponse
|
||||
|
@ -6,28 +6,24 @@ import (
|
||||
"runtime"
|
||||
"time"
|
||||
|
||||
"pcgamedb/config"
|
||||
"pcgamedb/constant"
|
||||
"pcgamedb/db"
|
||||
"game-crawler/config"
|
||||
"game-crawler/constant"
|
||||
"game-crawler/db"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
)
|
||||
|
||||
type HealthCheckResponse struct {
|
||||
Version string `json:"version"`
|
||||
Status string `json:"status"`
|
||||
Message string `json:"message,omitempty"`
|
||||
Date string `json:"date"`
|
||||
Uptime string `json:"uptime"`
|
||||
Alloc string `json:"alloc"`
|
||||
AutoCrawl bool `json:"auto_crawl"`
|
||||
AutoCrawlCron string `json:"auto_crawl_cron,omitempty"`
|
||||
GameItem int64 `json:"game_download,omitempty"`
|
||||
GameInfo int64 `json:"game_info,omitempty"`
|
||||
Unorganized int64 `json:"unorganized,omitempty"`
|
||||
RedisAvaliable bool `json:"redis_avaliable"`
|
||||
OnlineFixAvaliable bool `json:"online_fix_avaliable"`
|
||||
MegaAvaliable bool `json:"mega_avaliable"`
|
||||
Version string `json:"version"`
|
||||
Status string `json:"status"`
|
||||
Date string `json:"date"`
|
||||
Uptime string `json:"uptime"`
|
||||
Alloc string `json:"alloc"`
|
||||
AutoCrawl bool `json:"auto_crawl"`
|
||||
AutoCrawlCron string `json:"auto_crawl_cron"`
|
||||
GameItem int64 `json:"game_num"`
|
||||
GameInfo int64 `json:"game_info_num"`
|
||||
Unorganized int64 `json:"unorganized_game_num"`
|
||||
}
|
||||
|
||||
// HealthCheckHandler performs a health check of the service.
|
||||
@ -50,18 +46,15 @@ func HealthCheckHandler(c *gin.Context) {
|
||||
unorganizedCount = int64(len(unorganized))
|
||||
}
|
||||
c.JSON(http.StatusOK, HealthCheckResponse{
|
||||
Status: "ok",
|
||||
Version: constant.Version,
|
||||
Date: time.Now().Format("2006-01-02 15:04:05"),
|
||||
Uptime: time.Since(config.Runtime.ServerStartTime).String(),
|
||||
AutoCrawl: config.Config.Server.AutoCrawl,
|
||||
AutoCrawlCron: config.Config.Server.AutoCrawlCron,
|
||||
Alloc: fmt.Sprintf("%.2f MB", float64(m.Alloc)/1024.0/1024.0),
|
||||
GameItem: downloadCount,
|
||||
GameInfo: infoCount,
|
||||
Unorganized: unorganizedCount,
|
||||
RedisAvaliable: config.Config.RedisAvaliable,
|
||||
OnlineFixAvaliable: config.Config.OnlineFixAvaliable,
|
||||
MegaAvaliable: config.Config.MegaAvaliable,
|
||||
Status: "ok",
|
||||
Version: constant.Version,
|
||||
Date: time.Now().Format("2006-01-02 15:04:05"),
|
||||
Uptime: time.Since(config.Runtime.ServerStartTime).String(),
|
||||
AutoCrawl: config.Config.Server.AutoCrawl,
|
||||
AutoCrawlCron: config.Config.Server.AutoCrawlCron,
|
||||
Alloc: fmt.Sprintf("%.2f MB", float64(m.Alloc)/1024.0/1024.0),
|
||||
GameItem: downloadCount,
|
||||
GameInfo: infoCount,
|
||||
Unorganized: unorganizedCount,
|
||||
})
|
||||
}
|
||||
|
@ -3,8 +3,8 @@ package handler
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"pcgamedb/crawler"
|
||||
"pcgamedb/model"
|
||||
"game-crawler/crawler"
|
||||
"game-crawler/model"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
"go.mongodb.org/mongo-driver/bson/primitive"
|
||||
|
@ -3,8 +3,8 @@ package handler
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"pcgamedb/db"
|
||||
"pcgamedb/model"
|
||||
"game-crawler/db"
|
||||
"game-crawler/model"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
)
|
||||
@ -53,7 +53,7 @@ func SearchGamesHandler(c *gin.Context) {
|
||||
if req.PageSize > 10 {
|
||||
req.PageSize = 10
|
||||
}
|
||||
items, totalPage, err := db.SearchGameInfosCache(req.Keyword, req.Page, req.PageSize)
|
||||
items, totalPage, err := db.SearchGameInfos(req.Keyword, req.Page, req.PageSize)
|
||||
if err != nil {
|
||||
c.JSON(http.StatusInternalServerError, SearchGamesResponse{
|
||||
Status: "error",
|
||||
|
@ -1,100 +0,0 @@
|
||||
package handler
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"strings"
|
||||
|
||||
"pcgamedb/crawler"
|
||||
"pcgamedb/db"
|
||||
"pcgamedb/model"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
"go.mongodb.org/mongo-driver/bson/primitive"
|
||||
)
|
||||
|
||||
type UpdateGameInfoRequest struct {
|
||||
GameID string `json:"game_id" binding:"required"`
|
||||
Platform string `json:"platform" binding:"required"`
|
||||
PlatformID int `json:"platform_id" binding:"required"`
|
||||
}
|
||||
|
||||
type UpdateGameInfoResponse struct {
|
||||
Status string `json:"status"`
|
||||
Message string `json:"message"`
|
||||
GameInfo *model.GameInfo `json:"game_info,omitempty"`
|
||||
}
|
||||
|
||||
// UpdateGameInfoHandler updates game information.
|
||||
// @Summary Update game info
|
||||
// @Description Updates details of a game
|
||||
// @Tags game
|
||||
// @Accept json
|
||||
// @Produce json
|
||||
// @Param Authorization header string true "Authorization: Bearer <api_key>"
|
||||
// @Param body body handler.UpdateGameInfoRequest true "Update Game Info Request"
|
||||
// @Success 200 {object} handler.UpdateGameInfoResponse
|
||||
// @Failure 400 {object} handler.UpdateGameInfoResponse
|
||||
// @Failure 401 {object} handler.UpdateGameInfoResponse
|
||||
// @Failure 500 {object} handler.UpdateGameInfoResponse
|
||||
// @Router /game/update [post]
|
||||
func UpdateGameInfoHandler(c *gin.Context) {
|
||||
var req UpdateGameInfoRequest
|
||||
if err := c.ShouldBindJSON(&req); err != nil {
|
||||
c.JSON(http.StatusBadRequest, UpdateGameInfoResponse{
|
||||
Status: "error",
|
||||
Message: err.Error(),
|
||||
})
|
||||
return
|
||||
}
|
||||
req.Platform = strings.ToLower(req.Platform)
|
||||
platformMap := map[string]bool{
|
||||
"steam": true,
|
||||
"igdb": true,
|
||||
}
|
||||
if _, ok := platformMap[req.Platform]; !ok {
|
||||
c.JSON(http.StatusBadRequest, UpdateGameInfoResponse{
|
||||
Status: "error",
|
||||
Message: "Invalid platform",
|
||||
})
|
||||
return
|
||||
}
|
||||
objID, err := primitive.ObjectIDFromHex(req.GameID)
|
||||
if err != nil {
|
||||
c.JSON(http.StatusBadRequest, UpdateGameInfoResponse{
|
||||
Status: "error",
|
||||
Message: err.Error(),
|
||||
})
|
||||
return
|
||||
}
|
||||
info, err := db.GetGameInfoByID(objID)
|
||||
if err != nil {
|
||||
c.JSON(http.StatusInternalServerError, UpdateGameInfoResponse{
|
||||
Status: "error",
|
||||
Message: err.Error(),
|
||||
})
|
||||
return
|
||||
}
|
||||
newInfo, err := crawler.GenerateGameInfo(req.Platform, req.PlatformID)
|
||||
if err != nil {
|
||||
c.JSON(http.StatusInternalServerError, UpdateGameInfoResponse{
|
||||
Status: "error",
|
||||
Message: err.Error(),
|
||||
})
|
||||
return
|
||||
}
|
||||
newInfo.ID = objID
|
||||
newInfo.GameIDs = info.GameIDs
|
||||
err = db.SaveGameInfo(newInfo)
|
||||
if err != nil {
|
||||
c.JSON(http.StatusInternalServerError, UpdateGameInfoResponse{
|
||||
Status: "error",
|
||||
Message: err.Error(),
|
||||
})
|
||||
return
|
||||
}
|
||||
c.JSON(http.StatusOK, UpdateGameInfoResponse{
|
||||
Status: "ok",
|
||||
Message: "Game info updated successfully",
|
||||
GameInfo: newInfo,
|
||||
})
|
||||
}
|
@ -4,7 +4,7 @@ import (
|
||||
"net/http"
|
||||
"strings"
|
||||
|
||||
"pcgamedb/config"
|
||||
"game-crawler/config"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
)
|
||||
|
@ -1,12 +1,11 @@
|
||||
package middleware
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
"fmt"
|
||||
"game-crawler/log"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"pcgamedb/log"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
@ -14,34 +13,61 @@ import (
|
||||
func Logger() gin.HandlerFunc {
|
||||
return func(c *gin.Context) {
|
||||
startTime := time.Now()
|
||||
path := c.Request.URL.Path
|
||||
raw := c.Request.URL.RawQuery
|
||||
|
||||
c.Next()
|
||||
|
||||
endTime := time.Now()
|
||||
latencyTime := endTime.Sub(startTime).Milliseconds()
|
||||
reqMethod := c.Request.Method
|
||||
reqURI := c.Request.RequestURI
|
||||
statusCode := c.Writer.Status()
|
||||
clientIP := c.ClientIP()
|
||||
|
||||
if strings.HasPrefix(reqURI, "/swagger/") ||
|
||||
strings.EqualFold(reqURI, "/favicon.ico") {
|
||||
if shouldSkipLog(path) {
|
||||
return
|
||||
}
|
||||
|
||||
log.Logger.Info(
|
||||
"request",
|
||||
zap.Int("code", statusCode),
|
||||
zap.String("method", reqMethod),
|
||||
zap.String("uri", reqURI),
|
||||
zap.String("ip", clientIP),
|
||||
zap.String("latency", strconv.Itoa(int(latencyTime))+"ms"),
|
||||
)
|
||||
if raw != "" {
|
||||
path = path + "?" + raw
|
||||
}
|
||||
|
||||
fields := []zap.Field{
|
||||
zap.Int("status", c.Writer.Status()),
|
||||
zap.String("method", c.Request.Method),
|
||||
zap.String("path", path),
|
||||
zap.String("ip", getRealIP(c)),
|
||||
zap.String("latency", fmt.Sprintf("%v ms", time.Since(startTime).Milliseconds())),
|
||||
}
|
||||
|
||||
if len(c.Errors) > 0 {
|
||||
for _, e := range c.Errors.Errors() {
|
||||
log.Logger.Error(e)
|
||||
}
|
||||
fields = append(fields, zap.Strings("errors", c.Errors.Errors()))
|
||||
}
|
||||
log.Logger.Info("Request", fields...)
|
||||
}
|
||||
}
|
||||
|
||||
func getRealIP(c *gin.Context) string {
|
||||
if ip := c.GetHeader("X-Real-IP"); ip != "" {
|
||||
return ip
|
||||
}
|
||||
if ip := c.GetHeader("X-Forwarded-For"); ip != "" {
|
||||
if index := strings.Index(ip, ","); index != -1 {
|
||||
return strings.TrimSpace(ip[:index])
|
||||
}
|
||||
return ip
|
||||
}
|
||||
if ip := c.GetHeader("X-Originating-IP"); ip != "" {
|
||||
return ip
|
||||
}
|
||||
return c.ClientIP()
|
||||
}
|
||||
|
||||
func shouldSkipLog(path string) bool {
|
||||
skipPaths := []string{
|
||||
"/swagger/",
|
||||
"/favicon.ico",
|
||||
"/health",
|
||||
}
|
||||
|
||||
for _, p := range skipPaths {
|
||||
if strings.HasPrefix(path, p) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
@ -3,7 +3,7 @@ package middleware
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"pcgamedb/log"
|
||||
"game-crawler/log"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
"go.uber.org/zap"
|
||||
|
171
server/route.go
@ -1,25 +1,177 @@
|
||||
package server
|
||||
|
||||
import (
|
||||
"pcgamedb/server/handler"
|
||||
"pcgamedb/server/middleware"
|
||||
"embed"
|
||||
"errors"
|
||||
"game-crawler/crawler"
|
||||
"game-crawler/db"
|
||||
"game-crawler/log"
|
||||
"game-crawler/server/handler"
|
||||
"game-crawler/server/middleware"
|
||||
"io/fs"
|
||||
"net/http"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/gin-contrib/cors"
|
||||
"github.com/gin-contrib/multitemplate"
|
||||
"github.com/gin-gonic/gin"
|
||||
"go.mongodb.org/mongo-driver/bson/primitive"
|
||||
"go.uber.org/zap"
|
||||
|
||||
"pcgamedb/docs"
|
||||
"game-crawler/docs"
|
||||
|
||||
swaggerfiles "github.com/swaggo/files"
|
||||
ginSwagger "github.com/swaggo/gin-swagger"
|
||||
)
|
||||
|
||||
//go:embed templates templates/layouts static
|
||||
var frontendFS embed.FS
|
||||
|
||||
func initRoute(app *gin.Engine) {
|
||||
app.Use(cors.New(cors.Config{
|
||||
AllowAllOrigins: true,
|
||||
}))
|
||||
|
||||
GameInfoGroup := app.Group("/game")
|
||||
initFrontend(app)
|
||||
initApi(app)
|
||||
}
|
||||
|
||||
func initFrontend(app *gin.Engine) {
|
||||
app.Use(middleware.Logger())
|
||||
|
||||
// Load static files
|
||||
staticFs, err := fs.Sub(frontendFS, "static")
|
||||
if err != nil {
|
||||
log.Logger.Fatal("Error loading static files", zap.Error(err))
|
||||
return
|
||||
}
|
||||
app.StaticFS("/static", http.FS(staticFs))
|
||||
|
||||
// Load templates
|
||||
// Loading every template directly via app.LoadHTMLFiles() leads to errors,
// because templates with the same name defined in different HTML files override each other,
// so we load the templates manually and render them with multitemplate.
|
||||
r := multitemplate.NewRenderer()
|
||||
|
||||
layoutFiles, err := frontendFS.ReadDir("templates/layouts")
|
||||
if err != nil {
|
||||
log.Logger.Fatal("Error loading layout templates", zap.Error(err))
|
||||
return
|
||||
}
|
||||
|
||||
rootFiles, err := frontendFS.ReadDir("templates")
|
||||
if err != nil {
|
||||
log.Logger.Fatal("Error loading root templates", zap.Error(err))
|
||||
return
|
||||
}
|
||||
|
||||
for _, rootFile := range rootFiles {
|
||||
if rootFile.IsDir() {
|
||||
continue
|
||||
}
|
||||
name := filepath.Base(rootFile.Name())
|
||||
templateFiles := []string{"templates/" + name}
|
||||
for _, layout := range layoutFiles {
|
||||
if !layout.IsDir() {
|
||||
templateFiles = append(templateFiles, "templates/layouts/"+layout.Name())
|
||||
}
|
||||
}
|
||||
r.AddFromFS(name, frontendFS, templateFiles...)
|
||||
}
|
||||
|
||||
app.HTMLRender = r
|
||||
|
||||
// Load routes
|
||||
app.GET("/", func(ctx *gin.Context) {
|
||||
monthTop, err := crawler.GetSteam250MonthTop50()
|
||||
if err != nil {
|
||||
ctx.HTML(500, "500.html", err)
|
||||
return
|
||||
}
|
||||
mostPlayed, err := crawler.GetSteam250MostPlayed()
|
||||
if err != nil {
|
||||
ctx.HTML(500, "500.html", err)
|
||||
return
|
||||
}
|
||||
bestOfTheYear, err := crawler.GetSteam250BestOfTheYear()
|
||||
if err != nil {
|
||||
ctx.HTML(500, "500.html", err)
|
||||
return
|
||||
}
|
||||
ctx.HTML(200, "index.html", gin.H{
|
||||
"MonthTop": monthTop,
|
||||
"MostPlayed": mostPlayed,
|
||||
"BestOfTheYear": bestOfTheYear,
|
||||
})
|
||||
})
|
||||
|
||||
app.GET("/game/:id", func(ctx *gin.Context) {
|
||||
idStr := ctx.Param("id")
|
||||
id, err := primitive.ObjectIDFromHex(idStr)
|
||||
if err != nil {
|
||||
ctx.HTML(400, "400.html", nil)
|
||||
return
|
||||
}
|
||||
info, err := db.GetGameInfoByID(id)
|
||||
if err != nil {
|
||||
ctx.HTML(500, "500.html", err)
|
||||
return
|
||||
}
|
||||
games, err := db.GetGameItemsByIDs(info.GameIDs)
|
||||
if err != nil {
|
||||
ctx.HTML(500, "500.html", err)
|
||||
return
|
||||
}
|
||||
info.Games = games
|
||||
ctx.HTML(200, "game.html", info)
|
||||
})
|
||||
|
||||
app.GET("/search", func(ctx *gin.Context) {
|
||||
key := ctx.Query("key")
|
||||
page := ctx.Query("page")
|
||||
key = strings.TrimSpace(key)
|
||||
if len(key) < 2 {
|
||||
ctx.HTML(400, "400.html", errors.New("search key should be at least 2 characters long"))
|
||||
return
|
||||
}
|
||||
if page == "" {
|
||||
page = "1"
|
||||
}
|
||||
pageInt, err := strconv.Atoi(page)
|
||||
if err != nil {
|
||||
ctx.HTML(400, "400.html", err)
|
||||
return
|
||||
}
|
||||
games, totalPage, err := db.SearchGameInfos(key, pageInt, 10)
|
||||
if err != nil {
|
||||
ctx.HTML(500, "500.html", err)
|
||||
return
|
||||
}
|
||||
res := gin.H{
|
||||
"Games": games,
|
||||
"TotalPage": totalPage,
|
||||
"CurrentPage": pageInt,
|
||||
"Key": key,
|
||||
}
|
||||
if pageInt > 1 {
|
||||
res["PrevPage"] = pageInt - 1
|
||||
}
|
||||
if pageInt < totalPage {
|
||||
res["NextPage"] = pageInt + 1
|
||||
}
|
||||
ctx.HTML(200, "search.html", res)
|
||||
})
|
||||
}
|
||||
|
||||
func initApi(app *gin.Engine) {
|
||||
apiGroup := app.Group("/api")
|
||||
|
||||
GameInfoGroup := apiGroup.Group("/game")
|
||||
GameInfoGroup.Use(middleware.Logger())
|
||||
GameItemGroup := GameInfoGroup.Group("/raw")
|
||||
GameItemGroup.Use(middleware.Logger())
|
||||
|
||||
GameItemGroup.GET("/unorganized", handler.GetUnorganizedGameItemsHandler)
|
||||
GameItemGroup.POST("/organize", middleware.Auth(), handler.OrganizeGameItemHandler)
|
||||
@ -31,14 +183,13 @@ func initRoute(app *gin.Engine) {
|
||||
GameInfoGroup.GET("/name/:name", handler.GetGameInfosByNameHandler)
|
||||
GameInfoGroup.GET("/platform/:platform_type/:platform_id", handler.GetGameInfoByPlatformIDHandler)
|
||||
GameInfoGroup.GET("/id/:id", handler.GetGameInfoByIDHandler)
|
||||
GameInfoGroup.PUT("/update", middleware.Auth(), handler.UpdateGameInfoHandler)
|
||||
GameInfoGroup.DELETE("/id/:id", middleware.Auth(), handler.DeleteGameInfoHandler)
|
||||
|
||||
app.GET("/popular/:type", handler.GetPopularGameInfosHandler)
|
||||
app.GET("/healthcheck", handler.HealthCheckHandler)
|
||||
app.GET("/author", handler.GetAllAuthorsHandler)
|
||||
app.POST("/clean", middleware.Auth(), handler.CleanGameHandler)
|
||||
apiGroup.GET("/popular/:type", middleware.Logger(), handler.GetPopularGameInfosHandler)
|
||||
apiGroup.GET("/healthcheck", handler.HealthCheckHandler)
|
||||
apiGroup.GET("/author", middleware.Logger(), handler.GetAllAuthorsHandler)
|
||||
apiGroup.POST("/clean", middleware.Logger(), middleware.Auth(), handler.CleanGameHandler)
|
||||
|
||||
docs.SwaggerInfo.BasePath = "/api"
|
||||
app.GET("/swagger/*any", ginSwagger.WrapHandler(swaggerfiles.Handler))
|
||||
apiGroup.GET("/swagger/*any", ginSwagger.WrapHandler(swaggerfiles.Handler))
|
||||
}
|
||||
|
@ -4,12 +4,12 @@ import (
|
||||
"io"
|
||||
"time"
|
||||
|
||||
"pcgamedb/cache"
|
||||
"pcgamedb/config"
|
||||
"pcgamedb/db"
|
||||
"pcgamedb/log"
|
||||
"pcgamedb/server/middleware"
|
||||
"pcgamedb/task"
|
||||
"game-crawler/cache"
|
||||
"game-crawler/config"
|
||||
"game-crawler/db"
|
||||
"game-crawler/log"
|
||||
"game-crawler/server/middleware"
|
||||
"game-crawler/task"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
"github.com/robfig/cron/v3"
|
||||
@ -26,10 +26,11 @@ func Run() {
|
||||
gin.SetMode(gin.ReleaseMode)
|
||||
gin.DefaultWriter = io.Discard
|
||||
app := gin.New()
|
||||
app.Use(middleware.Logger())
|
||||
app.Use(middleware.Recovery())
|
||||
initRoute(app)
|
||||
log.Logger.Info("Server running", zap.String("port", config.Config.Server.Port))
|
||||
|
||||
// Start auto-crawl task
|
||||
if config.Config.Server.AutoCrawl {
|
||||
go func() {
|
||||
c := cron.New()
|
||||
@ -40,6 +41,17 @@ func Run() {
|
||||
c.Start()
|
||||
}()
|
||||
}
|
||||
|
||||
// Start auto-update task
|
||||
go func() {
|
||||
c := cron.New()
|
||||
_, err := c.AddFunc("0 */3 * * *", func() { task.UpdateOutdatedGameInfos(log.TaskLogger) })
|
||||
if err != nil {
|
||||
log.Logger.Error("Error adding cron job", zap.Error(err))
|
||||
}
|
||||
c.Start()
|
||||
}()
|
||||
|
||||
err := app.Run(":" + config.Config.Server.Port)
|
||||
if err != nil {
|
||||
log.Logger.Panic("Failed to run server", zap.Error(err))
|
||||
|
BIN
server/static/favicon.png
Normal file
Binary file not shown. Size: 11 KiB
7
server/templates/400.html
Normal file
@ -0,0 +1,7 @@
|
||||
{{template "base" .}}
|
||||
{{define "title"}}Bad Request{{end}}
|
||||
{{define "styles"}} {{end}}
|
||||
{{define "content"}}
|
||||
Bad Request
|
||||
{{.}}
|
||||
{{end}}
|
4
server/templates/404.html
Normal file
@ -0,0 +1,4 @@
|
||||
{{template "base" .}}
|
||||
{{define "title"}}Page Not Found{{end}}
|
||||
{{define "styles"}} {{end}}
|
||||
{{define "content"}} Page Not Found {{end}}
|
4
server/templates/500.html
Normal file
@ -0,0 +1,4 @@
|
||||
{{template "base" .}}
|
||||
{{define "title"}}Server Error{{end}}
|
||||
{{define "styles"}}{{end}}
|
||||
{{define "content"}} {{ . }} {{end}}
|
287
server/templates/game.html
Normal file
@ -0,0 +1,287 @@
|
||||
{{template "base" .}}
|
||||
|
||||
{{define "title"}}{{.Name}} - Details{{end}}
|
||||
|
||||
{{define "styles"}}
|
||||
<style>
|
||||
.game-cover {
|
||||
max-height: 400px;
|
||||
object-fit: cover;
|
||||
}
|
||||
|
||||
.screenshot-gallery {
|
||||
height: 600px;
|
||||
}
|
||||
|
||||
.swiper-slide img {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
object-fit: contain;
|
||||
}
|
||||
|
||||
.info-label {
|
||||
font-weight: bold;
|
||||
color: #666;
|
||||
}
|
||||
|
||||
.download-card {
|
||||
transition: transform 0.2s;
|
||||
}
|
||||
|
||||
.download-card:hover {
|
||||
transform: translateY(-3px);
|
||||
box-shadow: 0 4px 15px rgba(0, 0, 0, 0.1);
|
||||
}
|
||||
|
||||
.tag {
|
||||
display: inline-block;
|
||||
padding: 0.25rem 0.5rem;
|
||||
margin: 0.25rem;
|
||||
background-color: #f8f9fa;
|
||||
border-radius: 0.25rem;
|
||||
font-size: 0.875rem;
|
||||
}
|
||||
|
||||
/* Masonry Container Styles */
|
||||
.masonry-container {
|
||||
column-count: 2;
|
||||
/* 3 Columns for Masonry */
|
||||
column-gap: 1rem;
|
||||
/* Adjust Gap Between Columns */
|
||||
}
|
||||
|
||||
@media (max-width: 576px) {
|
||||
.masonry-container {
|
||||
column-count: 1;
|
||||
/* 1 Column on Small Screens */
|
||||
}
|
||||
}
|
||||
|
||||
/* Masonry Item */
|
||||
.masonry-container .card {
|
||||
display: inline-block;
|
||||
/* Ensure Cards Behave as Block Elements in Columns */
|
||||
width: 100%;
|
||||
/* Make Cards Fill the Column Width */
|
||||
}
|
||||
</style>
|
||||
{{end}}
|
||||
|
||||
{{define "content"}}
|
||||
<!-- Game Details -->
|
||||
<div class="container py-4">
|
||||
<!-- Basic Info -->
|
||||
<div class="row mb-4">
|
||||
<div class="col-md-4">
|
||||
{{if .Cover}}
|
||||
<img src="{{.Cover}}" class="img-fluid rounded game-cover" alt="{{.Name}}" />
|
||||
{{else}}
|
||||
<div class="game-cover bg-secondary d-flex align-items-center justify-content-center rounded">
|
||||
<span class="text-white">No Image</span>
|
||||
</div>
|
||||
{{end}}
|
||||
</div>
|
||||
<div class="col-md-8">
|
||||
<h1 class="mb-3">{{.Name}}</h1>
|
||||
|
||||
{{if .Aliases}}
|
||||
<div>
|
||||
<span class="info-label">Aliases:</span>
|
||||
{{range .Aliases}}
|
||||
<span class="tag">{{.}}</span>
|
||||
{{end}}
|
||||
</div>
|
||||
{{end}}
|
||||
|
||||
{{if .Developers}}
|
||||
<div>
|
||||
<span class="info-label">Developers:</span>
|
||||
{{range .Developers}}
|
||||
<span class="tag">{{.}}</span>
|
||||
{{end}}
|
||||
</div>
|
||||
{{end}}
|
||||
|
||||
{{if .Publishers}}
|
||||
<div>
|
||||
<span class="info-label">Publishers:</span>
|
||||
{{range .Publishers}}
|
||||
<span class="tag">{{.}}</span>
|
||||
{{end}}
|
||||
</div>
|
||||
{{end}}
|
||||
|
||||
{{if .Languages}}
|
||||
<div>
|
||||
<span class="info-label">Languages:</span>
|
||||
{{range .Languages}}
|
||||
<span class="tag">{{.}}</span>
|
||||
{{end}}
|
||||
</div>
|
||||
{{end}}
|
||||
|
||||
{{if .GameEngines}}
|
||||
<div>
|
||||
<span class="info-label">Engines:</span>
|
||||
{{range .GameEngines}}
|
||||
<span class="tag">{{.}}</span>
|
||||
{{end}}
|
||||
</div>
|
||||
{{end}}
|
||||
|
||||
{{if .Genres}}
|
||||
<div>
|
||||
<span class="info-label">Genres:</span>
|
||||
{{range .Genres}}
|
||||
<span class="tag">{{.}}</span>
|
||||
{{end}}
|
||||
</div>
|
||||
{{end}}
|
||||
|
||||
{{if .Themes}}
|
||||
<div>
|
||||
<span class="info-label">Themes:</span>
|
||||
{{range .Themes}}
|
||||
<span class="tag">{{.}}</span>
|
||||
{{end}}
|
||||
</div>
|
||||
{{end}}
|
||||
|
||||
{{if .Description}}
|
||||
<div>
|
||||
<p>{{.Description}}</p>
|
||||
</div>
|
||||
{{end}}
|
||||
|
||||
{{if .SteamID}}
|
||||
<div>
|
||||
<a href="https://store.steampowered.com/app/{{.SteamID}}" target="_blank" class="btn btn-primary">
|
||||
Steam
|
||||
</a>
|
||||
</div>
|
||||
{{end}}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Screenshots -->
|
||||
{{if .Screenshots}}
|
||||
<div class="mb-4">
|
||||
<div class="swiper screenshot-gallery">
|
||||
<div class="swiper-wrapper">
|
||||
{{range .Screenshots}}
|
||||
<div class="swiper-slide">
|
||||
<img src="{{.}}" alt="screenshot" />
|
||||
</div>
|
||||
{{end}}
|
||||
</div>
|
||||
<div class="swiper-pagination"></div>
|
||||
<div class="swiper-button-next"></div>
|
||||
<div class="swiper-button-prev"></div>
|
||||
</div>
|
||||
</div>
|
||||
{{end}}
|
||||
|
||||
<!-- Download Links -->
|
||||
{{if .Games}}
|
||||
<div class="mb-4">
|
||||
<div class="col-12">
|
||||
<!-- Masonry Container -->
|
||||
<div class="masonry-container">
|
||||
{{range .Games}}
|
||||
<div class="card download-card mb-3">
|
||||
<div class="card-body">
|
||||
<h5 class="card-title"><a class="text-decoration-none" href="{{.Url}}">{{.RawName}}</a></h5>
|
||||
{{if .Size}}
|
||||
<div class="card-text">
|
||||
<small class="text-muted">Size: {{.Size}}</small>
|
||||
</div>
|
||||
{{end}}
|
||||
{{if .Author}}
|
||||
<div class="card-text">
|
||||
<small class="text-muted">Source: {{.Author}}</small>
|
||||
</div>
|
||||
{{end}}
|
||||
{{if .Platform}}
|
||||
<div class="card-text">
|
||||
<small class="text-muted">Platform: {{.Platform}}</small>
|
||||
</div>
|
||||
{{end}}
|
||||
{{if .Password}}
|
||||
<div class="card-text">
|
||||
<small class="text-muted">Unzip password: <code>{{.Password}}</code></small>
|
||||
</div>
|
||||
{{end}}
|
||||
{{if .UpdatedAt}}
|
||||
<div class="card-text">
|
||||
<small class="text-muted">Updated: {{.UpdatedAt}}</small>
|
||||
</div>
|
||||
{{end}}
|
||||
<table class="table table-striped">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Label</th>
|
||||
<th>Link</th>
|
||||
</tr>
|
||||
</thead>
|
||||
|
||||
<tbody>
|
||||
{{range $key, $value := .Downloads}}
|
||||
<tr>
|
||||
<td>{{$key}}</td>
|
||||
<td>
|
||||
<div class="input-group mb-3">
|
||||
<input class="form-control" type="text" value="{{$value}}" readonly>
|
||||
<button class="btn btn-outline-secondary btn-sm" type="button"
|
||||
onclick="copyToClipboard(this, '{{$value}}')">
|
||||
Copy
|
||||
</button>
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
{{end}}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
{{end}}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{{end}}
|
||||
</div>
|
||||
{{end}}
|
||||
|
||||
{{define "scripts"}}
|
||||
<script src="https://cdn.jsdelivr.net/npm/swiper@8/swiper-bundle.min.js"></script>
|
||||
<script>
|
||||
const swiper = new Swiper(".screenshot-gallery", {
|
||||
slidesPerView: 1,
|
||||
spaceBetween: 30,
|
||||
loop: true,
|
||||
pagination: {
|
||||
el: ".swiper-pagination",
|
||||
clickable: true,
|
||||
},
|
||||
navigation: {
|
||||
nextEl: ".swiper-button-next",
|
||||
prevEl: ".swiper-button-prev",
|
||||
},
|
||||
});
|
||||
|
||||
function copyToClipboard(button, text) {
|
||||
const el = document.createElement("textarea");
|
||||
el.value = text;
|
||||
document.body.appendChild(el);
|
||||
el.select();
|
||||
document.execCommand("copy");
|
||||
document.body.removeChild(el);
|
||||
|
||||
button.textContent = "Copied";
|
||||
button.disabled = true;
|
||||
setTimeout(() => {
|
||||
button.textContent = "Copy";
|
||||
button.disabled = false;
|
||||
}, 2000);
|
||||
}
|
||||
</script>
|
||||
{{end}}
|
137
server/templates/index.html
Normal file
@ -0,0 +1,137 @@
|
||||
{{template "base" .}}
|
||||
|
||||
{{define "title"}}GameDB{{end}}
|
||||
|
||||
{{define "styles"}}
|
||||
<style>
|
||||
.game-card {
|
||||
height: 100%;
|
||||
transition: transform 0.2s;
|
||||
text-decoration: none;
|
||||
color: inherit;
|
||||
}
|
||||
|
||||
.game-card:hover {
|
||||
transform: translateY(-5px);
|
||||
box-shadow: 0 4px 15px rgba(0, 0, 0, 0.2);
|
||||
}
|
||||
|
||||
.game-cover {
|
||||
height: 200px;
|
||||
object-fit: cover;
|
||||
}
|
||||
|
||||
.game-description {
|
||||
display: -webkit-box;
|
||||
line-clamp: 3;
|
||||
-webkit-line-clamp: 3;
|
||||
-webkit-box-orient: vertical;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
}
|
||||
|
||||
@media (min-width: 992px) {
|
||||
.col-lg-5-item {
|
||||
flex: 0 0 20%;
|
||||
max-width: 20%;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
{{end}}
|
||||
|
||||
{{define "content"}}
|
||||
<!-- Games Grid -->
|
||||
<div class="container py-4">
|
||||
<h2 class="mb-4">Month Top</h2>
|
||||
<div class="row g-4">
|
||||
{{range .MonthTop}}
|
||||
<div class="col-12 col-sm-6 col-md-4 col-lg-5-item">
|
||||
<a href="/game/{{.ID.Hex}}" class="card game-card">
|
||||
{{if .Cover}}
|
||||
<img src="{{.Cover}}" class="card-img-top game-cover" alt="{{.Name}}" />
|
||||
{{else}}
|
||||
<div class="card-img-top game-cover bg-secondary d-flex align-items-center justify-content-center">
|
||||
<span class="text-white">No Image</span>
|
||||
</div>
|
||||
{{end}}
|
||||
<div class="card-body">
|
||||
<h5 class="card-title">{{.Name}}</h5>
|
||||
<p class="card-text game-description">{{.Description}}</p>
|
||||
{{if .Publishers}}
|
||||
<div class="publishers mb-2">
|
||||
<small class="text-muted">Publishers:</small>
|
||||
{{range $index, $publisher := .Publishers}} {{if $index}}, {{end}}
|
||||
<small class="text-muted">{{$publisher}}</small>
|
||||
{{end}}
|
||||
</div>
|
||||
{{end}}
|
||||
</div>
|
||||
</a>
|
||||
</div>
|
||||
{{end}}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="container py-4">
|
||||
<h2 class="mb-4">Most Played</h2>
|
||||
<div class="row g-4">
|
||||
{{range .MostPlayed}}
|
||||
<div class="col-12 col-sm-6 col-md-4 col-lg-5-item">
|
||||
<a href="/game/{{.ID.Hex}}" class="card game-card">
|
||||
{{if .Cover}}
|
||||
<img src="{{.Cover}}" class="card-img-top game-cover" alt="{{.Name}}" />
|
||||
{{else}}
|
||||
<div class="card-img-top game-cover bg-secondary d-flex align-items-center justify-content-center">
|
||||
<span class="text-white">No Image</span>
|
||||
</div>
|
||||
{{end}}
|
||||
<div class="card-body">
|
||||
<h5 class="card-title">{{.Name}}</h5>
|
||||
<p class="card-text game-description">{{.Description}}</p>
|
||||
{{if .Publishers}}
|
||||
<div class="publishers mb-2">
|
||||
<small class="text-muted">Publishers:</small>
|
||||
{{range $index, $publisher := .Publishers}} {{if $index}}, {{end}}
|
||||
<small class="text-muted">{{$publisher}}</small>
|
||||
{{end}}
|
||||
</div>
|
||||
{{end}}
|
||||
</div>
|
||||
</a>
|
||||
</div>
|
||||
{{end}}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="container py-4">
|
||||
<h2 class="mb-4">Best of the Year</h2>
|
||||
<div class="row g-4">
|
||||
{{range .BestOfTheYear}}
|
||||
<div class="col-12 col-sm-6 col-md-4 col-lg-5-item">
|
||||
<a href="/game/{{.ID.Hex}}" class="card game-card">
|
||||
{{if .Cover}}
|
||||
<img src="{{.Cover}}" class="card-img-top game-cover" alt="{{.Name}}" />
|
||||
{{else}}
|
||||
<div class="card-img-top game-cover bg-secondary d-flex align-items-center justify-content-center">
|
||||
<span class="text-white">No Image</span>
|
||||
</div>
|
||||
{{end}}
|
||||
<div class="card-body">
|
||||
<h5 class="card-title">{{.Name}}</h5>
|
||||
<p class="card-text game-description">{{.Description}}</p>
|
||||
{{if .Publishers}}
|
||||
<div class="publishers mb-2">
|
||||
<small class="text-muted">Publishers:</small>
|
||||
{{range $index, $publisher := .Publishers}} {{if $index}}, {{end}}
|
||||
<small class="text-muted">{{$publisher}}</small>
|
||||
{{end}}
|
||||
</div>
|
||||
{{end}}
|
||||
</div>
|
||||
</a>
|
||||
</div>
|
||||
{{end}}
|
||||
</div>
|
||||
</div>
|
||||
{{end}}
|
||||
{{define "scripts"}} {{end}}
23 server/templates/layouts/base.html Normal file
@ -0,0 +1,23 @@
{{define "base"}}
<!DOCTYPE html>
<html lang="en" data-bs-theme="dark">

<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<title>{{template "title" .}}</title>
<link rel="icon" type="image/x-icon" href="/static/favicon.png">
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/css/bootstrap.min.css" rel="stylesheet" />
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/swiper@8/swiper-bundle.min.css" />
{{block "styles" .}}{{end}}
</head>

<body>
{{template "header" .}} {{block "content" .}}{{end}} {{template "footer" .}}

<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/js/bootstrap.bundle.min.js"></script>
{{block "scripts" .}}{{end}}
</body>

</html>
{{end}}
11 server/templates/layouts/footer.html Normal file
@ -0,0 +1,11 @@
{{define "footer"}}
<!-- Footer -->
<footer class="text-light py-4 mt-5">
<div class="container">
<div class="col text-center">
<div><a href="/api/swagger/index.html">API Doc</a></div>
<div>Made by <a href="https://www.nite07.com" target="_blank">Nite</a></div>
</div>
</div>
</footer>
{{end}}
25 server/templates/layouts/header.html Normal file
@ -0,0 +1,25 @@
{{define "header"}}
<!-- Header -->
<nav class="navbar navbar-expand-lg">
<div class="container">
<span>
<a class="navbar-brand" href="/">GameDB</a>
<a href="https://t.me/nitegame" target="_blank">Telegram</a>
</span>
<div class="navbar-collapse" id="navbarNav">
<ul class="navbar-nav">
<!-- <li class="nav-item"><a class="nav-link" href="https://www.nite07.com" target="_blank">Blog</a></li> -->
</ul>

<div class="ms-auto">
<form action="/search" method="GET" class="d-flex">
<div class="input-group">
<input type="text" class="form-control" name="key" placeholder="Input full english name of game" />
<button class="btn btn-primary" type="submit">Search</button>
</div>
</form>
</div>
</div>
</div>
</nav>
{{end}}
102 server/templates/search.html Normal file
@ -0,0 +1,102 @@
{{template "base" .}}

{{define "title"}}{{.Key}} - Search{{end}}

{{define "styles"}}
<style>
.game-card {
height: 100%;
transition: transform 0.2s;
text-decoration: none;
color: inherit;
}

.game-card:hover {
transform: translateY(-5px);
box-shadow: 0 4px 15px rgba(0, 0, 0, 0.2);
}

.game-cover {
height: 200px;
object-fit: cover;
}

.game-description {
display: -webkit-box;
line-clamp: 3;
-webkit-line-clamp: 3;
-webkit-box-orient: vertical;
overflow: hidden;
text-overflow: ellipsis;
}

@media (min-width: 992px) {
.col-lg-5-item {
flex: 0 0 20%;
max-width: 20%;
}
}
</style>
{{end}}

{{define "content"}}
<!-- page -->
<div class="container py-4">
<h2 class="mb-4">Search</h2>
<small>{{.CurrentPage}}/{{.TotalPage}} pages found for "{{.Key}}"</small>
</div>
<!-- Games Grid -->
<div class="container py-4">
{{if .Games}}
<div class="row g-4">
{{range .Games}}
<div class="col-12 col-sm-6 col-md-4 col-lg-5-item">
<a href="/game/{{.ID.Hex}}" class="card game-card">
{{if .Cover}}
<img src="{{.Cover}}" class="card-img-top game-cover" alt="{{.Name}}" />
{{else}}
<div class="card-img-top game-cover bg-secondary d-flex align-items-center justify-content-center">
<span class="text-white">No Image</span>
</div>
{{end}}
<div class="card-body">
<h5 class="card-title">{{.Name}}</h5>
<p class="card-text game-description">{{.Description}}</p>
{{if .Publishers}}
<div class="publishers mb-2">
<small class="text-muted">Publishers:</small>
{{range $index, $publisher := .Publishers}} {{if $index}}, {{end}}
<small class="text-muted">{{$publisher}}</small>
{{end}}
</div>
{{end}}
</div>
</a>
</div>
{{end}}
</div>
{{else}}
<div class="text-center py-5">
<h4 class="text-muted">No results found for "{{.Key}}"</h4>
</div>
{{end}}
</div>
<!-- Pagination -->
<div class="container py-4">
<nav aria-label="Page navigation">
<ul class="pagination justify-content-center">
{{if .PrevPage}}
<li class="page-item">
<a class="page-link" href="/search?key={{.Key}}&page={{.PrevPage}}">Previous</a>
</li>
{{end}}
{{if .NextPage}}
<li class="page-item">
<a class="page-link" href="/search?key={{.Key}}&page={{.NextPage}}">Next</a>
</li>
{{end}}
</ul>
</nav>
</div>
{{end}}
{{define "scripts"}} {{end}}
@ -1,7 +1,7 @@
package task

import (
"pcgamedb/db"
"game-crawler/db"

"go.uber.org/zap"
)
@ -28,9 +28,9 @@ func Clean(logger *zap.Logger) {
for _, id := range ids {
logger.Info("Cleaned game info with empty game ids", zap.Any("game_id", id))
}
err = db.MergeGameInfosWithSameName()
err = db.MergeGameInfosWithSameIGDBID()
if err != nil {
logger.Error("Failed to merge same name game infos", zap.Error(err))
logger.Error("Failed to merge game infos with same igdb id", zap.Error(err))
}
logger.Info("Cleaning task completed")
}
@ -1,14 +1,13 @@
package task

import (
"net/http"
"net/url"

"pcgamedb/config"
"pcgamedb/crawler"
"pcgamedb/db"
"pcgamedb/model"
"pcgamedb/utils"
"game-crawler/config"
"game-crawler/crawler"
"game-crawler/db"
"game-crawler/model"
"game-crawler/utils"

"go.mongodb.org/mongo-driver/bson/primitive"
"go.uber.org/zap"
@ -51,21 +50,25 @@ func Crawl(logger *zap.Logger) {
for _, game := range games {
ids = append(ids, game.ID)
}
items, err := db.GetGameItemsByIDs(ids)
if err != nil {
logger.Error("Failed to get game items", zap.Error(err))
return
}

var infos []*model.GameInfo
for _, game := range items {
info, err := db.GetGameInfoByGameItemID(game.ID)
if len(ids) != 0 {
items, err := db.GetGameItemsByIDs(ids)
if err != nil {
logger.Error("Failed to get game info", zap.Error(err))
continue
logger.Error("Failed to get game items", zap.Error(err))
return
}
for _, game := range items {
info, err := db.GetGameInfoByGameItemID(game.ID)
if err != nil {
logger.Error("Failed to get game info", zap.Error(err))
continue
}
info.Games = append(info.Games, game)
infos = append(infos, info)
}
info.Games = append(info.Games, game)
infos = append(infos, info)
}

for _, u := range config.Config.Webhooks.CrawlTask {
_, err := url.Parse(u)
if err != nil {
@ -73,14 +76,7 @@ func Crawl(logger *zap.Logger) {
continue
}
logger.Info("webhook triggered", zap.String("task", "crawl"), zap.String("url", u))
_, err = utils.Fetch(utils.FetchConfig{
Url: u,
Method: http.MethodPost,
Headers: map[string]string{
"Content-Type": "application/json",
},
Data: infos,
})
_, err = utils.Request().SetHeader("Content-Type", "application/json").SetBody(infos).Post(u)
if err != nil {
logger.Error("Failed to trigger webhook", zap.String("task", "crawl"), zap.String("url", u), zap.Error(err))
}
29 task/update_game_info.go Normal file
@ -0,0 +1,29 @@
package task

import (
"game-crawler/crawler"

"go.uber.org/zap"
)

// TODO: the IGDB game info cache has no expiration time set yet

func UpdateOutdatedGameInfos(logger *zap.Logger) {
channel, err := crawler.UpdateGameInfo(10)
count := 0
if err != nil {
logger.Error("Failed to update game info", zap.Error(err))
return
}
for info := range channel {
logger.Info("Updated game info",
zap.String("id", info.ID.String()),
zap.String("name", info.Name),
)
count++
if count == 10 {
break
}
}
logger.Info("Updated game info count", zap.Int("count", count))
}
@ -1,147 +0,0 @@
package utils

import (
"encoding/json"
"errors"
"time"
)

// https://github.com/ZFC-Digital/cf-clearance-scraper

type ccsRequest struct {
Url string `json:"url"`
Mode string `json:"mode"`
SiteKey string `json:"siteKey"`
}

type WAFSession struct {
Cookies []struct {
Name string `json:"name"`
Value string `json:"value"`
Domain string `json:"domain"`
Path string `json:"path"`
Expires float64 `json:"expires"`
Size int `json:"size"`
HTTPOnly bool `json:"httpOnly"`
Secure bool `json:"secure"`
Session bool `json:"session"`
SameSite string `json:"sameSite"`
Priority string `json:"priority"`
SameParty bool `json:"sameParty"`
SourceScheme string `json:"sourceScheme"`
PartitionKey string `json:"partitionKey"`
} `json:"cookies"`
Headers map[string]string `json:"headers"`
Code int `json:"code"`
}

func CCSWAFSession(ccsUrl string, requestUrl string) (*WAFSession, error) {
data := ccsRequest{
Url: requestUrl,
Mode: "waf-session",
}
resp, err := Fetch(FetchConfig{
Url: ccsUrl,
Method: "POST",
Data: data,
Timeout: 60 * time.Second,
})
if err != nil {
return nil, err
}
var response WAFSession
err = json.Unmarshal(resp.Data, &response)
if err != nil {
return nil, err
}
if response.Code != 200 {
return nil, errors.New("failed to get WAF session")
}
return &response, nil
}

func CCSSource(ccsUrl string, requestUrl string) (string, error) {
data := ccsRequest{
Url: requestUrl,
Mode: "source",
}
resp, err := Fetch(FetchConfig{
Url: ccsUrl,
Method: "POST",
Data: data,
Timeout: 60 * time.Second,
})
if err != nil {
return "", err
}
type response struct {
Source string `json:"source"`
Code int `json:"code"`
}
var ccsResp response
err = json.Unmarshal(resp.Data, &ccsResp)
if err != nil {
return "", err
}
if ccsResp.Code != 200 {
return "", errors.New("failed to get source")
}
return ccsResp.Source, nil
}

func CCSTurnstileToken(ccsUrl string, requestUrl string, siteKey string) (string, error) {
data := ccsRequest{
Url: requestUrl,
Mode: "turnstile-min",
SiteKey: siteKey,
}
resp, err := Fetch(FetchConfig{
Url: ccsUrl,
Method: "POST",
Data: data,
Timeout: 60 * time.Second,
})
if err != nil {
return "", err
}
var ccsResp struct {
Token string `json:"token"`
Code int `json:"code"`
}
err = json.Unmarshal(resp.Data, &ccsResp)
if err != nil {
return "", err
}
if ccsResp.Code != 200 {
return "", errors.New("failed to get source")
}
return ccsResp.Token, nil
}

func CCSTurnstileMaxToken(ccsUrl string, requestUrl string) (string, error) {
data := ccsRequest{
Url: requestUrl,
Mode: "turnstile-max",
}
resp, err := Fetch(FetchConfig{
Url: ccsUrl,
Method: "POST",
Data: data,
Timeout: 60 * time.Second,
})
if err != nil {
return "", err
}
var ccsResp struct {
Token string `json:"token"`
Code int `json:"code"`
}
err = json.Unmarshal(resp.Data, &ccsResp)
if err != nil {
return "", err
}
if ccsResp.Code != 200 {
return "", errors.New("failed to get source")
}
return ccsResp.Token, nil
}
16 utils/decoder.go Normal file
@ -0,0 +1,16 @@
package utils

import (
"bytes"
"io"

"golang.org/x/text/encoding/htmlindex"
"golang.org/x/text/transform"
)

func Windows1251ToUTF8(b []byte) []byte {
decoder, _ := htmlindex.Get("windows-1251")
reader := transform.NewReader(bytes.NewReader(b), decoder.NewDecoder().Transformer)
body, _ := io.ReadAll(reader)
return body
}
222 utils/fetch.go
@ -1,222 +0,0 @@
package utils

import (
"bytes"
"context"
"encoding/json"
"errors"
"io"
"net"
"net/http"
"net/url"
"strings"
"time"

"golang.org/x/net/html/charset"
)

const userAgent string = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36"

type FetchConfig struct {
Method string
Url string
Data interface{}
RetryTimes int
Headers map[string]string
Cookies map[string]string
Timeout time.Duration
}

type FetchResponse struct {
StatusCode int
Data []byte
Header http.Header
Cookie []*http.Cookie
}

func Fetch(cfg FetchConfig) (*FetchResponse, error) {
var req *http.Request
var resp *http.Response
var backoff time.Duration = 1
var reqBody io.Reader = nil
var err error

if cfg.RetryTimes == 0 {
cfg.RetryTimes = 3
}
if cfg.Method == "" {
cfg.Method = "GET"
}
if cfg.Timeout == 0 {
cfg.Timeout = 10 * time.Second
}

if cfg.Data != nil && (cfg.Method == "POST" || cfg.Method == "PUT") {
if cfg.Headers == nil {
cfg.Headers = map[string]string{}
}
newHeaders := make(map[string]string)
for k, v := range cfg.Headers {
newHeaders[strings.ToLower(k)] = v
}
cfg.Headers = newHeaders
if _, exist := cfg.Headers["content-type"]; !exist {
cfg.Headers["content-type"] = "application/json"
}
v := cfg.Headers["content-type"]
if v == "application/x-www-form-urlencoded" {
switch data := cfg.Data.(type) {
case map[string]string:
params := url.Values{}
for k, v := range data {
params.Set(k, v)
}
reqBody = strings.NewReader(params.Encode())
case string:
reqBody = strings.NewReader(data)
case url.Values:
reqBody = strings.NewReader(data.Encode())
default:
return nil, errors.New("unsupported data type")
}
} else if v == "application/json" {
switch data := cfg.Data.(type) {
case []byte:
reqBody = bytes.NewReader(data)
case string:
reqBody = strings.NewReader(data)
case interface{}:
jsonData, err := json.Marshal(cfg.Data)
if err != nil {
return nil, err
}
reqBody = bytes.NewReader(jsonData)
default:
return nil, errors.New("unsupported data type")
}
} else {
reqBody = strings.NewReader(cfg.Data.(string))
}
}

var bodyBuffer *bytes.Buffer
if reqBody != nil {
bodyBuffer = new(bytes.Buffer)
_, err = io.Copy(bodyBuffer, reqBody)
if err != nil {
return nil, err
}
}

for retryTime := 0; retryTime <= cfg.RetryTimes; retryTime++ {
ctx, cancel := context.WithTimeout(context.Background(), cfg.Timeout)
defer cancel()

var currentReqBody io.Reader
if bodyBuffer != nil {
currentReqBody = bytes.NewReader(bodyBuffer.Bytes())
}

req, err = http.NewRequestWithContext(ctx, cfg.Method, cfg.Url, currentReqBody)
if err != nil {
return nil, err
}
if v, exist := cfg.Headers["user-agent"]; exist {
if v != "" {
req.Header.Set("user-agent", v)
}
} else {
req.Header.Set("user-agent", userAgent)
}
for k, v := range cfg.Headers {
req.Header.Set(k, v)
}
for k, v := range cfg.Cookies {
req.AddCookie(&http.Cookie{Name: k, Value: v})
}
resp, err = http.DefaultClient.Do(req)
if err != nil {
if isRetryableError(err) {
err = errors.New("request error: " + err.Error())
time.Sleep(backoff * time.Second)
backoff *= 2
continue
}
}

if resp == nil {
return nil, errors.New("response is nil")
}

if isRetryableStatusCode(resp.StatusCode) {
err = errors.New("response status code: " + resp.Status)
time.Sleep(backoff * time.Second)
backoff *= 2
continue
}

contentType := resp.Header.Get("content-type")
var reader io.Reader
if strings.Contains(contentType, "charset=") {
reader, err = charset.NewReader(resp.Body, contentType)
} else {
reader = resp.Body
}
if err != nil {
return nil, err
}
dataBytes, err := io.ReadAll(reader)
if err != nil {
return nil, err
}

res := &FetchResponse{
StatusCode: resp.StatusCode,
Header: resp.Header,
Cookie: resp.Cookies(),
Data: dataBytes,
}

return res, nil
}
return nil, err
}

func isRetryableStatusCode(statusCode int) bool {
switch statusCode {
case http.StatusInternalServerError,
http.StatusBadGateway,
http.StatusServiceUnavailable,
http.StatusGatewayTimeout,
http.StatusTooManyRequests:
return true
default:
return false
}
}

func isRetryableError(err error) bool {
if err != nil {
var netErr net.Error
if errors.As(err, &netErr) && netErr.Timeout() {
return true
}
}
return false
}

func FetchWithWAFSession(cfg FetchConfig, session *WAFSession) (*FetchResponse, error) {
if cfg.Cookies == nil {
cfg.Cookies = map[string]string{}
}
for _, cookie := range session.Cookies {
cfg.Cookies[cookie.Name] = cookie.Value
}
if cfg.Headers == nil {
cfg.Headers = map[string]string{}
}
for k, v := range session.Headers {
cfg.Headers[k] = v
}
return Fetch(cfg)
}
@ -8,18 +8,13 @@ import (
"github.com/PuerkitoBio/goquery"
)

func SolveKeepLinks(url string) (string, error) {
id := url[strings.LastIndex(url, "/")+1:]
resp, err := Fetch(FetchConfig{
Url: url,
Cookies: map[string]string{
fmt.Sprintf("flag[%s]", id): "1",
},
})
func SolveKeepLinks(URL string) (string, error) {
id := URL[strings.LastIndex(URL, "/")+1:]
resp, err := Request().SetHeader("Cookie", fmt.Sprintf("flag[%s]", id)+"=1").Get(URL)
if err != nil {
return "", err
}
doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data))
doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Body()))
if err != nil {
return "", err
}
81
utils/mgnet.go
Normal file
81
utils/mgnet.go
Normal file
@ -0,0 +1,81 @@
|
||||
package utils
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/url"
|
||||
|
||||
"github.com/PuerkitoBio/goquery"
|
||||
)
|
||||
|
||||
func GetLinkFromMgnet(URL string) (string, error) {
|
||||
resp, err := Request().Get(URL)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("Error while requesting URL: %s: %s", err, URL)
|
||||
}
|
||||
doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Body()))
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("Error while parsing HTML: %s: %s", err, URL)
|
||||
}
|
||||
ad_form_data := doc.Find("[name='ad_form_data']").AttrOr("value", "")
|
||||
if ad_form_data == "" {
|
||||
return "", fmt.Errorf("Failed to get ad_form_data: %s", URL)
|
||||
}
|
||||
token_fields := doc.Find("[name='_Token[fields]']").AttrOr("value", "")
|
||||
if token_fields == "" {
|
||||
return "", fmt.Errorf("Failed to get _Token[fields]: %s", URL)
|
||||
}
|
||||
token_unlocked := doc.Find("[name='_Token[unlocked]']").AttrOr("value", "")
|
||||
if token_unlocked == "" {
|
||||
return "", fmt.Errorf("Failed to get _Token[unlocked]: %s", URL)
|
||||
}
|
||||
cookies := resp.Cookies()
|
||||
csrfToken := ""
|
||||
for _, cookie := range cookies {
|
||||
if cookie.Name == "csrfToken" {
|
||||
csrfToken = cookie.Value
|
||||
break
|
||||
}
|
||||
}
|
||||
if csrfToken == "" {
|
||||
return "", fmt.Errorf("Failed to get csrfToken: %s", URL)
|
||||
}
|
||||
|
||||
params := url.Values{}
|
||||
params.Set("_method", "POST")
|
||||
params.Set("_csrfToken", csrfToken)
|
||||
params.Set("ad_form_data", ad_form_data)
|
||||
params.Set("_Token[fields]", token_fields)
|
||||
params.Set("_Token[unlocked]", token_unlocked)
|
||||
cookies = append(cookies, &http.Cookie{
|
||||
Name: "ab",
|
||||
Value: "2",
|
||||
})
|
||||
|
||||
resp, err = Request().SetHeaders(map[string]string{
|
||||
"Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
|
||||
"X-Requested-With": "XMLHttpRequest",
|
||||
"Referer": URL,
|
||||
}).SetCookies(cookies).SetBody(params.Encode()).Post("https://mgnet.site/links/go")
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("Error while requesting URL: %s: %s", err, "https://mgnet.site/links/go")
|
||||
}
|
||||
|
||||
type requestResult struct {
|
||||
Status string `json:"status"`
|
||||
Message string `json:"message"`
|
||||
URL string `json:"url"`
|
||||
}
|
||||
|
||||
res := requestResult{}
|
||||
err = json.Unmarshal(resp.Body(), &res)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("Error while parsing JSON: %s", err)
|
||||
}
|
||||
if res.Status != "success" {
|
||||
return "", fmt.Errorf("Failed to get link: %s: %s: %+v", res.Message, URL, res)
|
||||
}
|
||||
return res.URL, nil
|
||||
}
|
Some files were not shown because too many files have changed in this diff.