commit b9ca7e2338880351537bc3873fa56b9404c5d480 Author: Nite07 Date: Tue Sep 24 18:17:11 2024 +0800 🎉 diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..3448526 --- /dev/null +++ b/.gitignore @@ -0,0 +1,10 @@ +.idea +.vscode +.VSCodeCounter +dist +logs +docs +*test.go +deploy.sh +config.json +organize.json diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..2764916 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,17 @@ +FROM golang:1.21-alpine AS builder +LABEL authors="Nite07" + +WORKDIR /app +COPY go.mod go.sum ./ +RUN go mod download + +COPY . . +RUN go install github.com/swaggo/swag/cmd/swag@latest +RUN swag init +RUN CGO_ENABLED=0 GOOS=linux go build -o pcgamedb . + +FROM alpine:latest +WORKDIR /app +COPY --from=builder /app/pcgamedb /app/pcgamedb + +ENTRYPOINT ["/app/pcgamedb", "server"] \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 0000000..2a81ff9 --- /dev/null +++ b/README.md @@ -0,0 +1,40 @@ +# pcgamedb + +pcgamedb is a command-line tool that scrapes and manages repack game data from multiple online sources and can expose the collected data through a RESTful API. + +## Features + +- **Data Sources**: + + - Fitgirl + - KaOSKrew + - DODI + - ~~FreeGOG~~ + - GOGGames + - OnlineFix + - Xatab + - ~~ARMGDDN~~ + - SteamRIP + - Chovka + +- **Database**: + + - Stores game data in MongoDB + - Supports Redis for caching to improve performance + +- **RESTful API**: + - Provides an API for external access to the game data + +## Usage + +Run `go run . help` to see the available commands. + +## Configuration + +Edit the `config.json` file to set up your environment, or set the corresponding system environment variables. + +Read `/config/config.go` for more details. + +## API Doc + +Once the server is running, open `http://127.0.0.1:<port>/swagger/index.html` (default port `8080`) for the generated API documentation. diff --git a/build.sh b/build.sh new file mode 100644 index 0000000..1d0d44f --- /dev/null +++ b/build.sh @@ -0,0 +1,4 @@ +go install github.com/swaggo/swag/cmd/swag@latest +swag init +export CGO_ENABLED=0 +go build -o gamedb . 
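The README's Usage section relies on the cobra root command defined in `cmd/root.go`; `main.go` itself is not part of this excerpt. The following is a minimal sketch of how that entry point is typically wired up — the file name and error handling are assumptions, not code from this commit.

```go
// Hypothetical main.go: dispatches `go run . <command>` to the subcommands
// registered in cmd/*.go (server, crawl, organize, task, ...).
package main

import (
	"os"

	"pcgamedb/cmd"
)

func main() {
	if err := cmd.RootCmd.Execute(); err != nil {
		os.Exit(1)
	}
}
```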
diff --git a/cache/redis.go b/cache/redis.go new file mode 100644 index 0000000..c04145f --- /dev/null +++ b/cache/redis.go @@ -0,0 +1,83 @@ +package cache + +import ( + "context" + "fmt" + "pcgamedb/config" + "pcgamedb/log" + "sync" + "time" + + "github.com/redis/go-redis/v9" +) + +var cache *redis.Client +var mutx = &sync.RWMutex{} + +func connect() { + if !config.Config.RedisAvaliable { + return + } + cache = redis.NewClient(&redis.Options{ + Addr: fmt.Sprintf("%s:%d", config.Config.Redis.Host, config.Config.Redis.Port), + Password: config.Config.Redis.Password, + DB: config.Config.Redis.DBIndex, + }) + err := HealthCheck() + if err != nil { + log.Logger.Panic("Cannot connect to redis") + } + log.Logger.Info("Connected to redis") +} + +func CheckConnect() { + mutx.RLock() + if cache != nil { + mutx.RUnlock() + return + } + mutx.RUnlock() + + mutx.Lock() + if cache == nil { + connect() + } + mutx.Unlock() +} + +func HealthCheck() error { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + result, err := cache.Ping(ctx).Result() + if err != nil { + return err + } + if result != "PONG" { + return fmt.Errorf("unexpected response from Redis: %s", result) + } + return nil +} + +func Get(key string) (string, bool) { + CheckConnect() + ctx := context.Background() + value, err := cache.Get(ctx, key).Result() + if err != nil { + return "", false + } + return value, true +} + +func Add(key string, value interface{}) error { + CheckConnect() + ctx := context.Background() + cmd := cache.Set(ctx, key, value, 7*24*time.Hour) + return cmd.Err() +} + +func AddWithExpire(key string, value interface{}, expire time.Duration) error { + CheckConnect() + ctx := context.Background() + cmd := cache.Set(ctx, key, value, expire) + return cmd.Err() +} diff --git a/cmd/clean.go b/cmd/clean.go new file mode 100644 index 0000000..fad92ae --- /dev/null +++ b/cmd/clean.go @@ -0,0 +1,21 @@ +package cmd + +import ( + "pcgamedb/log" + "pcgamedb/task" + + "github.com/spf13/cobra" +) + +var cleanCmd = &cobra.Command{ + Use: "clean", + Long: "Clean database", + Short: "Clean database", + Run: func(cmd *cobra.Command, args []string) { + task.Clean(log.Logger) + }, +} + +func init() { + RootCmd.AddCommand(cleanCmd) +} diff --git a/cmd/crawl.go b/cmd/crawl.go new file mode 100644 index 0000000..1232854 --- /dev/null +++ b/cmd/crawl.go @@ -0,0 +1,140 @@ +package cmd + +import ( + "errors" + "fmt" + "pcgamedb/crawler" + "pcgamedb/log" + "pcgamedb/utils" + "strconv" + "strings" + + "github.com/spf13/cobra" + "go.uber.org/zap" +) + +var crawlCmd = &cobra.Command{ + Use: "crawl", + Long: "Crawl games from specific platforms", + Short: "Crawl games from specific platforms", + Run: crawlRun, +} + +type CrawlCommandConfig struct { + Source string + Page string + All bool + Num int +} + +var crawlCmdCfg CrawlCommandConfig + +var crawlerMap = map[string]crawler.Crawler{} + +func init() { + crawlerMap = crawler.BuildCrawlerMap(log.Logger) + allCrawlerBuilder := strings.Builder{} + paginationCrwalerBuilder := strings.Builder{} + noPaginationCrawlerBuilder := strings.Builder{} + for k, v := range crawlerMap { + allCrawlerBuilder.WriteString(k) + allCrawlerBuilder.WriteString(",") + if _, ok := v.(crawler.PagedCrawler); ok { + paginationCrwalerBuilder.WriteString(k) + paginationCrwalerBuilder.WriteString(",") + } else if _, ok := v.(crawler.SimpleCrawler); ok { + noPaginationCrawlerBuilder.WriteString(k) + noPaginationCrawlerBuilder.WriteString(",") + } + } + 
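The `init` loop above classifies the registered crawlers by interface in order to build the flag help text that follows. Because `SimpleCrawler` is declared as nothing more than `Crawler` (see `crawler/crawler.go` later in this commit), the `PagedCrawler` assertion must be checked first. A standalone sketch of the same classification, not part of the commit:

```go
package main

import (
	"fmt"

	"pcgamedb/crawler"
	"pcgamedb/log"
)

func main() {
	for name, c := range crawler.BuildCrawlerMap(log.Logger) {
		switch c.(type) {
		case crawler.PagedCrawler:
			fmt.Println(name, "supports -p/--pages and -a/--all")
		default:
			// every remaining Crawler also satisfies SimpleCrawler,
			// so it is driven by -n/--num instead of page numbers
			fmt.Println(name, "supports -n/--num")
		}
	}
}
```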
crawlCmd.Flags().StringVarP(&crawlCmdCfg.Source, "source", "s", "", fmt.Sprintf("source to crawl (%s)", strings.Trim(allCrawlerBuilder.String(), ","))) + crawlCmd.Flags().StringVarP(&crawlCmdCfg.Page, "pages", "p", "1", fmt.Sprintf("pages to crawl (1,2,3 or 1-3) (%s)", strings.Trim(paginationCrwalerBuilder.String(), ","))) + crawlCmd.Flags().BoolVarP(&crawlCmdCfg.All, "all", "a", false, "crawl all page") + crawlCmd.Flags().IntVarP(&crawlCmdCfg.Num, "num", "n", -1, fmt.Sprintf("number of items to process (%s)", strings.Trim(noPaginationCrawlerBuilder.String(), ","))) + RootCmd.AddCommand(crawlCmd) +} + +func crawlRun(cmd *cobra.Command, args []string) { + crawlCmdCfg.Source = strings.ToLower(crawlCmdCfg.Source) + + if crawlCmdCfg.Source == "" { + log.Logger.Error("Source is required") + return + } + + item, ok := crawlerMap[crawlCmdCfg.Source] + if !ok { + log.Logger.Error("Invalid source", zap.String("source", crawlCmdCfg.Source)) + return + } + + if c, ok := item.(crawler.PagedCrawler); ok { + if crawlCmdCfg.All { + _, err := c.CrawlAll() + if err != nil { + log.Logger.Error("Crawl error", zap.Error(err)) + return + } + } else { + pages, err := pagination(crawlCmdCfg.Page) + if err != nil { + log.Logger.Error("Invalid page", zap.String("page", crawlCmdCfg.Page)) + return + } + _, err = c.CrawlMulti(pages) + if err != nil { + log.Logger.Error("Crawl error", zap.Error(err)) + return + } + } + } else if c, ok := item.(crawler.SimpleCrawler); ok { + if crawlCmdCfg.All { + _, err := c.CrawlAll() + if err != nil { + log.Logger.Error("Crawl error", zap.Error(err)) + return + } + } else { + _, err := c.Crawl(crawlCmdCfg.Num) + if err != nil { + log.Logger.Error("Crawl error", zap.Error(err)) + return + } + } + } +} + +func pagination(pageStr string) ([]int, error) { + if pageStr == "" { + return nil, errors.New("empty page") + } + var pages []int + pageSlice := strings.Split(pageStr, ",") + for i := 0; i < len(pageSlice); i++ { + if strings.Contains(pageSlice[i], "-") { + pageRange := strings.Split(pageSlice[i], "-") + start, err := strconv.Atoi(pageRange[0]) + if err != nil { + return nil, err + } + end, err := strconv.Atoi(pageRange[1]) + if err != nil { + return nil, err + } + if start > end { + return nil, err + } + for j := start; j <= end; j++ { + pages = append(pages, j) + } + } else { + p, err := strconv.Atoi(pageSlice[i]) + if err != nil { + log.Logger.Error("Invalid page", zap.String("page", pageSlice[i])) + return nil, err + } + pages = append(pages, p) + } + } + return utils.Unique(pages), nil +} diff --git a/cmd/format.go b/cmd/format.go new file mode 100644 index 0000000..6a1a7e7 --- /dev/null +++ b/cmd/format.go @@ -0,0 +1,137 @@ +package cmd + +import ( + "pcgamedb/crawler" + "pcgamedb/db" + "pcgamedb/log" + "strings" + + "github.com/spf13/cobra" + "go.uber.org/zap" +) + +var formatCmd = &cobra.Command{ + Use: "format", + Short: "Format game downloads name by formatter", + Long: "Format game downloads name by formatter", + Run: formatRun, +} + +type FormatCommandConfig struct { + Source string +} + +var formatCmdCfg FormatCommandConfig + +func init() { + formatCmd.Flags().StringVarP(&formatCmdCfg.Source, "source", "s", "", "source to fix (fitgirl/dodi/kaoskrew/freegog/xatab/onlinefix/armgddn)") + RootCmd.AddCommand(formatCmd) +} + +func formatRun(cmd *cobra.Command, args []string) { + formatCmdCfg.Source = strings.ToLower(formatCmdCfg.Source) + switch formatCmdCfg.Source { + case "dodi": + items, err := db.GetDODIGameDownloads() + if err != nil { + log.Logger.Error("Failed to get 
games", zap.Error(err)) + return + } + for _, item := range items { + oldName := item.Name + item.Name = crawler.DODIFormatter(item.RawName) + if oldName != item.Name { + log.Logger.Info("Fix name", zap.String("old", oldName), zap.String("raw", item.RawName), zap.String("name", item.Name)) + err := db.SaveGameDownload(item) + if err != nil { + log.Logger.Error("Failed to update item", zap.Error(err)) + } + } + } + case "kaoskrew": + items, err := db.GetKaOsKrewGameDownloads() + if err != nil { + log.Logger.Error("Failed to get games", zap.Error(err)) + return + } + for _, item := range items { + oldName := item.Name + item.Name = crawler.KaOsKrewFormatter(item.RawName) + if oldName != item.Name { + log.Logger.Info("Fix name", zap.String("old", oldName), zap.String("raw", item.RawName), zap.String("name", item.Name)) + err := db.SaveGameDownload(item) + if err != nil { + log.Logger.Error("Failed to update item", zap.Error(err)) + } + } + } + case "freegog": + items, err := db.GetFreeGOGGameDownloads() + if err != nil { + log.Logger.Error("Failed to get games", zap.Error(err)) + return + } + for _, item := range items { + oldName := item.Name + item.Name = crawler.FreeGOGFormatter(item.RawName) + if oldName != item.Name { + log.Logger.Info("Fix name", zap.String("old", oldName), zap.String("raw", item.RawName), zap.String("name", item.Name)) + err := db.SaveGameDownload(item) + if err != nil { + log.Logger.Error("Failed to update item", zap.Error(err)) + } + } + } + case "xatab": + items, err := db.GetXatabGameDownloads() + if err != nil { + log.Logger.Error("Failed to get games", zap.Error(err)) + return + } + for _, item := range items { + oldName := item.Name + item.Name = crawler.XatabFormatter(item.RawName) + if oldName != item.Name { + log.Logger.Info("Fix name", zap.String("old", oldName), zap.String("raw", item.RawName), zap.String("name", item.Name)) + err := db.SaveGameDownload(item) + if err != nil { + log.Logger.Error("Failed to update item", zap.Error(err)) + } + } + } + case "onlinefix": + items, err := db.GetOnlineFixGameDownloads() + if err != nil { + log.Logger.Error("Failed to get games", zap.Error(err)) + return + } + for _, item := range items { + oldName := item.Name + item.Name = crawler.OnlineFixFormatter(item.RawName) + if oldName != item.Name { + log.Logger.Info("Fix name", zap.String("old", oldName), zap.String("raw", item.RawName), zap.String("name", item.Name)) + err := db.SaveGameDownload(item) + if err != nil { + log.Logger.Error("Failed to update item", zap.Error(err)) + } + } + } + case "armgddn": + items, err := db.GetARMGDDNGameDownloads() + if err != nil { + log.Logger.Error("Failed to get games", zap.Error(err)) + return + } + for _, item := range items { + oldName := item.Name + item.Name = crawler.ARMGDDNFormatter(item.RawName) + if oldName != item.Name { + log.Logger.Info("Fix name", zap.String("old", oldName), zap.String("raw", item.RawName), zap.String("name", item.Name)) + err := db.SaveGameDownload(item) + if err != nil { + log.Logger.Error("Failed to update item", zap.Error(err)) + } + } + } + } +} diff --git a/cmd/list.go b/cmd/list.go new file mode 100644 index 0000000..0be4c2b --- /dev/null +++ b/cmd/list.go @@ -0,0 +1,44 @@ +package cmd + +import ( + "pcgamedb/db" + "pcgamedb/log" + + "github.com/spf13/cobra" + "go.uber.org/zap" +) + +var listCmd = &cobra.Command{ + Use: "list", + Long: "List game infos by filter", + Short: "List game infos by filter", + Run: listRun, +} + +type listCommandConfig struct { + Unid bool +} + +var listCmdCfg 
listCommandConfig + +func init() { + listCmd.Flags().BoolVarP(&listCmdCfg.Unid, "unorganized", "u", false, "unorganized") + RootCmd.AddCommand(listCmd) +} + +func listRun(cmd *cobra.Command, args []string) { + if listCmdCfg.Unid { + games, err := db.GetUnorganizedGameDownloads(-1) + if err != nil { + log.Logger.Error("Failed to get games", zap.Error(err)) + } + for _, game := range games { + log.Logger.Info( + "Game", + zap.Any("game_id", game.ID), + zap.String("raw_name", game.RawName), + zap.String("name", game.Name), + ) + } + } +} diff --git a/cmd/organize.go b/cmd/organize.go new file mode 100644 index 0000000..dcfcd6b --- /dev/null +++ b/cmd/organize.go @@ -0,0 +1,47 @@ +package cmd + +import ( + "pcgamedb/crawler" + "pcgamedb/db" + "pcgamedb/log" + + "github.com/spf13/cobra" + "go.uber.org/zap" +) + +var organizeCmd = &cobra.Command{ + Use: "organize", + Long: "Organize game info by repack game's name", + Short: "Organize game info by repack game's name", + Run: organizeRun, +} + +type organizeCommandConfig struct { + Num int +} + +var organizeCmdCfg organizeCommandConfig + +func init() { + organizeCmd.Flags().IntVarP(&organizeCmdCfg.Num, "num", "n", -1, "number of items to process") + RootCmd.AddCommand(organizeCmd) +} + +func organizeRun(cmd *cobra.Command, args []string) { + games, err := db.GetUnorganizedGameDownloads(organizeCmdCfg.Num) + if err != nil { + log.Logger.Error("Failed to get games", zap.Error(err)) + } + for _, game := range games { + gameInfo, err := crawler.OrganizeGameDownload(game) + if err == nil { + err = db.SaveGameInfo(gameInfo) + if err != nil { + log.Logger.Error("Failed to save game info", zap.Error(err)) + continue + } + log.Logger.Info("Organized game", zap.String("name", game.Name)) + } + log.Logger.Error("Failed to organize game", zap.String("name", game.Name)) + } +} diff --git a/cmd/organize_manually.go b/cmd/organize_manually.go new file mode 100644 index 0000000..d6984ee --- /dev/null +++ b/cmd/organize_manually.go @@ -0,0 +1,72 @@ +package cmd + +import ( + "encoding/json" + "os" + "pcgamedb/crawler" + "pcgamedb/db" + "pcgamedb/log" + + "github.com/spf13/cobra" + "go.mongodb.org/mongo-driver/bson/primitive" + "go.uber.org/zap" +) + +var addCmd = &cobra.Command{ + Use: "manual", + Long: "Manually add information for games that cannot match IDs from IGDB, Steam or GOG", + Short: "Manually add information for games that cannot match IDs from IGDB, Steam or GOG", + Run: addRun, +} + +type ManualCommandConfig struct { + GameID string `json:"game_id"` + Platform string `json:"platform"` + PlatformID int `json:"platform_id"` + Config string +} + +var manualCmdCfg ManualCommandConfig + +func init() { + addCmd.Flags().StringVarP(&manualCmdCfg.GameID, "game-id", "i", "", "repack game id") + addCmd.Flags().StringVarP(&manualCmdCfg.Platform, "platform", "t", "", "platform") + addCmd.Flags().IntVarP(&manualCmdCfg.PlatformID, "platform-id", "p", 0, "platform id") + addCmd.Flags().StringVarP(&manualCmdCfg.Config, "config", "c", "", "config path") + organizeCmd.AddCommand(addCmd) +} + +func addRun(cmd *cobra.Command, args []string) { + c := []*ManualCommandConfig{} + if manualCmdCfg.Config != "" { + data, err := os.ReadFile(manualCmdCfg.Config) + if err != nil { + log.Logger.Error("Failed to read config file", zap.Error(err)) + return + } + if err = json.Unmarshal(data, &c); err != nil { + log.Logger.Error("Failed to unmarshal config file", zap.Error(err)) + return + } + } else { + c = append(c, &manualCmdCfg) + } + for _, v := range c { + objID, err := 
primitive.ObjectIDFromHex(v.GameID) + if err != nil { + log.Logger.Error("Failed to parse game id", zap.Error(err)) + continue + } + info, err := crawler.OrganizeGameDownloadManually(objID, v.Platform, v.PlatformID) + if err != nil { + log.Logger.Error("Failed to add game info", zap.Error(err)) + continue + } + err = db.SaveGameInfo(info) + if err != nil { + log.Logger.Error("Failed to save game info", zap.Error(err)) + continue + } + log.Logger.Info("Added game info", zap.String("game_id", v.GameID), zap.String("id_type", v.Platform), zap.Int("id", v.PlatformID)) + } +} diff --git a/cmd/root.go b/cmd/root.go new file mode 100644 index 0000000..ebef5cc --- /dev/null +++ b/cmd/root.go @@ -0,0 +1,7 @@ +package cmd + +import ( + "github.com/spf13/cobra" +) + +var RootCmd = &cobra.Command{} diff --git a/cmd/server.go b/cmd/server.go new file mode 100644 index 0000000..a9fb40b --- /dev/null +++ b/cmd/server.go @@ -0,0 +1,36 @@ +package cmd + +import ( + "pcgamedb/config" + "pcgamedb/server" + + "github.com/spf13/cobra" +) + +var serverCmd = &cobra.Command{ + Use: "server", + Long: "Start API server", + Short: "Start API server", + Run: ServerRun, +} + +type serverCommandConfig struct { + Port string + AutoCrawl bool +} + +var serverCmdCfg serverCommandConfig + +func init() { + serverCmd.Flags().StringVarP(&serverCmdCfg.Port, "port", "p", "8080", "server port") + serverCmd.Flags().BoolVarP(&serverCmdCfg.AutoCrawl, "auto-crawl", "c", true, "enable auto crawl") + RootCmd.AddCommand(serverCmd) +} + +func ServerRun(cmd *cobra.Command, args []string) { + if serverCmdCfg.AutoCrawl { + config.Config.Server.AutoCrawl = true + } + config.Config.Server.Port = serverCmdCfg.Port + server.Run() +} diff --git a/cmd/task.go b/cmd/task.go new file mode 100644 index 0000000..84ac44e --- /dev/null +++ b/cmd/task.go @@ -0,0 +1,38 @@ +package cmd + +import ( + "pcgamedb/log" + "pcgamedb/task" + + "github.com/robfig/cron/v3" + "github.com/spf13/cobra" + "go.uber.org/zap" +) + +type taskCommandConfig struct { + Crawl bool +} + +var taskCmdCfg taskCommandConfig + +var taskCmd = &cobra.Command{ + Use: "task", + Long: "Start task", + Run: func(cmd *cobra.Command, args []string) { + if taskCmdCfg.Crawl { + task.Crawl(log.Logger) + c := cron.New() + _, err := c.AddFunc("0 0 * * *", func() { task.Crawl(log.Logger) }) + if err != nil { + log.Logger.Error("Failed to add task", zap.Error(err)) + } + c.Start() + select {} + } + }, +} + +func init() { + taskCmd.Flags().BoolVarP(&taskCmdCfg.Crawl, "crawl", "c", false, "enable auto crawl") + RootCmd.AddCommand(taskCmd) +} diff --git a/cmd/update.go b/cmd/update.go new file mode 100644 index 0000000..258eaa2 --- /dev/null +++ b/cmd/update.go @@ -0,0 +1,57 @@ +package cmd + +import ( + "pcgamedb/crawler" + "pcgamedb/db" + "pcgamedb/log" + + "github.com/spf13/cobra" + "go.mongodb.org/mongo-driver/bson/primitive" + "go.uber.org/zap" +) + +var updateCmd = &cobra.Command{ + Use: "update", + Long: "Update game info by game data platform", + Short: "Update game info by game data platform", + Run: updateRun, +} + +type updateCommandConfig struct { + PlatformID int + Platform string + ID string +} + +var updateCmdcfx updateCommandConfig + +func init() { + updateCmd.Flags().IntVarP(&updateCmdcfx.PlatformID, "platform-id", "p", 0, "platform id") + updateCmd.Flags().StringVarP(&updateCmdcfx.Platform, "platform", "t", "", "platform") + updateCmd.Flags().StringVarP(&updateCmdcfx.ID, "game-id", "i", "", "game info id") + RootCmd.AddCommand(updateCmd) +} + +func updateRun(cmd *cobra.Command, args 
[]string) { + id, err := primitive.ObjectIDFromHex(updateCmdcfx.ID) + if err != nil { + log.Logger.Error("Failed to parse game info id", zap.Error(err)) + return + } + oldInfo, err := db.GetGameInfoByID(id) + if err != nil { + log.Logger.Error("Failed to get game info", zap.Error(err)) + return + } + newInfo, err := crawler.GenerateGameInfo(updateCmdcfx.Platform, updateCmdcfx.PlatformID) + if err != nil { + log.Logger.Error("Failed to generate game info", zap.Error(err)) + return + } + newInfo.ID = id + newInfo.GameIDs = oldInfo.GameIDs + err = db.SaveGameInfo(newInfo) + if err != nil { + log.Logger.Error("Failed to save game info", zap.Error(err)) + } +} diff --git a/config.example.json b/config.example.json new file mode 100644 index 0000000..ff44d7c --- /dev/null +++ b/config.example.json @@ -0,0 +1,27 @@ +{ + "log_level": "info", + "server": { + "port": "8080", + "secret_key": "default" + }, + "database": { + "host": "127.0.0.1", + "port": 27017, + "user": "root", + "password": "password", + "database": "gamedb" + }, + "redis": { + "host": "127.0.0.1", + "port": 6379, + "db_index": 0 + }, + "online_fix": { + "user": "user", + "password": "password" + }, + "twitch": { + "client_id": "client_id", + "client_secret": "client_secret" + } +} diff --git a/config/config.go b/config/config.go new file mode 100644 index 0000000..6ef7356 --- /dev/null +++ b/config/config.go @@ -0,0 +1,127 @@ +package config + +import ( + "bytes" + "encoding/json" + "os" + "os/exec" + "reflect" + "strconv" + "time" +) + +type config struct { + LogLevel string `env:"LOG_LEVEL" json:"log_level"` + Server server `json:"server"` + Database database `json:"database"` + Redis redis `json:"redis"` + OnlineFix onlinefix `json:"online_fix"` + Twitch twitch `json:"twitch"` + DatabaseAvaliable bool + OnlineFixAvaliable bool + MegaAvaliable bool + RedisAvaliable bool +} + +type server struct { + Port string `env:"SERVER_PORT" json:"port"` + SecretKey string `env:"SERVER_SECRET_KEY" json:"secret_key"` + AutoCrawl bool `env:"SERVER_AUTO_CRAWL" json:"auto_crawl"` +} + +type database struct { + Host string `env:"DATABASE_HOST" json:"host"` + Port int `env:"DATABASE_PORT" json:"port"` + User string `env:"DATABASE_USER" json:"user"` + Password string `env:"DATABASE_PASSWORD" json:"password"` + Database string `env:"DATABASE_NAME" json:"database"` +} + +type twitch struct { + ClientID string `env:"TWITCH_CLIENT_ID" json:"client_id"` + ClientSecret string `env:"TWITCH_CLIENT_SECRET" json:"client_secret"` +} + +type redis struct { + Host string `env:"REDIS_HOST" json:"host"` + Port int `env:"REDIS_PORT" json:"port"` + Password string `env:"REDIS_PASSWORD" json:"password"` + DBIndex int `env:"REDIS_DB" json:"db_index"` +} + +type onlinefix struct { + User string `env:"ONLINEFIX_USER" json:"user"` + Password string `env:"ONLINEFIX_PASSWORD" json:"password"` +} + +type runtimeConfig struct { + ServerStartTime time.Time +} + +var Config config +var Runtime runtimeConfig + +func init() { + Config = config{ + LogLevel: "info", + Database: database{ + Port: 27017, + User: "root", + Password: "password", + }, + MegaAvaliable: TestMega(), + } + if _, err := os.Stat("config.json"); err == nil { + configData, err := os.ReadFile("config.json") + if err != nil { + panic(err) + } + err = json.Unmarshal(configData, &Config) + if err != nil { + panic(err) + } + } + loadEnvVariables(&Config) + Config.OnlineFixAvaliable = Config.OnlineFix.User != "" && Config.OnlineFix.Password != "" + Config.RedisAvaliable = Config.Redis.Host != "" + 
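`loadEnvVariables`, defined just below, walks the config struct by reflection and overrides any field whose `env` tag matches a set environment variable, so precedence is compiled-in defaults, then `config.json`, then the environment. A standalone sketch of the same pattern — the `demo` type and `DEMO_*` names are illustrative, not from the repo:

```go
package main

import (
	"fmt"
	"os"
	"reflect"
)

type demo struct {
	Host string `env:"DEMO_HOST"`
	Port string `env:"DEMO_PORT"`
}

// fillFromEnv overrides string fields whose `env` tag names a set variable.
func fillFromEnv(cfg interface{}) {
	v := reflect.ValueOf(cfg).Elem()
	t := v.Type()
	for i := 0; i < v.NumField(); i++ {
		if val := os.Getenv(t.Field(i).Tag.Get("env")); val != "" {
			v.Field(i).SetString(val) // this sketch only handles string fields
		}
	}
}

func main() {
	os.Setenv("DEMO_HOST", "mongo.internal")
	d := demo{Host: "127.0.0.1", Port: "27017"}
	fillFromEnv(&d)
	fmt.Println(d.Host, d.Port) // mongo.internal 27017
}
```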
Config.DatabaseAvaliable = Config.Database.Database != "" && Config.Database.Host != "" +} + +func loadEnvVariables(cfg interface{}) { + v := reflect.ValueOf(cfg).Elem() + t := v.Type() + for i := 0; i < v.NumField(); i++ { + field := t.Field(i) + envTag := field.Tag.Get("env") + if envTag == "" || envTag == "-" { + if field.Type.Kind() == reflect.Struct { + loadEnvVariables(v.Field(i).Addr().Interface()) + } + continue + } + envValue := os.Getenv(envTag) + if envValue == "" { + continue + } + switch field.Type.Kind() { + case reflect.String: + v.Field(i).SetString(envValue) + case reflect.Int: + if value, err := strconv.Atoi(envValue); err == nil { + v.Field(i).SetInt(int64(value)) + } + case reflect.Bool: + if value, err := strconv.ParseBool(envValue); err == nil { + v.Field(i).SetBool(value) + } + } + } +} + +func TestMega() bool { + cmd := exec.Command("mega-get", "--help") + var out bytes.Buffer + cmd.Stdout = &out + err := cmd.Run() + return err == nil +} diff --git a/constant/graphql.go b/constant/graphql.go new file mode 100644 index 0000000..65ca20d --- /dev/null +++ b/constant/graphql.go @@ -0,0 +1,3 @@ +package constant + +const EpicStoreSearchQuery = "query searchStoreQuery($allowCountries: String, $category: String, $count: Int, $country: String!, $keywords: String, $locale: String, $namespace: String, $itemNs: String, $sortBy: String, $sortDir: String, $start: Int, $tag: String, $releaseDate: String, $withPrice: Boolean = false, $withPromotions: Boolean = false) {\n Catalog {\n searchStore(allowCountries: $allowCountries, category: $category, count: $count, country: $country, keywords: $keywords, locale: $locale, namespace: $namespace, itemNs: $itemNs, sortBy: $sortBy, sortDir: $sortDir, releaseDate: $releaseDate, start: $start, tag: $tag) {\n elements {\n title\n id\n namespace\n description\n effectiveDate\n keyImages {\n type\n url\n }\n seller {\n id\n name\n }\n productSlug\n urlSlug\n url\n tags {\n id\n }\n items {\n id\n namespace\n }\n customAttributes {\n key\n value\n }\n categories {\n path\n }\n price(country: $country) @include(if: $withPrice) {\n totalPrice {\n discountPrice\n originalPrice\n voucherDiscount\n discount\n currencyCode\n currencyInfo {\n decimals\n }\n fmtPrice(locale: $locale) {\n originalPrice\n discountPrice\n intermediatePrice\n }\n }\n lineOffers {\n appliedRules {\n id\n endDate\n discountSetting {\n discountType\n }\n }\n }\n }\n promotions(category: $category) @include(if: $withPromotions) {\n promotionalOffers {\n promotionalOffers {\n startDate\n endDate\n discountSetting {\n discountType\n discountPercentage\n }\n }\n }\n upcomingPromotionalOffers {\n promotionalOffers {\n startDate\n endDate\n discountSetting {\n discountType\n discountPercentage\n }\n }\n }\n }\n }\n paging {\n count\n total\n }\n }\n }\n}\n" diff --git a/constant/language.go b/constant/language.go new file mode 100644 index 0000000..b3ae9fb --- /dev/null +++ b/constant/language.go @@ -0,0 +1,121 @@ +package constant + +type language struct { + Name string `json:"name"` + NativeName string `json:"native_name"` +} + +var IGDBLanguages map[int]language = map[int]language{ + 1: { + Name: "Arabic", + NativeName: "العربية", + }, + 2: { + Name: "Chinese (Simplified)", + NativeName: "简体中文", + }, + 3: { + Name: "Chinese (Traditional)", + NativeName: "繁體中文", + }, + 4: { + Name: "Czech", + NativeName: "čeština", + }, + 5: { + Name: "Danish", + NativeName: "Dansk", + }, + 6: { + Name: "Dutch", + NativeName: "Nederlands", + }, + 7: { + Name: "English", + NativeName: "English 
(US)", + }, + 8: { + Name: "English (UK)", + NativeName: "English (UK)", + }, + 9: { + Name: "Spanish (Spain)", + NativeName: "Español (España)", + }, + 10: { + Name: "Spanish (Mexico)", + NativeName: "Español (Mexico)", + }, + 12: { + Name: "French", + NativeName: "Français", + }, + 14: { + Name: "Hungarian", + NativeName: "Magyar", + }, + 11: { + Name: "Finnish", + NativeName: "Suomi", + }, + 15: { + Name: "Italian", + NativeName: "Italiano", + }, + 13: { + Name: "Hebrew", + NativeName: "עברית", + }, + 16: { + Name: "Japanese", + NativeName: "日本語", + }, + 17: { + Name: "Korean", + NativeName: "한국어", + }, + 18: { + Name: "Norwegian", + NativeName: "Norsk", + }, + 20: { + Name: "Portuguese (Portugal)", + NativeName: "Português (Portugal)", + }, + 21: { + Name: "Portuguese (Brazil)", + NativeName: "Português (Brasil)", + }, + 19: { + Name: "Polish", + NativeName: "Polski", + }, + 22: { + Name: "Russian", + NativeName: "Русский", + }, + 24: { + Name: "Turkish", + NativeName: "Türkçe", + }, + 25: { + Name: "Thai", + NativeName: "ไทย", + }, + 26: { + Name: "Vietnamese", + NativeName: "Tiếng Việt", + }, + 23: { + Name: "Swedish", + NativeName: "Svenska", + }, + 27: { + Name: "German", + NativeName: "Deutsch", + }, + 28: { + Name: "Ukrainian", + NativeName: "українська", + }, +} diff --git a/constant/url.go b/constant/url.go new file mode 100644 index 0000000..2256443 --- /dev/null +++ b/constant/url.go @@ -0,0 +1,32 @@ +package constant + +const ( + C1337xBaseURL = "https://www.1337x.to" + FreeGOGListURL = "https://freegogpcgames.com/a-z-games-list" + GOGGamesBaseURL = "https://www.gog-games.to" + GOGGamesURL = "https://www.gog-games.to/search/all/%v/date/desc/any" + GOGSearchURL = "https://embed.gog.com/games/ajax/filtered" + GOGDetailsURL = "https://api.gog.com/products" + SteamSearchURL = "https://store.steampowered.com/search" + SteamAppDetailURL = "https://store.steampowered.com/api/appdetails" + SteamAllAppsURL = "https://api.steampowered.com/ISteamApps/GetAppList/v2/?format=json" + XatabBaseURL = "https://byxatab.com" + GoogleSearchURL = "https://www.google.com/search" + BingSearchURL = "https://www.bing.com/search" + OnlineFixURL = "https://online-fix.me" + OnlineFixCSRFURL = "https://online-fix.me/engine/ajax/authtoken.php" + IGDBGameURL = "https://api.igdb.com/v4/games" + IGDBSearchURL = "https://api.igdb.com/v4/search" + IGDBCompaniesURL = "https://api.igdb.com/v4/companies" + IGDBWebsitesURL = "https://api.igdb.com/v4/websites" + TwitchAuthURL = "https://id.twitch.tv/oauth2/token" + Steam250Top250URL = "https://steam250.com/top250" + Steam250BestOfTheYearURL = "https://steam250.com/%v" + Steam250WeekTop50URL = "https://steam250.com/7day" + Steam250MostPlayedURL = "https://steam250.com/most_played" + FitGirlURL = "https://fitgirl-repacks.site/page/%v/" + SteamRIPBaseURL = "https://steamrip.com" + SteamRIPGameListURL = "https://steamrip.com/games-list-page/" + RepackInfoURL = "https://repack.info/page/%v/" + GnarlyURL = "https://rentry.org/gnarly_repacks" +) diff --git a/crawler/1337x.go b/crawler/1337x.go new file mode 100644 index 0000000..515d33e --- /dev/null +++ b/crawler/1337x.go @@ -0,0 +1,182 @@ +package crawler + +import ( + "bytes" + "errors" + "fmt" + "regexp" + "strconv" + "strings" + + "pcgamedb/constant" + "pcgamedb/db" + "pcgamedb/model" + "pcgamedb/utils" + + "github.com/PuerkitoBio/goquery" + "go.uber.org/zap" +) + +type Formatter func(string) string + +type s1337xCrawler struct { + source string + formatter Formatter + logger *zap.Logger +} + +func 
New1337xCrawler(source string, formatter Formatter, logger *zap.Logger) *s1337xCrawler { + return &s1337xCrawler{ + source: source, + formatter: formatter, + logger: logger, + } +} + +func (c *s1337xCrawler) Crawl(page int) ([]*model.GameDownload, error) { + var resp *utils.FetchResponse + var doc *goquery.Document + var err error + requestUrl := fmt.Sprintf("%s/%s/%d/", constant.C1337xBaseURL, c.source, page) + resp, err = utils.Fetch(utils.FetchConfig{ + Url: requestUrl, + }) + if err != nil { + return nil, err + } + doc, err = goquery.NewDocumentFromReader(bytes.NewReader(resp.Data)) + if err != nil { + return nil, err + } + trSelection := doc.Find("tbody>tr") + urls := []string{} + trSelection.Each(func(i int, trNode *goquery.Selection) { + nameSelection := trNode.Find(".name").First() + if aNode := nameSelection.Find("a").Eq(1); aNode.Length() > 0 { + url, _ := aNode.Attr("href") + urls = append(urls, url) + } + }) + var res []*model.GameDownload + for _, u := range urls { + u = fmt.Sprintf("%s%s", constant.C1337xBaseURL, u) + if db.IsGameCrawledByURL(u) { + continue + } + c.logger.Info("Crawling", zap.String("URL", u)) + item, err := c.CrawlByUrl(u) + if err != nil { + c.logger.Warn("Failed to crawl", zap.Error(err), zap.String("URL", u)) + continue + } + err = db.SaveGameDownload(item) + if err != nil { + c.logger.Warn("Failed to save", zap.Error(err), zap.String("URL", u)) + continue + } + res = append(res, item) + info, err := OrganizeGameDownload(item) + if err != nil { + c.logger.Warn("Failed to organize", zap.Error(err), zap.String("URL", u)) + continue + } + err = db.SaveGameInfo(info) + if err != nil { + c.logger.Warn("Failed to save", zap.Error(err), zap.String("URL", u)) + continue + } + } + return res, nil +} + +func (c *s1337xCrawler) CrawlByUrl(url string) (*model.GameDownload, error) { + resp, err := utils.Fetch(utils.FetchConfig{ + Url: url, + }) + if err != nil { + return nil, err + } + var item = &model.GameDownload{} + item.Url = url + doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data)) + if err != nil { + return nil, err + } + selection := doc.Find(".torrent-detail-page ul.list>li") + info := make(map[string]string) + selection.Each(func(i int, item *goquery.Selection) { + info[strings.TrimSpace(item.Find("strong").Text())] = strings.TrimSpace(item.Find("span").Text()) + }) + magnetRegex := regexp.MustCompile(`magnet:\?[^"]*`) + magnetRegexRes := magnetRegex.FindStringSubmatch(string(resp.Data)) + item.Size = info["Total size"] + item.RawName = doc.Find("title").Text() + item.RawName = strings.Replace(item.RawName, "Download ", "", 1) + item.RawName = strings.TrimSpace(strings.Replace(item.RawName, "Torrent | 1337x", " ", 1)) + item.Name = c.formatter(item.RawName) + item.Download = magnetRegexRes[0] + item.Author = strings.Replace(c.source, "-torrents", "", -1) + return item, nil +} + +func (c *s1337xCrawler) CrawlMulti(pages []int) (res []*model.GameDownload, err error) { + var items []*model.GameDownload + totalPageNum, err := c.GetTotalPageNum() + if err != nil { + return nil, err + } + for _, page := range pages { + if page > totalPageNum { + continue + } + items, err = c.Crawl(page) + res = append(res, items...) 
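`CrawlByUrl` above pulls the magnet link straight out of the page source with the pattern `magnet:\?[^"]*`, and the same expression is reused by the FitGirl and GOG Games crawlers later in this commit. A tiny sketch of what it captures — the HTML fragment is made up:

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	page := `<a href="magnet:?xt=urn:btih:abc123&dn=Some.Game">Download</a>`
	// Matches from "magnet:?" up to, but not including, the closing quote.
	magnet := regexp.MustCompile(`magnet:\?[^"]*`).FindString(page)
	fmt.Println(magnet) // magnet:?xt=urn:btih:abc123&dn=Some.Game
}
```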
+ if err != nil { + return nil, err + } + } + return res, nil +} + +func (c *s1337xCrawler) CrawlAll() (res []*model.GameDownload, err error) { + totalPageNum, err := c.GetTotalPageNum() + if err != nil { + return nil, err + } + var items []*model.GameDownload + for i := 1; i <= totalPageNum; i++ { + items, err = c.Crawl(i) + res = append(res, items...) + if err != nil { + return nil, err + } + } + return res, nil +} + +func (c *s1337xCrawler) GetTotalPageNum() (int, error) { + var resp *utils.FetchResponse + var doc *goquery.Document + var err error + + requestUrl := fmt.Sprintf("%s/%s/%d/", constant.C1337xBaseURL, c.source, 1) + resp, err = utils.Fetch(utils.FetchConfig{ + Url: requestUrl, + }) + if err != nil { + return 0, err + } + doc, _ = goquery.NewDocumentFromReader(bytes.NewReader(resp.Data)) + selection := doc.Find(".last") + pageStr, exist := selection.Find("a").Attr("href") + if !exist { + return 0, errors.New("total page num not found") + } + pageStr = strings.ReplaceAll(pageStr, c.source, "") + pageStr = strings.ReplaceAll(pageStr, "/", "") + totalPageNum, err := strconv.Atoi(pageStr) + if err != nil { + return 0, err + } + return totalPageNum, nil +} diff --git a/crawler/armgddn.go b/crawler/armgddn.go new file mode 100644 index 0000000..c32997a --- /dev/null +++ b/crawler/armgddn.go @@ -0,0 +1,213 @@ +package crawler + +import ( + "crypto/tls" + "encoding/json" + "fmt" + "io" + "net/url" + "pcgamedb/db" + "pcgamedb/model" + "pcgamedb/utils" + "regexp" + "strconv" + "strings" + "time" + + "github.com/jlaffaye/ftp" + "go.uber.org/zap" +) + +const ( + ftpAddress = "72.21.17.26:13017" + ftpUsername = "ARMGDDNGames" + ftpPassword = "ARMGDDNGames" +) + +type GameData struct { + NumberOfGame string `json:"Number of game"` + AppID string `json:"appid"` + FolderName string `json:"foldername"` +} + +type ARMGDDNCrawler struct { + logger zap.Logger + conn *ftp.ServerConn +} + +// Deprecated: ARMGDDN has changed resource distribution method +func NewARMGDDNCrawler(logger *zap.Logger) *ARMGDDNCrawler { + return &ARMGDDNCrawler{ + logger: *logger, + } +} + +func (c *ARMGDDNCrawler) connectFTP() error { + var err error + tlsConfig := &tls.Config{InsecureSkipVerify: true} + c.conn, err = ftp.Dial(ftpAddress, ftp.DialWithTimeout(5*time.Second), ftp.DialWithExplicitTLS(tlsConfig)) + if err != nil { + return err + } + if err = c.conn.Login(ftpUsername, ftpPassword); err != nil { + return err + } + return nil +} + +func (c *ARMGDDNCrawler) fetchAndParseFTPData(filePath string) ([]GameData, error) { + r, err := c.conn.Retr(filePath) + if err != nil { + return nil, err + } + defer r.Close() + + buf, err := io.ReadAll(r) + if err != nil { + return nil, err + } + + var data []GameData + if err = json.Unmarshal(buf, &data); err != nil { + return nil, err + } + return data, nil +} + +func (c *ARMGDDNCrawler) crawlGames(data []GameData, platform string, num int) ([]*model.GameDownload, error) { + count := 0 + var res []*model.GameDownload + modTimeMap := make(map[string]time.Time) + entries, err := c.conn.List(fmt.Sprintf("/%s", platform)) + if err != nil { + return nil, err + } + for _, entry := range entries { + if entry.Type == ftp.EntryTypeFolder { + modTimeMap[entry.Name] = entry.Time + } + } + for _, v := range data { + if count == num { + break + } + path := fmt.Sprintf("/%s/%s", platform, v.FolderName) + u := fmt.Sprintf("ARMGDDNGames/%s/%s", platform, v.NumberOfGame) + modTime, ok := modTimeMap[v.FolderName] + if !ok { + c.logger.Warn("mod time not found", zap.String("url", u)) + continue + 
} + updateFlag := fmt.Sprintf("ARMGDDNGames/%s/%s/%s", platform, v.NumberOfGame, modTime.UTC().String()) + if db.IsARMGDDNCrawled(updateFlag) { + continue + } + c.logger.Info("Crawling", zap.String("url", u)) + walker := c.conn.Walk(path) + size := int64(0) + for walker.Next() { + if walker.Stat().Type == ftp.EntryTypeFile { + fileSize, err := c.conn.FileSize(walker.Path()) + if err != nil { + c.logger.Warn("file size error", zap.Error(err)) + break + } + size += fileSize + } + } + item, err := db.GetGameDownloadByUrl(u) + if err != nil { + continue + } + item.Url = u + item.Name = ARMGDDNFormatter(v.FolderName) + item.UpdateFlag = updateFlag + item.Size = utils.FormatSize(size) + item.RawName = v.FolderName + item.Author = "ARMGDDN" + item.Download = fmt.Sprintf("ftpes://%s:%s@%s/%s/%s", ftpUsername, ftpPassword, ftpAddress, platform, url.QueryEscape(v.FolderName)) + if err := db.SaveGameDownload(item); err != nil { + continue + } + res = append(res, item) + count++ + var id int + var info *model.GameInfo + if v.AppID != "NONSTEAM" { + id, err = strconv.Atoi(v.AppID) + if err != nil { + c.logger.Warn("strconv error", zap.Error(err)) + continue + } + info, err = OrganizeGameDownloadWithSteam(id, item) + if err != nil { + continue + } + } else { + info, err = OrganizeGameDownload(item) + if err != nil { + continue + } + } + err = db.SaveGameInfo(info) + if err != nil { + c.logger.Warn("save game info error", zap.Error(err)) + continue + } + } + return res, nil +} + +func ARMGDDNFormatter(name string) string { + cleanedName := strings.ReplaceAll(strings.TrimSpace(name), "-ARMGDDN", "") + matchIndex := regexp.MustCompile(`v\d`).FindStringIndex(cleanedName) + if matchIndex == nil { + return cleanedName + } + return strings.TrimSpace(cleanedName[:matchIndex[0]]) +} + +func (c *ARMGDDNCrawler) CrawlPC(num int) ([]*model.GameDownload, error) { + return c.crawlPlatform("/PC/currentserverPC-FTP.json", "PC", num) +} + +func (c *ARMGDDNCrawler) CrawlPCVR(num int) ([]*model.GameDownload, error) { + return c.crawlPlatform("/PCVR/currentserverPCVR-FTP.json", "PCVR", num) +} + +func (c *ARMGDDNCrawler) Crawl(num int) ([]*model.GameDownload, error) { + num1 := num / 2 + num2 := num - num1 + if num == -1 { + num1 = -1 + num2 = -1 + } + res1, err := c.CrawlPC(num1) + if err != nil { + return nil, err + } + res2, err := c.CrawlPCVR(num2) + if err != nil { + return nil, err + } + return append(res1, res2...), nil +} + +func (c *ARMGDDNCrawler) CrawlAll() ([]*model.GameDownload, error) { + return c.Crawl(-1) +} + +func (c *ARMGDDNCrawler) crawlPlatform(jsonFile, platform string, num int) ([]*model.GameDownload, error) { + err := c.connectFTP() + if err != nil { + return nil, err + } + defer func() { _ = c.conn.Quit() }() + + data, err := c.fetchAndParseFTPData(jsonFile) + if err != nil { + return nil, err + } + + return c.crawlGames(data, platform, num) +} diff --git a/crawler/chovka.go b/crawler/chovka.go new file mode 100644 index 0000000..35d5582 --- /dev/null +++ b/crawler/chovka.go @@ -0,0 +1,179 @@ +package crawler + +import ( + "bytes" + "errors" + "fmt" + "pcgamedb/constant" + "pcgamedb/db" + "pcgamedb/model" + "pcgamedb/utils" + "strconv" + "strings" + + "github.com/PuerkitoBio/goquery" + "go.uber.org/zap" +) + +type ChovkaCrawler struct { + logger *zap.Logger +} + +func NewChovkaCrawler(logger *zap.Logger) *ChovkaCrawler { + return &ChovkaCrawler{ + logger: logger, + } +} + +func (c *ChovkaCrawler) CrawlByUrl(url string) (*model.GameDownload, error) { + resp, err := utils.Fetch(utils.FetchConfig{ + 
Url: url, + }) + if err != nil { + return nil, err + } + doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data)) + if err != nil { + return nil, err + } + item, err := db.GetGameDownloadByUrl(url) + if err != nil { + return nil, err + } + item.Url = url + item.RawName = doc.Find(".inner-entry__title").First().Text() + item.Name = ChovkaFormatter(item.RawName) + item.Author = "Chovka" + item.UpdateFlag = item.RawName + downloadURL := doc.Find(".download-torrent").AttrOr("href", "") + if downloadURL == "" { + return nil, errors.New("Failed to find download URL") + } + resp, err = utils.Fetch(utils.FetchConfig{ + Headers: map[string]string{"Referer": url}, + Url: downloadURL, + }) + if err != nil { + return nil, err + } + magnet, size, err := utils.ConvertTorrentToMagnet(resp.Data) + if err != nil { + return nil, err + } + item.Size = size + item.Download = magnet + return item, nil +} + +func (c *ChovkaCrawler) Crawl(page int) ([]*model.GameDownload, error) { + resp, err := utils.Fetch(utils.FetchConfig{ + Url: fmt.Sprintf(constant.RepackInfoURL, page), + }) + if err != nil { + return nil, err + } + doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data)) + if err != nil { + return nil, err + } + urls := []string{} + updateFlags := []string{} + doc.Find(".entry").Each(func(i int, s *goquery.Selection) { + u, exist := s.Find(".entry__title.h2 a").Attr("href") + if !exist { + return + } + urls = append(urls, u) + updateFlags = append(updateFlags, s.Find(".entry__title.h2 a").Text()) + }) + var res []*model.GameDownload + for i, u := range urls { + if db.IsChovkaCrawled(updateFlags[i]) { + continue + } + c.logger.Info("Crawling", zap.String("URL", u)) + item, err := c.CrawlByUrl(u) + if err != nil { + c.logger.Warn("Failed to crawl", zap.Error(err), zap.String("URL", u)) + continue + } + if err := db.SaveGameDownload(item); err != nil { + c.logger.Warn("Failed to save", zap.Error(err), zap.String("URL", u)) + continue + } + res = append(res, item) + info, err := OrganizeGameDownload(item) + if err != nil { + c.logger.Warn("Failed to organize", zap.Error(err), zap.String("URL", u)) + continue + } + if err := db.SaveGameInfo(info); err != nil { + c.logger.Warn("Failed to save", zap.Error(err), zap.String("URL", u)) + continue + } + } + return res, nil +} + +func (c *ChovkaCrawler) CrawlMulti(pages []int) ([]*model.GameDownload, error) { + var res []*model.GameDownload + for _, page := range pages { + items, err := c.Crawl(page) + if err != nil { + return nil, err + } + res = append(res, items...) + } + return res, nil +} + +func (c *ChovkaCrawler) CrawlAll() ([]*model.GameDownload, error) { + totalPageNum, err := c.GetTotalPageNum() + if err != nil { + return nil, err + } + var res []*model.GameDownload + for i := 1; i <= totalPageNum; i++ { + items, err := c.Crawl(i) + if err != nil { + return nil, err + } + res = append(res, items...) 
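Every crawler in this commit repeats the same ingest pipeline seen in `ChovkaCrawler.Crawl` above: fetch a release page, persist the `GameDownload`, then try to match and persist a `GameInfo`. A library-style sketch of that flow — the `ingest` helper is illustrative, not part of the commit:

```go
package example

import (
	"pcgamedb/crawler"
	"pcgamedb/db"
)

// ingest mirrors the per-URL flow used by the crawlers in this commit.
func ingest(c *crawler.ChovkaCrawler, url string) error {
	item, err := c.CrawlByUrl(url) // scrape one release page
	if err != nil {
		return err
	}
	if err := db.SaveGameDownload(item); err != nil { // store the repack entry
		return err
	}
	info, err := crawler.OrganizeGameDownload(item) // match against IGDB/Steam
	if err != nil {
		return err
	}
	return db.SaveGameInfo(info) // store the matched metadata
}
```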
+ } + return res, nil +} + +func (c *ChovkaCrawler) GetTotalPageNum() (int, error) { + resp, err := utils.Fetch(utils.FetchConfig{ + Url: fmt.Sprintf(constant.RepackInfoURL, 1), + }) + if err != nil { + return 0, err + } + doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data)) + if err != nil { + return 0, err + } + pageStr := doc.Find(".pagination>a").Last().Text() + totalPageNum, err := strconv.Atoi(pageStr) + if err != nil { + return 0, err + } + return totalPageNum, nil +} + +func ChovkaFormatter(name string) string { + idx := strings.Index(name, "| RePack") + if idx != -1 { + name = name[:idx] + } + idx = strings.Index(name, "| GOG") + if idx != -1 { + name = name[:idx] + } + idx = strings.Index(name, "| Portable") + if idx != -1 { + name = name[:idx] + } + return strings.TrimSpace(name) +} diff --git a/crawler/crawler.go b/crawler/crawler.go new file mode 100644 index 0000000..9ac5953 --- /dev/null +++ b/crawler/crawler.go @@ -0,0 +1,38 @@ +package crawler + +import ( + "pcgamedb/model" + + "go.uber.org/zap" +) + +type Crawler interface { + Crawl(int) ([]*model.GameDownload, error) + CrawlAll() ([]*model.GameDownload, error) +} + +type SimpleCrawler interface { + Crawler +} + +type PagedCrawler interface { + Crawler + CrawlMulti([]int) ([]*model.GameDownload, error) + GetTotalPageNum() (int, error) +} + +func BuildCrawlerMap(logger *zap.Logger) map[string]Crawler { + return map[string]Crawler{ + "fitgirl": NewFitGirlCrawler(logger), + "dodi": NewDODICrawler(logger), + "kaoskrew": NewKaOsKrewCrawler(logger), + // "freegog": NewFreeGOGCrawler(logger), + "xatab": NewXatabCrawler(logger), + "onlinefix": NewOnlineFixCrawler(logger), + "steamrip": NewSteamRIPCrawler(logger), + // "armgddn": NewARMGDDNCrawler(logger), + "goggames": NewGOGGamesCrawler(logger), + "chovka": NewChovkaCrawler(logger), + // "gnarly": NewGnarlyCrawler(logger), + } +} diff --git a/crawler/dodi.go b/crawler/dodi.go new file mode 100644 index 0000000..1e64bcb --- /dev/null +++ b/crawler/dodi.go @@ -0,0 +1,98 @@ +package crawler + +import ( + "pcgamedb/model" + "pcgamedb/utils" + "regexp" + "strings" + + "go.uber.org/zap" +) + +const DODIName string = "DODI-torrents" + +type DODICrawler struct { + logger *zap.Logger + crawler s1337xCrawler +} + +func NewDODICrawler(logger *zap.Logger) *DODICrawler { + return &DODICrawler{ + logger: logger, + crawler: *New1337xCrawler( + DODIName, + DODIFormatter, + logger, + ), + } +} + +func (c *DODICrawler) Crawl(page int) ([]*model.GameDownload, error) { + return c.crawler.Crawl(page) +} + +func (c *DODICrawler) CrawlByUrl(url string) (*model.GameDownload, error) { + return c.crawler.CrawlByUrl(url) +} + +func (c *DODICrawler) CrawlMulti(pages []int) ([]*model.GameDownload, error) { + return c.crawler.CrawlMulti(pages) +} + +func (c *DODICrawler) CrawlAll() ([]*model.GameDownload, error) { + return c.crawler.CrawlAll() +} + +func (c *DODICrawler) GetTotalPageNum() (int, error) { + return c.crawler.GetTotalPageNum() +} + +var dodiRegexps = []*regexp.Regexp{ + regexp.MustCompile(`(?i)\s{2,}`), + regexp.MustCompile(`(?i)[\-\+]\s?[^:\-]*?\s(Edition|Bundle|Pack|Set|Remake|Collection)`), +} + +func DODIFormatter(name string) string { + name = strings.Replace(name, "- [DODI Repack]", "", -1) + name = strings.Replace(name, "- Campaign Remastered", "", -1) + name = strings.Replace(name, "- Remastered", "", -1) + if index := strings.Index(name, "+"); index != -1 { + name = name[:index] + } + if index := strings.Index(name, "–"); index != -1 { + name = name[:index] + } + if 
index := strings.Index(name, "("); index != -1 { + name = name[:index] + } + if index := strings.Index(name, "["); index != -1 { + name = name[:index] + } + if index := strings.Index(name, "- AiO"); index != -1 { + name = name[:index] + } + if index := strings.Index(name, "- All In One"); index != -1 { + name = name[:index] + } + for _, re := range dodiRegexps { + name = strings.TrimSpace(re.ReplaceAllString(name, "")) + } + name = strings.TrimSpace(name) + name = strings.Replace(name, "- Portable", "", -1) + name = strings.Replace(name, "- Remastered", "", -1) + + if index := strings.Index(name, "/"); index != -1 { + names := strings.Split(name, "/") + longestLength := 0 + longestName := "" + for _, n := range names { + if !utils.ContainsRussian(n) && len(n) > longestLength { + longestLength = len(n) + longestName = n + } + } + name = longestName + } + + return strings.TrimSpace(name) +} diff --git a/crawler/fitgirl.go b/crawler/fitgirl.go new file mode 100644 index 0000000..da5d8e7 --- /dev/null +++ b/crawler/fitgirl.go @@ -0,0 +1,171 @@ +package crawler + +import ( + "bytes" + "errors" + "fmt" + "pcgamedb/constant" + "pcgamedb/db" + "pcgamedb/model" + "pcgamedb/utils" + "regexp" + "strconv" + "strings" + + "github.com/PuerkitoBio/goquery" + "go.uber.org/zap" +) + +type FitGirlCrawler struct { + logger *zap.Logger +} + +func NewFitGirlCrawler(logger *zap.Logger) *FitGirlCrawler { + return &FitGirlCrawler{ + logger: logger, + } +} + +func (c *FitGirlCrawler) CrawlByUrl(url string) (*model.GameDownload, error) { + resp, err := utils.Fetch(utils.FetchConfig{ + Url: url, + }) + if err != nil { + return nil, err + } + doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data)) + if err != nil { + return nil, err + } + titleElem := doc.Find("h3").First().Find("strong") + if titleElem.Length() == 0 { + return nil, errors.New("Failed to find title") + } + rawTitle := titleElem.Text() + titleElem.Children().Remove() + title := strings.TrimSpace(titleElem.Text()) + sizeRegex := regexp.MustCompile(`Repack Size: (.*?)`) + sizeRegexRes := sizeRegex.FindStringSubmatch(string(resp.Data)) + if len(sizeRegexRes) == 0 { + return nil, errors.New("Failed to find size") + } + size := sizeRegexRes[1] + magnetRegex := regexp.MustCompile(`magnet:\?[^"]*`) + magnetRegexRes := magnetRegex.FindStringSubmatch(string(resp.Data)) + if len(magnetRegexRes) == 0 { + return nil, errors.New("Failed to find magnet") + } + magnet := magnetRegexRes[0] + item, err := db.GetGameDownloadByUrl(url) + if err != nil { + return nil, err + } + item.Name = strings.TrimSpace(title) + item.RawName = rawTitle + item.Url = url + item.Size = size + item.Author = "FitGirl" + item.Download = magnet + return item, nil +} + +func (c *FitGirlCrawler) Crawl(page int) ([]*model.GameDownload, error) { + resp, err := utils.Fetch(utils.FetchConfig{ + Url: fmt.Sprintf(constant.FitGirlURL, page), + }) + if err != nil { + c.logger.Error("Failed to fetch", zap.Error(err)) + return nil, err + } + doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data)) + if err != nil { + c.logger.Error("Failed to parse HTML", zap.Error(err)) + return nil, err + } + urls := []string{} + updateFlags := []string{} //link+date + doc.Find("article").Each(func(i int, s *goquery.Selection) { + u, exist1 := s.Find(".entry-title>a").First().Attr("href") + d, exist2 := s.Find("time").First().Attr("datetime") + if exist1 && exist2 { + urls = append(urls, u) + updateFlags = append(updateFlags, fmt.Sprintf("%s%s", u, d)) + } + }) + var res 
[]*model.GameDownload + for i, u := range urls { + if db.IsFitgirlCrawled(updateFlags[i]) { + continue + } + c.logger.Info("Crawling", zap.String("URL", u)) + item, err := c.CrawlByUrl(u) + if err != nil { + c.logger.Warn("Failed to crawl", zap.Error(err), zap.String("URL", u)) + continue + } + item.UpdateFlag = updateFlags[i] + err = db.SaveGameDownload(item) + if err != nil { + c.logger.Warn("Failed to save", zap.Error(err)) + continue + } + res = append(res, item) + info, err := OrganizeGameDownload(item) + if err != nil { + c.logger.Warn("Failed to organize", zap.Error(err), zap.String("URL", u)) + continue + } + err = db.SaveGameInfo(info) + if err != nil { + c.logger.Warn("Failed to save", zap.Error(err), zap.String("URL", u)) + continue + } + } + return res, nil +} + +func (c *FitGirlCrawler) CrawlMulti(pages []int) ([]*model.GameDownload, error) { + var res []*model.GameDownload + for _, page := range pages { + items, err := c.Crawl(page) + if err != nil { + return nil, err + } + res = append(res, items...) + } + return res, nil +} + +func (c *FitGirlCrawler) CrawlAll() ([]*model.GameDownload, error) { + var res []*model.GameDownload + totalPageNum, err := c.GetTotalPageNum() + if err != nil { + return nil, err + } + for i := 1; i <= totalPageNum; i++ { + items, err := c.Crawl(i) + if err != nil { + return nil, err + } + res = append(res, items...) + } + return res, nil +} + +func (c *FitGirlCrawler) GetTotalPageNum() (int, error) { + resp, err := utils.Fetch(utils.FetchConfig{ + Url: fmt.Sprintf(constant.FitGirlURL, 1), + }) + if err != nil { + return 0, err + } + doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data)) + if err != nil { + return 0, err + } + page, err := strconv.Atoi(doc.Find(".page-numbers.dots").First().Next().Text()) + if err != nil { + return 0, err + } + return page, nil +} diff --git a/crawler/freegog.go b/crawler/freegog.go new file mode 100644 index 0000000..825c083 --- /dev/null +++ b/crawler/freegog.go @@ -0,0 +1,154 @@ +package crawler + +import ( + "bytes" + "encoding/base64" + "html" + "pcgamedb/constant" + "pcgamedb/db" + "pcgamedb/model" + "pcgamedb/utils" + "regexp" + "strings" + + "github.com/PuerkitoBio/goquery" + "go.uber.org/zap" +) + +type FreeGOGCrawler struct { + logger *zap.Logger +} + +// Deprecated: Unable to get through cloudflare +func NewFreeGOGCrawler(logger *zap.Logger) *FreeGOGCrawler { + return &FreeGOGCrawler{ + logger: logger, + } +} + +func (c *FreeGOGCrawler) Crawl(num int) ([]*model.GameDownload, error) { + count := 0 + resp, err := utils.Fetch(utils.FetchConfig{ + Url: constant.FreeGOGListURL, + }) + if err != nil { + c.logger.Error("Failed to fetch", zap.Error(err)) + return nil, err + } + doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data)) + if err != nil { + c.logger.Error("Failed to parse HTML", zap.Error(err)) + return nil, err + } + + urls := []string{} + updateFlags := []string{} //rawName+link + doc.Find(".items-outer li a").Each(func(i int, s *goquery.Selection) { + urls = append(urls, s.AttrOr("href", "")) + updateFlags = append(updateFlags, s.Text()+s.AttrOr("href", "")) + }) + + res := []*model.GameDownload{} + for i, u := range urls { + if count == num { + break + } + if db.IsFreeGOGCrawled(updateFlags[i]) { + continue + } + c.logger.Info("Crawling", zap.String("URL", u)) + item, err := c.CrawlByUrl(u) + if err != nil { + c.logger.Warn("Failed to crawl", zap.Error(err), zap.String("URL", u)) + continue + } + item.UpdateFlag = updateFlags[i] + err = db.SaveGameDownload(item) + if 
err != nil { + c.logger.Warn("Failed to save", zap.Error(err)) + continue + } + res = append(res, item) + count++ + info, err := OrganizeGameDownload(item) + if err != nil { + c.logger.Warn("Failed to organize", zap.Error(err), zap.String("URL", u)) + continue + } + err = db.SaveGameInfo(info) + if err != nil { + c.logger.Warn("Failed to save", zap.Error(err), zap.String("URL", u)) + continue + } + } + return res, nil +} + +func (c *FreeGOGCrawler) CrawlByUrl(url string) (*model.GameDownload, error) { + resp, err := utils.Fetch(utils.FetchConfig{ + Url: url, + }) + if err != nil { + return nil, err + } + item, err := db.GetGameDownloadByUrl(url) + if err != nil { + return nil, err + } + item.Url = url + rawTitleRegex := regexp.MustCompile(`(?i)
(.*?)
`) + rawTitleRegexRes := rawTitleRegex.FindStringSubmatch(string(resp.Data)) + rawName := "" + if len(rawTitleRegexRes) > 1 { + rawName = html.UnescapeString(rawTitleRegexRes[1]) + item.RawName = strings.Replace(rawName, "–", "-", -1) + } else { + return nil, err + } + item.Name = FreeGOGFormatter(item.RawName) + sizeRegex := regexp.MustCompile(`(?i)>Size:\s?(.*?)<`) + sizeRegexRes := sizeRegex.FindStringSubmatch(string(resp.Data)) + if len(sizeRegexRes) > 1 { + item.Size = sizeRegexRes[1] + } + magnetRegex := regexp.MustCompile(` 1 { + magnet, err := base64.StdEncoding.DecodeString(magnetRegexRes[1]) + if err != nil { + return nil, err + } + item.Download = string(magnet) + } else { + return nil, err + } + item.Author = "FreeGOG" + return item, nil +} + +func (c *FreeGOGCrawler) CrawlAll() ([]*model.GameDownload, error) { + return c.Crawl(-1) +} + +var freeGOGRegexps = []*regexp.Regexp{ + regexp.MustCompile(`(?i)\(.*\)`), +} + +func FreeGOGFormatter(name string) string { + for _, re := range freeGOGRegexps { + name = re.ReplaceAllString(name, "") + } + + reg1 := regexp.MustCompile(`(?i)v\d+(\.\d+)*`) + if index := reg1.FindIndex([]byte(name)); index != nil { + name = name[:index[0]] + } + if index := strings.Index(name, "+"); index != -1 { + name = name[:index] + } + + reg2 := regexp.MustCompile(`(?i):\sgoty`) + name = reg2.ReplaceAllString(name, ": Game Of The Year") + + return strings.TrimSpace(name) +} diff --git a/crawler/game.go b/crawler/game.go new file mode 100644 index 0000000..9c7c018 --- /dev/null +++ b/crawler/game.go @@ -0,0 +1,168 @@ +package crawler + +import ( + "errors" + "pcgamedb/db" + "pcgamedb/model" + "pcgamedb/utils" + "regexp" + "strings" + "time" + + "go.mongodb.org/mongo-driver/bson/primitive" + "go.mongodb.org/mongo-driver/mongo" +) + +func GenerateGameInfo(platform string, id int) (*model.GameInfo, error) { + switch platform { + case "steam": + return GenerateSteamGameInfo(id) + case "igdb": + return GenerateIGDBGameInfo(id) + default: + return nil, errors.New("Invalid ID type") + } +} + +func OrganizeGameDownload(game *model.GameDownload) (*model.GameInfo, error) { + item, err := OrganizeGameDownloadWithIGDB(0, game) + if err == nil { + if item.SteamID == 0 { + steamID, err := GetSteamIDByIGDBIDCache(item.IGDBID) + if err == nil { + item.SteamID = steamID + } + return item, nil + } + } + item, err = OrganizeGameDownloadWithSteam(0, game) + if err == nil { + if item.IGDBID == 0 { + igdbID, err := GetIGDBIDBySteamIDCache(item.SteamID) + if err == nil { + item.IGDBID = igdbID + } + } + return item, nil + } + return nil, err +} + +func AddGameInfoManually(gameID primitive.ObjectID, platform string, plateformID int) (*model.GameInfo, error) { + info, err := GenerateGameInfo(platform, plateformID) + if err != nil { + return nil, err + } + info.GameIDs = append(info.GameIDs, gameID) + info.GameIDs = utils.Unique(info.GameIDs) + return info, db.SaveGameInfo(info) +} + +func OrganizeGameDownloadManually(gameID primitive.ObjectID, platform string, platformID int) (*model.GameInfo, error) { + info, err := db.GetGameInfoByPlatformID(platform, platformID) + if err != nil { + if err == mongo.ErrNoDocuments { + info, err = AddGameInfoManually(gameID, platform, platformID) + if err != nil { + return nil, err + } + } else { + return nil, err + } + } + info.GameIDs = append(info.GameIDs, gameID) + info.GameIDs = utils.Unique(info.GameIDs) + err = db.SaveGameInfo(info) + if err != nil { + return nil, err + } + if platform == "igdb" { + steamID, err := 
GetSteamIDByIGDBIDCache(platformID) + if err == nil { + info.SteamID = steamID + } + } + if platform == "steam" { + igdbID, err := GetIGDBIDBySteamIDCache(platformID) + if err == nil { + info.IGDBID = igdbID + } + } + return info, nil +} + +func FormatName(name string) string { + name = regexp.MustCompile(`(?i)[\w’'-]+\s(Edition|Vision|Collection|Bundle|Pack|Deluxe)`).ReplaceAllString(name, " ") + name = regexp.MustCompile(`(?i)GOTY`).ReplaceAllString(name, "") + name = regexp.MustCompile(`(?i)nsw for pc`).ReplaceAllString(name, "") + name = regexp.MustCompile(`\([^\)]+\)`).ReplaceAllString(name, "") + name = regexp.MustCompile(`\s+`).ReplaceAllString(name, " ") + name = strings.Replace(name, ": Remastered", "", -1) + name = strings.Replace(name, ": Remaster", "", -1) + name = strings.TrimSpace(name) + name = strings.Trim(name, ":") + return name +} + +func TransformSteamIDToIGDBID() { + gameInfos, err := db.GetGameInfoWithSteamID() + if err != nil { + return + } + for _, info := range gameInfos { + id, err := GetIGDBIDBySteamIDCache(info.SteamID) + if err != nil { + continue + } + existedInfo, err := db.GetGameInfoByPlatformID("igdb", id) + if err == nil { + existedInfo.GameIDs = append(existedInfo.GameIDs, info.GameIDs...) + existedInfo.GameIDs = utils.Unique(existedInfo.GameIDs) + _ = db.SaveGameInfo(existedInfo) + _ = db.DeleteGameInfoByID(info.ID) + } else { + if err == mongo.ErrNoDocuments { + newInfo, err := GenerateIGDBGameInfo(id) + if err != nil { + continue + } + newInfo.ID = info.ID + newInfo.CreatedAt = info.CreatedAt + newInfo.GameIDs = info.GameIDs + _ = db.SaveGameInfo(newInfo) + } + } + } +} + +func SupplementGameInfoPlatformID() error { + infos, err := db.GetAllGameInfos() + if err != nil { + return err + } + for _, info := range infos { + changed := false + if info.IGDBID != 0 && info.SteamID == 0 { + steamID, err := GetSteamIDByIGDBIDCache(info.IGDBID) + time.Sleep(time.Millisecond * 100) + if err != nil { + continue + } + info.SteamID = steamID + changed = true + } + if info.SteamID != 0 && info.IGDBID == 0 { + igdbID, err := GetIGDBIDBySteamIDCache(info.SteamID) + time.Sleep(time.Millisecond * 100) + if err != nil { + continue + } + info.IGDBID = igdbID + changed = true + } + if changed { + _ = db.SaveGameInfo(info) + } + } + return nil +} diff --git a/crawler/gnarly.go b/crawler/gnarly.go new file mode 100644 index 0000000..c18c13d --- /dev/null +++ b/crawler/gnarly.go @@ -0,0 +1,105 @@ +package crawler + +import ( + "bytes" + "pcgamedb/constant" + "pcgamedb/db" + "pcgamedb/model" + "pcgamedb/utils" + "regexp" + "strings" + + "github.com/PuerkitoBio/goquery" + "go.uber.org/zap" +) + +type GnarlyCrawler struct { + logger *zap.Logger +} + +func NewGnarlyCrawler(logger *zap.Logger) *GnarlyCrawler { + return &GnarlyCrawler{ + logger: logger, + } +} + +func (c *GnarlyCrawler) Crawl(num int) ([]*model.GameDownload, error) { + var res []*model.GameDownload + count := 0 + resp, err := utils.Fetch(utils.FetchConfig{ + Url: constant.GnarlyURL, + }) + if err != nil { + return nil, err + } + doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data)) + if err != nil { + return nil, err + } + sizeRegex := regexp.MustCompile(`\[(\d+)\s(GB|MB)\]`) + pElementHtml := make([]string, 0) + doc.Find("p").Each(func(i int, s *goquery.Selection) { + pElementHtml = append(pElementHtml, s.Text()) + }) + for _, s := range pElementHtml { + if strings.Contains(s, "https://bin.0xfc.de/") { + lines := strings.Split(s, "\n") + for i := 0; i < len(lines); i++ { + if 
strings.Contains(lines[i], "[Gnarly Repacks]") { + i++ + if strings.Contains(lines[i], "https://bin.0xfc.de/") { + if count == num { + return res, nil + } + if db.IsGnarlyCrawled(lines[i-1]) { + continue + } + item, err := db.GetGameDownloadByUrl(lines[i]) + if err != nil { + continue + } + sizeRegexRes := sizeRegex.FindStringSubmatch(lines[i]) + if len(sizeRegexRes) == 3 { + item.Size = sizeRegexRes[1] + " " + sizeRegexRes[2] + } + c.logger.Info("Crawling", zap.String("Name", lines[i-1])) + item.RawName = lines[i-1] + item.Url = constant.GnarlyURL + item.Author = "Gnarly" + item.Name = GnarlyFormatter(item.RawName) + download, err := utils.DecryptPrivateBin(lines[i], "gnarly") + if err != nil { + continue + } + item.Download = download + item.UpdateFlag = item.RawName + res = append(res, item) + count++ + info, err := OrganizeGameDownload(item) + if err != nil { + continue + } + err = db.SaveGameInfo(info) + if err != nil { + c.logger.Warn("Failed to save game info", zap.Error(err)) + continue + } + } + } + } + } + } + return res, nil +} + +func (c *GnarlyCrawler) CrawlAll() ([]*model.GameDownload, error) { + return c.Crawl(-1) +} + +var parenthesesRegex = regexp.MustCompile(`\(([^)]+)\)`) + +func GnarlyFormatter(name string) string { + name = name[:strings.Index(name, " [Gnarly Repacks]")] + name = parenthesesRegex.ReplaceAllString(name, "") + return strings.TrimSpace(name) +} diff --git a/crawler/goggames.go b/crawler/goggames.go new file mode 100644 index 0000000..e25e6ec --- /dev/null +++ b/crawler/goggames.go @@ -0,0 +1,153 @@ +package crawler + +import ( + "bytes" + "errors" + "fmt" + "pcgamedb/constant" + "pcgamedb/db" + "pcgamedb/model" + "pcgamedb/utils" + "regexp" + "strconv" + "strings" + + "github.com/PuerkitoBio/goquery" + "go.uber.org/zap" +) + +type GOGGamesCrawler struct { + logger *zap.Logger +} + +func NewGOGGamesCrawler(logger *zap.Logger) *GOGGamesCrawler { + return &GOGGamesCrawler{ + logger: logger, + } +} + +func (c *GOGGamesCrawler) CrawlByUrl(url string) (*model.GameDownload, error) { + resp, err := utils.Fetch(utils.FetchConfig{ + Url: url, + }) + if err != nil { + return nil, err + } + doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data)) + if err != nil { + return nil, err + } + name := strings.TrimSpace(doc.Find("#game-details>.container>h1").First().Text()) + magnetRegex := regexp.MustCompile(`magnet:\?[^"]*`) + magnetRegexRes := magnetRegex.FindString(string(resp.Data)) + if magnetRegexRes == "" { + return nil, errors.New("magnet not found") + } + sizeStrs := make([]string, 0) + doc.Find(".container>.items-group").First().Find(".filesize").Each(func(i int, s *goquery.Selection) { + sizeStrs = append(sizeStrs, s.Text()) + }) + size, err := utils.SubSizeStrings(sizeStrs) + if err != nil { + return nil, err + } + item, err := db.GetGameDownloadByUrl(url) + if err != nil { + return nil, err + } + item.Name = name + item.RawName = name + item.Download = magnetRegexRes + item.Url = url + item.Size = size + item.Author = "GOGGames" + return item, nil +} + +func (c *GOGGamesCrawler) Crawl(page int) ([]*model.GameDownload, error) { + resp, err := utils.Fetch(utils.FetchConfig{ + Url: fmt.Sprintf(constant.GOGGamesURL, page), + }) + if err != nil { + return nil, err + } + doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data)) + if err != nil { + return nil, err + } + urls := make([]string, 0) + doc.Find(".game-blocks>a").Each(func(i int, s *goquery.Selection) { + u, exist := s.Attr("href") + if !exist { + return + } + urls = append(urls, 
fmt.Sprintf("%s%s", constant.GOGGamesBaseURL, u)) + }) + res := make([]*model.GameDownload, 0) + for _, u := range urls { + c.logger.Info("Crawling", zap.String("URL", u)) + item, err := c.CrawlByUrl(u) + if err != nil { + c.logger.Warn("Failed to crawl", zap.Error(err), zap.String("URL", u)) + continue + } + if err := db.SaveGameDownload(item); err != nil { + c.logger.Warn("Failed to save", zap.Error(err), zap.String("URL", u)) + continue + } + res = append(res, item) + info, err := OrganizeGameDownload(item) + if err != nil { + c.logger.Warn("Failed to organize", zap.Error(err), zap.String("URL", u)) + continue + } + if err := db.SaveGameInfo(info); err != nil { + c.logger.Warn("Failed to save", zap.Error(err), zap.String("URL", u)) + continue + } + } + return res, nil +} + +func (c *GOGGamesCrawler) CrawlMulti(pages []int) ([]*model.GameDownload, error) { + res := make([]*model.GameDownload, 0) + for _, page := range pages { + items, err := c.Crawl(page) + if err != nil { + return nil, err + } + res = append(res, items...) + } + return res, nil +} + +func (c *GOGGamesCrawler) CrawlAll() ([]*model.GameDownload, error) { + totalPageNum, err := c.GetTotalPageNum() + if err != nil { + return nil, err + } + var res []*model.GameDownload + for i := 1; i <= totalPageNum; i++ { + items, err := c.Crawl(i) + if err != nil { + return nil, err + } + res = append(res, items...) + } + return res, nil +} + +func (c *GOGGamesCrawler) GetTotalPageNum() (int, error) { + resp, err := utils.Fetch(utils.FetchConfig{ + Url: fmt.Sprintf(constant.GOGGamesURL, 1), + }) + if err != nil { + return 0, err + } + doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data)) + if err != nil { + return 0, err + } + btns := doc.Find(".pagination>.btn") + return strconv.Atoi(strings.TrimSpace(btns.Eq(btns.Length() - 2).Text())) +} diff --git a/crawler/igdb.go b/crawler/igdb.go new file mode 100644 index 0000000..a0ba5ec --- /dev/null +++ b/crawler/igdb.go @@ -0,0 +1,479 @@ +package crawler + +import ( + "encoding/json" + "errors" + "fmt" + "net/url" + "pcgamedb/cache" + "pcgamedb/config" + "pcgamedb/constant" + "pcgamedb/db" + "pcgamedb/model" + "pcgamedb/utils" + "regexp" + "runtime/debug" + "strconv" + "strings" +) + +var TwitchToken string + +func _GetIGDBID(name string) (int, error) { + var err error + if TwitchToken == "" { + TwitchToken, err = LoginTwitch() + if err != nil { + return 0, fmt.Errorf("failed to login twitch: %w", err) + } + } + resp, err := utils.Fetch(utils.FetchConfig{ + Url: constant.IGDBSearchURL, + Headers: map[string]string{ + "Client-ID": config.Config.Twitch.ClientID, + "Authorization": "Bearer " + TwitchToken, + "User-Agent": "", + "Content-Type": "text/plain", + }, + Data: fmt.Sprintf(`search "%s"; fields *; limit 50; where game.platforms = [6] | game.platforms=[130] | game.platforms=[384] | game.platforms=[163];`, name), + Method: "POST", + }) + if string(resp.Data) == "[]" { + resp, err = utils.Fetch(utils.FetchConfig{ + Url: constant.IGDBSearchURL, + Headers: map[string]string{ + "Client-ID": config.Config.Twitch.ClientID, + "Authorization": "Bearer " + TwitchToken, + "User-Agent": "", + "Content-Type": "text/plain", + }, + Data: fmt.Sprintf(`search "%s"; fields *; limit 50;`, name), + Method: "POST", + }) + } + if err != nil { + return 0, err + } + var data model.IGDBSearches + if err = json.Unmarshal(resp.Data, &data); err != nil { + return 0, fmt.Errorf("failed to unmarshal: %w, %s", err, debug.Stack()) + } + if len(data) == 1 { + return data[0].Game, nil + } + for _, item := 
range data { + if strings.EqualFold(item.Name, name) { + return item.Game, nil + } + if utils.Similarity(name, item.Name) >= 0.8 { + return item.Game, nil + } + detail, err := GetIGDBAppDetailCache(item.Game) + if err != nil { + return 0, err + } + for _, alternativeNames := range detail.AlternativeNames { + if utils.Similarity(alternativeNames.Name, name) >= 0.8 { + return item.Game, nil + } + } + } + return 0, fmt.Errorf("IGDB ID not found: %s", name) +} + +func GetIGDBID(name string) (int, error) { + name1 := name + name2 := FormatName(name) + names := []string{name1} + if name1 != name2 { + names = append(names, name2) + } + for _, name := range names { + id, err := _GetIGDBID(name) + if err == nil { + return id, nil + } + } + return 0, errors.New("IGDB ID not found") +} + +func GetIGDBIDCache(name string) (int, error) { + if config.Config.RedisAvaliable { + key := fmt.Sprintf("igdb_id:%s", name) + val, exist := cache.Get(key) + if exist { + id, err := strconv.Atoi(val) + if err != nil { + return 0, err + } + return id, nil + } else { + id, err := GetIGDBID(name) + if err != nil { + return 0, err + } + _ = cache.Add(key, id) + return id, nil + } + } else { + return GetIGDBID(name) + } +} + +func GetIGDBAppDetail(id int) (*model.IGDBGameDetail, error) { + var err error + if TwitchToken == "" { + TwitchToken, err = LoginTwitch() + if err != nil { + return nil, err + } + } + resp, err := utils.Fetch(utils.FetchConfig{ + Url: constant.IGDBGameURL, + Headers: map[string]string{ + "Client-ID": config.Config.Twitch.ClientID, + "Authorization": "Bearer " + TwitchToken, + "User-Agent": "", + "Content-Type": "text/plain", + }, + Data: fmt.Sprintf(`where id=%v ;fields *,alternative_names.name,language_supports.language,language_supports.language_support_type,screenshots.url,cover.url,involved_companies.company,involved_companies.developer,involved_companies.publisher;`, id), + Method: "POST", + }) + if err != nil { + return nil, err + } + var data model.IGDBGameDetails + if err = json.Unmarshal(resp.Data, &data); err != nil { + return nil, err + } + if len(data) == 0 { + return nil, errors.New("IGDB App not found") + } + if data[0].Name == "" { + return GetIGDBAppDetail(id) + } + return data[0], nil +} + +func GetIGDBAppDetailCache(id int) (*model.IGDBGameDetail, error) { + if config.Config.RedisAvaliable { + key := fmt.Sprintf("igdb_game:%v", id) + val, exist := cache.Get(key) + if exist { + var data model.IGDBGameDetail + if err := json.Unmarshal([]byte(val), &data); err != nil { + return nil, err + } + return &data, nil + } else { + data, err := GetIGDBAppDetail(id) + if err != nil { + return nil, err + } + dataBytes, err := json.Marshal(data) + if err != nil { + return nil, err + } + _ = cache.Add(key, dataBytes) + return data, nil + } + } else { + return GetIGDBAppDetail(id) + } +} + +func LoginTwitch() (string, error) { + baseURL, _ := url.Parse(constant.TwitchAuthURL) + params := url.Values{} + params.Add("client_id", config.Config.Twitch.ClientID) + params.Add("client_secret", config.Config.Twitch.ClientSecret) + params.Add("grant_type", "client_credentials") + baseURL.RawQuery = params.Encode() + resp, err := utils.Fetch(utils.FetchConfig{ + Url: baseURL.String(), + Method: "POST", + Headers: map[string]string{ + "User-Agent": "", + }, + }) + if err != nil { + return "", err + } + data := struct { + AccessToken string `json:"access_token"` + }{} + err = json.Unmarshal(resp.Data, &data) + if err != nil { + return "", err + } + return data.AccessToken, nil +} + +func GetIGDBCompany(id int) 
(string, error) { + var err error + if TwitchToken == "" { + TwitchToken, err = LoginTwitch() + if err != nil { + return "", err + } + } + resp, err := utils.Fetch(utils.FetchConfig{ + Url: constant.IGDBCompaniesURL, + Headers: map[string]string{ + "Client-ID": config.Config.Twitch.ClientID, + "Authorization": "Bearer " + TwitchToken, + "User-Agent": "", + "Content-Type": "text/plain", + }, + Data: fmt.Sprintf(`where id=%v; fields *;`, id), + Method: "POST", + }) + if err != nil { + return "", err + } + var data model.IGDBCompanies + if err = json.Unmarshal(resp.Data, &data); err != nil { + return "", err + } + if len(data) == 0 { + return "", errors.New("Not found") + } + if data[0].Name == "" { + return GetIGDBCompany(id) + } + return data[0].Name, nil +} + +func GetIGDBCompanyCache(id int) (string, error) { + if config.Config.RedisAvaliable { + key := fmt.Sprintf("igdb_companies:%v", id) + val, exist := cache.Get(key) + if exist { + return val, nil + } else { + data, err := GetIGDBCompany(id) + if err != nil { + return "", err + } + _ = cache.Add(key, data) + return data, nil + } + } else { + return GetIGDBCompany(id) + } +} + +func GenerateIGDBGameInfo(id int) (*model.GameInfo, error) { + item := &model.GameInfo{} + detail, err := GetIGDBAppDetailCache(id) + if err != nil { + return nil, err + } + item.IGDBID = id + item.Name = detail.Name + item.Description = detail.Summary + item.Cover = strings.Replace(detail.Cover.URL, "t_thumb", "t_original", 1) + + for _, lang := range detail.LanguageSupports { + if lang.LanguageSupportType == 3 { + l, exist := constant.IGDBLanguages[lang.Language] + if !exist { + continue + } + item.Languages = append(item.Languages, l.Name) + } + } + + for _, screenshot := range detail.Screenshots { + item.Screenshots = append(item.Screenshots, strings.Replace(screenshot.URL, "t_thumb", "t_original", 1)) + } + + for _, alias := range detail.AlternativeNames { + item.Aliases = append(item.Aliases, alias.Name) + } + + for _, company := range detail.InvolvedCompanies { + if company.Developer || company.Publisher { + companyName, err := GetIGDBCompanyCache(company.Company) + if err != nil { + continue + } + if company.Developer { + item.Developers = append(item.Developers, companyName) + } + if company.Publisher { + item.Publishers = append(item.Publishers, companyName) + } + } + } + + return item, nil +} + +func OrganizeGameDownloadWithIGDB(id int, game *model.GameDownload) (*model.GameInfo, error) { + var err error + if id == 0 { + id, err = GetIGDBIDCache(game.Name) + if err != nil { + return nil, err + } + } + d, err := db.GetGameInfoByPlatformID("igdb", id) + if err == nil { + d.GameIDs = append(d.GameIDs, game.ID) + d.GameIDs = utils.Unique(d.GameIDs) + return d, nil + } + info, err := GenerateGameInfo("igdb", id) + if err != nil { + return nil, err + } + info.GameIDs = append(info.GameIDs, game.ID) + info.GameIDs = utils.Unique(info.GameIDs) + return info, nil +} + +func GetIGDBIDBySteamID(id int) (int, error) { + var err error + if TwitchToken == "" { + TwitchToken, err = LoginTwitch() + if err != nil { + return 0, err + } + } + resp, err := utils.Fetch(utils.FetchConfig{ + Url: constant.IGDBWebsitesURL, + Method: "POST", + Headers: map[string]string{ + "Client-ID": config.Config.Twitch.ClientID, + "Authorization": "Bearer " + TwitchToken, + "User-Agent": "", + "Content-Type": "text/plain", + }, + Data: fmt.Sprintf(`where url = "https://store.steampowered.com/app/%v" | url = "https://store.steampowered.com/app/%v/"*; fields *; limit 500;`, id, id), + }) + 
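// The request above queries IGDB's "websites" endpoint with an Apicalypse
// filter that matches the Steam store URL with or without a trailing slash.
// As an illustration (the app ID below is made up, not taken from the code),
// the generated body for Steam app 1091500 would look roughly like:
//
//	where url = "https://store.steampowered.com/app/1091500"
//	    | url = "https://store.steampowered.com/app/1091500/"*;
//	fields *; limit 500;
//
// Each element of the JSON response is then expected to expose a "game"
// field carrying the IGDB game ID, which is what gets decoded below.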
if err != nil { + return 0, err + } + var data []struct { + Game int `json:"game"` + } + if err = json.Unmarshal(resp.Data, &data); err != nil { + return 0, err + } + if len(data) == 0 { + return 0, errors.New("Not found") + } + if data[0].Game == 0 { + return GetIGDBIDBySteamID(id) + } + return data[0].Game, nil +} + +func GetIGDBIDBySteamIDCache(id int) (int, error) { + if config.Config.RedisAvaliable { + key := fmt.Sprintf("igdb_id_by_steam_id:%v", id) + val, exist := cache.Get(key) + if exist { + return strconv.Atoi(val) + } else { + data, err := GetIGDBIDBySteamID(id) + if err != nil { + return 0, err + } + _ = cache.Add(key, strconv.Itoa(data)) + return data, nil + } + } else { + return GetIGDBIDBySteamID(id) + } +} + +func GetIGDBIDBySteamIDs(ids []int) (map[int]int, error) { + var err error + if TwitchToken == "" { + TwitchToken, err = LoginTwitch() + if err != nil { + return nil, err + } + } + conditionBuilder := strings.Builder{} + for _, id := range ids { + conditionBuilder.WriteString(fmt.Sprintf(`url = "https://store.steampowered.com/app/%v" | `, id)) + conditionBuilder.WriteString(fmt.Sprintf(`url = "https://store.steampowered.com/app/%v/"* | `, id)) + } + condition := strings.TrimSuffix(conditionBuilder.String(), " | ") + respBody := fmt.Sprintf(`where %s; fields *; limit 500;`, condition) + resp, err := utils.Fetch(utils.FetchConfig{ + Url: constant.IGDBWebsitesURL, + Method: "POST", + Headers: map[string]string{ + "Client-ID": config.Config.Twitch.ClientID, + "Authorization": "Bearer " + TwitchToken, + "User-Agent": "", + "Content-Type": "text/plain", + }, + Data: respBody, + }) + if err != nil { + return nil, err + } + var data []struct { + Game int `json:"game"` + Url string `json:"url"` + } + if err = json.Unmarshal(resp.Data, &data); err != nil { + return nil, err + } + ret := make(map[int]int) + regex := regexp.MustCompile(`https://store.steampowered.com/app/(\d+)/?`) + for _, d := range data { + idStr := regex.FindStringSubmatch(d.Url) + if len(idStr) < 2 { + continue + } + id, err := strconv.Atoi(idStr[1]) + if err == nil { + ret[id] = d.Game + } + } + for _, id := range ids { + if _, ok := ret[id]; !ok { + ret[id] = 0 + } + } + return ret, nil +} + +func GetIGDBIDBySteamIDsCache(ids []int) (map[int]int, error) { + res := make(map[int]int) + notExistIDs := make([]int, 0) + if config.Config.RedisAvaliable { + for _, steamID := range ids { + key := fmt.Sprintf("igdb_id_by_steam_id:%v", steamID) + val, exist := cache.Get(key) + if exist { + igdbID, _ := strconv.Atoi(val) + res[steamID] = igdbID + } else { + notExistIDs = append(notExistIDs, steamID) + } + } + if len(res) == len(ids) { + return res, nil + } + idMap, err := GetIGDBIDBySteamIDs(notExistIDs) + if err != nil { + return nil, err + } + for steamID, igdbID := range idMap { + res[steamID] = igdbID + if igdbID != 0 { + _ = cache.Add(fmt.Sprintf("igdb_id_by_steam_id:%v", steamID), igdbID) + } + } + return res, nil + } else { + return GetIGDBIDBySteamIDs(ids) + } +} diff --git a/crawler/kaoskrew.go b/crawler/kaoskrew.go new file mode 100644 index 0000000..bc7533d --- /dev/null +++ b/crawler/kaoskrew.go @@ -0,0 +1,66 @@ +package crawler + +import ( + "pcgamedb/model" + "regexp" + "strings" + + "go.uber.org/zap" +) + +const KaOsKrewName string = "KaOsKrew-torrents" + +type KaOsKrewCrawler struct { + logger *zap.Logger + crawler s1337xCrawler +} + +func NewKaOsKrewCrawler(logger *zap.Logger) *KaOsKrewCrawler { + return &KaOsKrewCrawler{ + logger: logger, + crawler: *New1337xCrawler( + KaOsKrewName, + 
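// KaOsKrewFormatter (defined later in this file) cleans up the raw 1337x
// release titles for this group, e.g. stripping ".REPACK-KaOs"/".UPDATE-KaOs"
// suffixes and version/build markers, before the name is matched against
// Steam/IGDB.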
KaOsKrewFormatter, + logger, + ), + } +} + +func (c *KaOsKrewCrawler) Crawl(page int) ([]*model.GameDownload, error) { + return c.crawler.Crawl(page) +} + +func (c *KaOsKrewCrawler) CrawlByUrl(url string) (*model.GameDownload, error) { + return c.crawler.CrawlByUrl(url) +} + +func (c *KaOsKrewCrawler) CrawlMulti(pages []int) ([]*model.GameDownload, error) { + return c.crawler.CrawlMulti(pages) +} + +func (c *KaOsKrewCrawler) CrawlAll() ([]*model.GameDownload, error) { + return c.crawler.CrawlAll() +} + +func (c *KaOsKrewCrawler) GetTotalPageNum() (int, error) { + return c.crawler.GetTotalPageNum() +} + +var kaOsKrewRegexps = []*regexp.Regexp{ + regexp.MustCompile(`(?i)\.REPACK2?-KaOs`), + regexp.MustCompile(`(?i)\.UPDATE-KaOs`), + regexp.MustCompile(`(?i)v\.?\d+(\.\d+)*|Build\.\d+`), + regexp.MustCompile(`(?i)\.MULTi\d+`), + regexp.MustCompile(`(?i)\sgoty`), +} + +func KaOsKrewFormatter(name string) string { + if index := kaOsKrewRegexps[2].FindIndex([]byte(name)); index != nil { + name = name[:index[0]] + } + for _, re := range kaOsKrewRegexps { + name = re.ReplaceAllString(name, "") + } + name = strings.Replace(name, ".", " ", -1) + return strings.TrimSpace(name) +} diff --git a/crawler/onlinefix.go b/crawler/onlinefix.go new file mode 100644 index 0000000..0ed08fa --- /dev/null +++ b/crawler/onlinefix.go @@ -0,0 +1,317 @@ +package crawler + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "net/url" + "os" + "pcgamedb/config" + "pcgamedb/constant" + "pcgamedb/db" + "pcgamedb/model" + "pcgamedb/utils" + "regexp" + "strconv" + "strings" + + "github.com/PuerkitoBio/goquery" + "go.uber.org/zap" +) + +type OnlineFixCrawler struct { + logger *zap.Logger + cookies map[string]string +} + +func NewOnlineFixCrawler(logger *zap.Logger) *OnlineFixCrawler { + return &OnlineFixCrawler{ + logger: logger, + cookies: map[string]string{}, + } +} + +func (c *OnlineFixCrawler) Crawl(page int) ([]*model.GameDownload, error) { + if !config.Config.OnlineFixAvaliable { + c.logger.Error("Need Online Fix account") + return nil, errors.New("Online Fix is not available") + } + if len(c.cookies) == 0 { + err := c.login() + if err != nil { + c.logger.Error("Failed to login", zap.Error(err)) + return nil, err + } + } + requestURL := fmt.Sprintf("%s/page/%d/", constant.OnlineFixURL, page) + resp, err := utils.Fetch(utils.FetchConfig{ + Url: requestURL, + Cookies: c.cookies, + Headers: map[string]string{ + "Referer": constant.OnlineFixURL, + }, + }) + if err != nil { + c.logger.Error("Failed to fetch", zap.Error(err)) + return nil, err + } + doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data)) + if err != nil { + c.logger.Error("Failed to parse HTML", zap.Error(err)) + return nil, err + } + urls := []string{} + updateFlags := []string{} //link+date + doc.Find("article.news").Each(func(i int, s *goquery.Selection) { + urls = append(urls, s.Find(".big-link").First().AttrOr("href", "")) + updateFlags = append( + updateFlags, + s.Find(".big-link").First().AttrOr("href", "")+ + s.Find("time").Text(), + ) + }) + + var res []*model.GameDownload + for i, u := range urls { + if db.IsOnlineFixCrawled(updateFlags[i]) { + continue + } + c.logger.Info("Crawling", zap.String("URL", u)) + item, err := c.CrawlByUrl(u) + if err != nil { + c.logger.Warn("Failed to crawl", zap.Error(err), zap.String("URL", u)) + continue + } + item.UpdateFlag = updateFlags[i] + err = db.SaveGameDownload(item) + if err != nil { + c.logger.Warn("Failed to save", zap.Error(err)) + continue + } + res = append(res, item) + info, err 
:= OrganizeGameDownload(item) + if err != nil { + c.logger.Warn("Failed to organize", zap.Error(err), zap.String("URL", u)) + continue + } + err = db.SaveGameInfo(info) + if err != nil { + c.logger.Warn("Failed to save", zap.Error(err), zap.String("URL", u)) + continue + } + } + return res, nil +} + +func (c *OnlineFixCrawler) CrawlByUrl(url string) (*model.GameDownload, error) { + if len(c.cookies) == 0 { + err := c.login() + if err != nil { + c.logger.Error("Failed to login", zap.Error(err)) + return nil, err + } + } + resp, err := utils.Fetch(utils.FetchConfig{ + Url: url, + Cookies: c.cookies, + Headers: map[string]string{ + "Referer": constant.OnlineFixURL, + }, + }) + if err != nil { + return nil, err + } + titleRegex := regexp.MustCompile(`(?i)(.*?)`) + titleRegexRes := titleRegex.FindAllStringSubmatch(string(resp.Data), -1) + if len(titleRegexRes) == 0 { + return nil, errors.New("Failed to find title") + } + downloadRegex := regexp.MustCompile(`(?i)]*\bhref="([^"]+)"[^>]*>(Скачать Torrent|Скачать торрент)`) + downloadRegexRes := downloadRegex.FindAllStringSubmatch(string(resp.Data), -1) + if len(downloadRegexRes) == 0 { + return nil, errors.New("Failed to find download button") + } + item, err := db.GetGameDownloadByUrl(url) + if err != nil { + return nil, err + } + item.RawName = titleRegexRes[0][1] + item.Name = OnlineFixFormatter(item.RawName) + item.Url = url + item.Author = "OnlineFix" + item.Size = "0" + resp, err = utils.Fetch(utils.FetchConfig{ + Url: downloadRegexRes[0][1], + Cookies: c.cookies, + Headers: map[string]string{ + "Referer": url, + }, + }) + if err != nil { + return nil, err + } + if strings.Contains(downloadRegexRes[0][1], "uploads.online-fix.me") { + magnetRegex := regexp.MustCompile(`(?i)"(.*?).torrent"`) + magnetRegexRes := magnetRegex.FindAllStringSubmatch(string(resp.Data), -1) + if len(magnetRegexRes) == 0 { + return nil, errors.New("Failed to find magnet") + } + resp, err = utils.Fetch(utils.FetchConfig{ + Url: downloadRegexRes[0][1] + strings.Trim(magnetRegexRes[0][0], "\""), + Cookies: c.cookies, + Headers: map[string]string{ + "Referer": url, + }, + }) + if err != nil { + return nil, err + } + item.Download, item.Size, err = utils.ConvertTorrentToMagnet(resp.Data) + if err != nil { + return nil, err + } + } else if strings.Contains(downloadRegexRes[0][1], "online-fix.me/ext") { + if strings.Contains(string(resp.Data), "mega.nz") { + if !config.Config.MegaAvaliable { + return nil, errors.New("Mega is not avaliable") + } + megaRegex := regexp.MustCompile(`(?i)location.href=\\'([^\\']*)\\'`) + megaRegexRes := megaRegex.FindAllStringSubmatch(string(resp.Data), -1) + if len(megaRegexRes) == 0 { + return nil, errors.New("Failed to find download link") + } + path, files, err := utils.MegaDownload(megaRegexRes[0][1], "torrent") + if err != nil { + return nil, err + } + torrent := "" + for _, file := range files { + if strings.HasSuffix(file, ".torrent") { + torrent = file + break + } + } + dataBytes, err := os.ReadFile(torrent) + if err != nil { + return nil, err + } + item.Download, item.Size, err = utils.ConvertTorrentToMagnet(dataBytes) + if err != nil { + return nil, err + } + _ = os.RemoveAll(path) + } else { + return nil, errors.New("Failed to find download link") + } + } else { + return nil, errors.New("Failed to find download link") + } + return item, nil +} + +func (c *OnlineFixCrawler) CrawlMulti(pages []int) ([]*model.GameDownload, error) { + var res []*model.GameDownload + for _, page := range pages { + items, err := c.Crawl(page) + if err != 
nil { + return nil, err + } + res = append(res, items...) + } + return res, nil +} + +func (c *OnlineFixCrawler) CrawlAll() ([]*model.GameDownload, error) { + var res []*model.GameDownload + totalPageNum, err := c.GetTotalPageNum() + if err != nil { + return nil, err + } + for i := 1; i <= totalPageNum; i++ { + items, err := c.Crawl(i) + if err != nil { + return nil, err + } + res = append(res, items...) + } + return res, nil +} + +func (c *OnlineFixCrawler) GetTotalPageNum() (int, error) { + resp, err := utils.Fetch(utils.FetchConfig{ + Url: constant.OnlineFixURL, + Headers: map[string]string{ + "Referer": constant.OnlineFixURL, + }, + }) + if err != nil { + return 0, err + } + pageRegex := regexp.MustCompile(`(?i).*?`) + pageRegexRes := pageRegex.FindAllStringSubmatch(string(resp.Data), -1) + if len(pageRegexRes) == 0 { + return 0, err + } + totalPageNum, err := strconv.Atoi(pageRegexRes[len(pageRegexRes)-2][1]) + if err != nil { + return 0, err + } + return totalPageNum, nil +} + +type csrf struct { + Field string `json:"field"` + Value string `json:"value"` +} + +func (c *OnlineFixCrawler) login() error { + resp, err := utils.Fetch(utils.FetchConfig{ + Url: constant.OnlineFixCSRFURL, + Headers: map[string]string{ + "X-Requested-With": "XMLHttpRequest", + "Referer": constant.OnlineFixURL, + }, + }) + if err != nil { + return err + } + var csrf csrf + if err = json.Unmarshal(resp.Data, &csrf); err != nil { + return err + } + + for _, cookie := range resp.Cookie { + c.cookies[cookie.Name] = cookie.Value + } + params := url.Values{} + params.Add("login_name", config.Config.OnlineFix.User) + params.Add("login_password", config.Config.OnlineFix.Password) + params.Add(csrf.Field, csrf.Value) + params.Add("login", "submit") + resp, err = utils.Fetch(utils.FetchConfig{ + Url: constant.OnlineFixURL, + Method: "POST", + Cookies: c.cookies, + Headers: map[string]string{ + "Origin": constant.OnlineFixURL, + "Content-Type": "application/x-www-form-urlencoded", + "Referer": constant.OnlineFixURL, + }, + Data: params, + }) + if err != nil { + return err + } + for _, cookie := range resp.Cookie { + c.cookies[cookie.Name] = cookie.Value + } + return nil +} + +func OnlineFixFormatter(name string) string { + name = strings.Replace(name, "по сети", "", -1) + reg1 := regexp.MustCompile(`(?i)\(.*?\)`) + name = reg1.ReplaceAllString(name, "") + return strings.TrimSpace(name) +} diff --git a/crawler/steam.go b/crawler/steam.go new file mode 100644 index 0000000..c0d71e3 --- /dev/null +++ b/crawler/steam.go @@ -0,0 +1,272 @@ +package crawler + +import ( + "encoding/json" + "errors" + "fmt" + "net/url" + "pcgamedb/cache" + "pcgamedb/config" + "pcgamedb/constant" + "pcgamedb/db" + "pcgamedb/model" + "pcgamedb/utils" + "regexp" + "strconv" + "strings" +) + +func _GetSteamID(name string) (int, error) { + baseURL, _ := url.Parse(constant.SteamSearchURL) + params := url.Values{} + params.Add("term", name) + baseURL.RawQuery = params.Encode() + + resp, err := utils.Fetch(utils.FetchConfig{ + Url: baseURL.String(), + }) + if err != nil { + return 0, err + } + idRegex := regexp.MustCompile(`data-ds-appid="(.*?)"`) + nameRegex := regexp.MustCompile(`(.*?)`) + idRegexRes := idRegex.FindAllStringSubmatch(string(resp.Data), -1) + nameRegexRes := nameRegex.FindAllStringSubmatch(string(resp.Data), -1) + + if len(idRegexRes) == 0 { + return 0, fmt.Errorf("Steam ID not found: %s", name) + } + + maxSim := 0.0 + maxSimID := 0 + for i, id := range idRegexRes { + idStr := id[1] + nameStr := nameRegexRes[i][1] + if index := 
strings.Index(idStr, ","); index != -1 { + idStr = idStr[:index] + } + if strings.EqualFold(strings.TrimSpace(nameStr), strings.TrimSpace(name)) { + return strconv.Atoi(idStr) + } else { + sim := utils.Similarity(nameStr, name) + if sim >= 0.8 && sim > maxSim { + maxSim = sim + maxSimID, _ = strconv.Atoi(idStr) + } + } + } + if maxSimID != 0 { + return maxSimID, nil + } + return 0, fmt.Errorf("Steam ID not found: %s", name) +} + +func GetSteamID(name string) (int, error) { + name1 := name + name2 := FormatName(name) + names := []string{name1} + if name1 != name2 { + names = append(names, name2) + } + for _, n := range names { + id, err := _GetSteamID(n) + if err == nil { + return id, nil + } + } + return 0, errors.New("Steam ID not found") +} + +func GetSteamIDCache(name string) (int, error) { + if config.Config.RedisAvaliable { + key := fmt.Sprintf("steam_id:%s", name) + val, exist := cache.Get(key) + if exist { + id, err := strconv.Atoi(val) + if err != nil { + return 0, err + } + return id, nil + } else { + id, err := GetSteamID(name) + if err != nil { + return 0, err + } + _ = cache.Add(key, id) + return id, nil + } + } else { + return GetSteamID(name) + } +} + +func GetSteamAppDetail(id int) (*model.SteamAppDetail, error) { + baseURL, _ := url.Parse(constant.SteamAppDetailURL) + params := url.Values{} + params.Add("appids", strconv.Itoa(id)) + // params.Add("l", "schinese") + baseURL.RawQuery = params.Encode() + resp, err := utils.Fetch(utils.FetchConfig{ + Url: baseURL.String(), + Headers: map[string]string{ + "User-Agent": "", + }, + }) + if err != nil { + return nil, err + } + var detail map[string]*model.SteamAppDetail + if err = json.Unmarshal(resp.Data, &detail); err != nil { + return nil, err + } + if _, ok := detail[strconv.Itoa(id)]; !ok { + return nil, fmt.Errorf("Steam App not found: %d", id) + } + if detail[strconv.Itoa(id)] == nil { + return nil, fmt.Errorf("Steam App not found: %d", id) + } + return detail[strconv.Itoa(id)], nil +} + +func GetSteamAppDetailCache(id int) (*model.SteamAppDetail, error) { + if config.Config.RedisAvaliable { + key := fmt.Sprintf("steam_game:%d", id) + val, exist := cache.Get(key) + if exist { + var detail model.SteamAppDetail + if err := json.Unmarshal([]byte(val), &detail); err != nil { + return nil, err + } + return &detail, nil + } else { + data, err := GetSteamAppDetail(id) + if err != nil { + return nil, err + } + dataBytes, err := json.Marshal(data) + if err != nil { + return nil, err + } + _ = cache.Add(key, dataBytes) + return data, nil + } + } else { + return GetSteamAppDetail(id) + } +} + +func GenerateSteamGameInfo(id int) (*model.GameInfo, error) { + item := &model.GameInfo{} + detail, err := GetSteamAppDetailCache(id) + if err != nil { + return nil, err + } + item.SteamID = id + item.Name = detail.Data.Name + item.Description = detail.Data.ShortDescription + item.Cover = fmt.Sprintf("https://shared.cloudflare.steamstatic.com/store_item_assets/steam/apps/%v/library_600x900_2x.jpg", id) + item.Developers = detail.Data.Developers + item.Publishers = detail.Data.Publishers + screenshots := []string{} + for _, screenshot := range detail.Data.Screenshots { + screenshots = append(screenshots, screenshot.PathFull) + } + item.Screenshots = screenshots + return item, nil +} + +func OrganizeGameDownloadWithSteam(id int, game *model.GameDownload) (*model.GameInfo, error) { + var err error + if id == 0 { + id, err = GetSteamIDCache(game.Name) + if err != nil { + return nil, err + } + } + d, err := db.GetGameInfoByPlatformID("steam", id) + if 
err == nil { + d.GameIDs = append(d.GameIDs, game.ID) + d.GameIDs = utils.Unique(d.GameIDs) + return d, nil + } + detail, err := GenerateGameInfo("steam", id) + if err != nil { + return nil, err + } + detail.GameIDs = append(detail.GameIDs, game.ID) + detail.GameIDs = utils.Unique(detail.GameIDs) + return detail, nil +} + +func GetSteamIDByIGDBID(IGDBID int) (int, error) { + var err error + if TwitchToken == "" { + TwitchToken, err = LoginTwitch() + if err != nil { + return 0, err + } + } + resp, err := utils.Fetch(utils.FetchConfig{ + Url: constant.IGDBWebsitesURL, + Method: "POST", + Headers: map[string]string{ + "Client-ID": config.Config.Twitch.ClientID, + "Authorization": "Bearer " + TwitchToken, + "User-Agent": "", + "Content-Type": "text/plain", + }, + Data: fmt.Sprintf(`where game = %v; fields *; limit 500;`, IGDBID), + }) + if err != nil { + return 0, err + } + var data []struct { + Game int `json:"game"` + Url string `json:"url"` + } + if err = json.Unmarshal(resp.Data, &data); err != nil { + return 0, err + } + if len(data) == 0 { + return 0, errors.New("Not found") + } + for _, v := range data { + if strings.HasPrefix(v.Url, "https://store.steampowered.com/app/") { + regex := regexp.MustCompile(`https://store.steampowered.com/app/(\d+)/?`) + idStr := regex.FindStringSubmatch(v.Url) + if len(idStr) < 2 { + return 0, errors.New("Failed parse") + } + steamID, err := strconv.Atoi(idStr[1]) + if err != nil { + return 0, err + } + return steamID, nil + } + } + return 0, errors.New("Not found") +} + +func GetSteamIDByIGDBIDCache(IGDBID int) (int, error) { + if config.Config.RedisAvaliable { + key := fmt.Sprintf("steam_game:%d", IGDBID) + val, exist := cache.Get(key) + if exist { + id, err := strconv.Atoi(val) + if err != nil { + return 0, err + } + return id, nil + } else { + id, err := GetSteamIDByIGDBID(IGDBID) + if err != nil { + return 0, err + } + dataBytes := strconv.Itoa(id) + _ = cache.Add(key, dataBytes) + return id, nil + } + } else { + return GetSteamIDByIGDBID(IGDBID) + } +} diff --git a/crawler/steam250.go b/crawler/steam250.go new file mode 100644 index 0000000..a7bab20 --- /dev/null +++ b/crawler/steam250.go @@ -0,0 +1,135 @@ +package crawler + +import ( + "bytes" + "encoding/json" + "fmt" + "pcgamedb/cache" + "pcgamedb/config" + "pcgamedb/constant" + "pcgamedb/db" + "pcgamedb/model" + "pcgamedb/utils" + "regexp" + "strconv" + "time" + + "github.com/PuerkitoBio/goquery" +) + +func GetSteam250(url string) ([]*model.GameInfo, error) { + resp, err := utils.Fetch(utils.FetchConfig{ + Url: url, + }) + if err != nil { + return nil, err + } + doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data)) + if err != nil { + return nil, err + } + var rank []model.Steam250Item + var item model.Steam250Item + steamIDs := make([]int, 0) + doc.Find(".appline").Each(func(i int, s *goquery.Selection) { + item.Name = s.Find(".title>a").First().Text() + idStr := s.Find(".store").AttrOr("href", "") + idSlice := regexp.MustCompile(`app/(\d+)/`).FindStringSubmatch(idStr) + if len(idSlice) < 2 { + return + } + item.SteamID, _ = strconv.Atoi(idSlice[1]) + rank = append(rank, item) + steamIDs = append(steamIDs, item.SteamID) + }) + var res []*model.GameInfo + count := 0 + idMap, err := GetIGDBIDBySteamIDsCache(steamIDs) + if err != nil { + return nil, err + } + for _, item := range rank { + if count == 10 { + break + } + if idMap[item.SteamID] != 0 { + info, err := db.GetGameInfoByPlatformID("igdb", idMap[item.SteamID]) + if err == nil { + res = append(res, info) + count++ + continue 
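// A ranked game only makes it into the result if its info already exists
// in the local database; at most 10 entries are collected (see the count
// check at the top of the loop).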
+ } + } else { + info, err := db.GetGameInfoByPlatformID("steam", item.SteamID) + if err == nil { + res = append(res, info) + count++ + continue + } + } + } + return res, nil +} + +func GetSteam250Top250() ([]*model.GameInfo, error) { + return GetSteam250(constant.Steam250Top250URL) +} + +func GetSteam250Top250Cache() ([]*model.GameInfo, error) { + return GetSteam250Cache("top250", GetSteam250Top250) +} + +func GetSteam250BestOfTheYear() ([]*model.GameInfo, error) { + return GetSteam250(fmt.Sprintf(constant.Steam250BestOfTheYearURL, time.Now().UTC().Year())) +} + +func GetSteam250BestOfTheYearCache() ([]*model.GameInfo, error) { + return GetSteam250Cache(fmt.Sprintf("bestoftheyear:%v", time.Now().UTC().Year()), GetSteam250BestOfTheYear) +} + +func GetSteam250WeekTop50() ([]*model.GameInfo, error) { + return GetSteam250(constant.Steam250WeekTop50URL) +} + +func GetSteam250WeekTop50Cache() ([]*model.GameInfo, error) { + return GetSteam250Cache("weektop50", GetSteam250WeekTop50) +} + +func GetSteam250MostPlayed() ([]*model.GameInfo, error) { + return GetSteam250(constant.Steam250MostPlayedURL) +} + +func GetSteam250MostPlayedCache() ([]*model.GameInfo, error) { + return GetSteam250Cache("mostplayed", GetSteam250MostPlayed) +} + +func GetSteam250Cache(k string, f func() ([]*model.GameInfo, error)) ([]*model.GameInfo, error) { + if config.Config.RedisAvaliable { + key := k + val, exist := cache.Get(key) + if exist { + var res []*model.GameInfo + err := json.Unmarshal([]byte(val), &res) + if err != nil { + return nil, err + } + return res, nil + } else { + data, err := f() + if err != nil { + return nil, err + } + dataBytes, err := json.Marshal(data) + if err != nil { + return data, nil + } + err = cache.AddWithExpire(key, dataBytes, 24*time.Hour) + if err != nil { + return data, nil + } + return data, nil + } + } else { + return f() + } +} diff --git a/crawler/steamrip.go b/crawler/steamrip.go new file mode 100644 index 0000000..6771813 --- /dev/null +++ b/crawler/steamrip.go @@ -0,0 +1,137 @@ +package crawler + +import ( + "bytes" + "errors" + "fmt" + "pcgamedb/constant" + "pcgamedb/db" + "pcgamedb/model" + "pcgamedb/utils" + "regexp" + "strings" + + "github.com/PuerkitoBio/goquery" + "go.uber.org/zap" +) + +type SteamRIPCrawler struct { + logger *zap.Logger +} + +func NewSteamRIPCrawler(logger *zap.Logger) *SteamRIPCrawler { + return &SteamRIPCrawler{ + logger: logger, + } +} + +func (c *SteamRIPCrawler) CrawlByUrl(url string) (*model.GameDownload, error) { + resp, err := utils.Fetch(utils.FetchConfig{ + Url: url, + }) + if err != nil { + return nil, err + } + doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data)) + if err != nil { + return nil, err + } + item, err := db.GetGameDownloadByUrl(url) + if err != nil { + return nil, err + } + item.RawName = strings.TrimSpace(doc.Find(".entry-title").First().Text()) + item.Name = SteamRIPFormatter(item.RawName) + item.Url = url + item.Author = "SteamRIP" + sizeRegex := regexp.MustCompile(`(?i)
  • Game Size:\s?(.*?)
  • `) + sizeRegexRes := sizeRegex.FindStringSubmatch(string(resp.Data)) + if len(sizeRegexRes) != 0 { + item.Size = strings.TrimSpace(sizeRegexRes[1]) + } else { + item.Size = "unknown" + } + megadbRegex := regexp.MustCompile(`(?i)(?:https?:)?(//megadb\.net/[^"]+)`) + megadbRegexRes := megadbRegex.FindStringSubmatch(string(resp.Data)) + if len(megadbRegexRes) != 0 { + item.Download = fmt.Sprintf("https:%s", megadbRegexRes[1]) + } + gofileRegex := regexp.MustCompile(`(?i)(?:https?:)?(//gofile\.io/d/[^"]+)`) + gofileRegexRes := gofileRegex.FindStringSubmatch(string(resp.Data)) + if item.Download == "" && len(gofileRegexRes) != 0 { + item.Download = fmt.Sprintf("https:%s", gofileRegexRes[1]) + } + if item.Download == "" { + return nil, errors.New("Failed to find download link") + } + + return item, nil +} + +func (c *SteamRIPCrawler) Crawl(num int) ([]*model.GameDownload, error) { + count := 0 + resp, err := utils.Fetch(utils.FetchConfig{ + Url: constant.SteamRIPGameListURL, + }) + if err != nil { + return nil, err + } + doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data)) + if err != nil { + return nil, err + } + var items []*model.GameDownload + urls := []string{} + updateFlags := []string{} // title + doc.Find(".az-list-item>a").Each(func(i int, s *goquery.Selection) { + u, exist := s.Attr("href") + if !exist { + return + } + urls = append(urls, fmt.Sprintf("%s%s", constant.SteamRIPBaseURL, u)) + updateFlags = append(updateFlags, s.Text()) + }) + for i, u := range urls { + if count == num { + break + } + if db.IsSteamRIPCrawled(updateFlags[i]) { + continue + } + c.logger.Info("Crawling", zap.String("URL", u)) + item, err := c.CrawlByUrl(u) + if err != nil { + c.logger.Error("Failed to crawl", zap.Error(err), zap.String("URL", u)) + continue + } + item.UpdateFlag = updateFlags[i] + if err := db.SaveGameDownload(item); err != nil { + c.logger.Error("Failed to save item", zap.Error(err)) + continue + } + items = append(items, item) + count++ + info, err := OrganizeGameDownload(item) + if err != nil { + c.logger.Warn("Failed to organize", zap.Error(err), zap.String("URL", u)) + continue + } + err = db.SaveGameInfo(info) + if err != nil { + c.logger.Warn("Failed to save", zap.Error(err), zap.String("URL", u)) + continue + } + } + return items, nil +} + +func (c *SteamRIPCrawler) CrawlAll() ([]*model.GameDownload, error) { + return c.Crawl(-1) +} + +func SteamRIPFormatter(name string) string { + name = regexp.MustCompile(`\([^\)]+\)`).ReplaceAllString(name, "") + name = strings.Replace(name, "Free Download", "", -1) + name = strings.TrimSpace(name) + return name +} diff --git a/crawler/xatab.go b/crawler/xatab.go new file mode 100644 index 0000000..624d25a --- /dev/null +++ b/crawler/xatab.go @@ -0,0 +1,218 @@ +package crawler + +import ( + "bytes" + "errors" + "fmt" + "pcgamedb/constant" + "pcgamedb/db" + "pcgamedb/model" + "pcgamedb/utils" + "regexp" + "strconv" + "strings" + + "github.com/PuerkitoBio/goquery" + "go.uber.org/zap" +) + +type XatabCrawler struct { + logger *zap.Logger +} + +func NewXatabCrawler(logger *zap.Logger) *XatabCrawler { + return &XatabCrawler{ + logger: logger, + } +} + +func (c *XatabCrawler) Crawl(page int) ([]*model.GameDownload, error) { + requestURL := fmt.Sprintf("%s/page/%v", constant.XatabBaseURL, page) + resp, err := utils.Fetch(utils.FetchConfig{ + Url: requestURL, + }) + if err != nil { + c.logger.Error("Failed to fetch", zap.Error(err)) + return nil, err + } + doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data)) + if err 
!= nil { + c.logger.Error("Failed to parse HTML", zap.Error(err)) + return nil, err + } + urls := []string{} + updateFlags := []string{} // title + doc.Find(".entry").Each(func(i int, s *goquery.Selection) { + u, exist := s.Find(".entry__title.h2 a").Attr("href") + if !exist { + return + } + urls = append(urls, u) + updateFlags = append(updateFlags, s.Find(".entry__title.h2 a").Text()) + }) + var res []*model.GameDownload + for i, u := range urls { + if db.IsXatabCrawled(updateFlags[i]) { + continue + } + c.logger.Info("Crawling", zap.String("URL", u)) + item, err := c.CrawlByUrl(u) + if err != nil { + c.logger.Warn("Failed to crawl", zap.Error(err), zap.String("URL", u)) + continue + } + err = db.SaveGameDownload(item) + if err != nil { + c.logger.Warn("Failed to save", zap.Error(err)) + continue + } + res = append(res, item) + info, err := OrganizeGameDownload(item) + if err != nil { + c.logger.Warn("Failed to organize", zap.Error(err), zap.String("URL", u)) + continue + } + err = db.SaveGameInfo(info) + if err != nil { + c.logger.Warn("Failed to save", zap.Error(err), zap.String("URL", u)) + continue + } + } + return res, nil +} + +func (c *XatabCrawler) CrawlByUrl(url string) (*model.GameDownload, error) { + resp, err := utils.Fetch(utils.FetchConfig{ + Url: url, + }) + if err != nil { + return nil, err + } + doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data)) + if err != nil { + return nil, err + } + item, err := db.GetGameDownloadByUrl(url) + if err != nil { + return nil, err + } + item.Url = url + item.RawName = doc.Find(".inner-entry__title").First().Text() + item.Name = XatabFormatter(item.RawName) + item.Author = "Xatab" + item.UpdateFlag = item.RawName + downloadURL := doc.Find("#download>a").First().AttrOr("href", "") + if downloadURL == "" { + return nil, errors.New("Failed to find download URL") + } + resp, err = utils.Fetch(utils.FetchConfig{ + Headers: map[string]string{"Referer": url}, + Url: downloadURL, + }) + if err != nil { + return nil, err + } + magnet, size, err := utils.ConvertTorrentToMagnet(resp.Data) + if err != nil { + return nil, err + } + item.Size = size + item.Download = magnet + return item, nil +} + +func (c *XatabCrawler) CrawlMulti(pages []int) ([]*model.GameDownload, error) { + totalPageNum, err := c.GetTotalPageNum() + if err != nil { + return nil, err + } + var res []*model.GameDownload + for _, page := range pages { + if page > totalPageNum { + continue + } + items, err := c.Crawl(page) + if err != nil { + return nil, err + } + res = append(res, items...) + } + return res, nil +} + +func (c *XatabCrawler) CrawlAll() ([]*model.GameDownload, error) { + totalPageNum, err := c.GetTotalPageNum() + if err != nil { + return nil, err + } + var res []*model.GameDownload + for i := 1; i <= totalPageNum; i++ { + items, err := c.Crawl(i) + if err != nil { + return nil, err + } + res = append(res, items...) 
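// NOTE: a failure on any single page aborts the whole CrawlAll run with
// that error; pages that were already collected in res are discarded.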
+ } + return res, nil +} + +func (c *XatabCrawler) GetTotalPageNum() (int, error) { + resp, err := utils.Fetch(utils.FetchConfig{ + Url: constant.XatabBaseURL, + }) + if err != nil { + return 0, err + } + doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data)) + if err != nil { + return 0, err + } + pageStr := doc.Find(".pagination>a").Last().Text() + totalPageNum, err := strconv.Atoi(pageStr) + if err != nil { + return 0, err + } + return totalPageNum, nil +} + +var xatabRegexps = []*regexp.Regexp{ + regexp.MustCompile(`(?i)\sPC$`), +} + +func XatabFormatter(name string) string { + reg1 := regexp.MustCompile(`(?i)v(er)?\s?(\.)?\d+(\.\d+)*`) + if index := reg1.FindIndex([]byte(name)); index != nil { + name = name[:index[0]] + } + if index := strings.Index(name, "["); index != -1 { + name = name[:index] + } + if index := strings.Index(name, "("); index != -1 { + name = name[:index] + } + if index := strings.Index(name, "{"); index != -1 { + name = name[:index] + } + if index := strings.Index(name, "+"); index != -1 { + name = name[:index] + } + name = strings.TrimSpace(name) + for _, re := range xatabRegexps { + name = re.ReplaceAllString(name, "") + } + + if index := strings.Index(name, "/"); index != -1 { + names := strings.Split(name, "/") + longestLength := 0 + longestName := "" + for _, n := range names { + if !utils.ContainsRussian(n) && len(n) > longestLength { + longestLength = len(n) + longestName = n + } + } + name = longestName + } + + return strings.TrimSpace(name) +} diff --git a/db/1337x.go b/db/1337x.go new file mode 100644 index 0000000..d2bd444 --- /dev/null +++ b/db/1337x.go @@ -0,0 +1,13 @@ +package db + +import ( + "pcgamedb/model" +) + +func GetDODIGameDownloads() ([]*model.GameDownload, error) { + return GetGameDownloadsByAuthor("dodi") +} + +func GetKaOsKrewGameDownloads() ([]*model.GameDownload, error) { + return GetGameDownloadsByAuthor("kaoskrew") +} diff --git a/db/armgddn.go b/db/armgddn.go new file mode 100644 index 0000000..e1fd53d --- /dev/null +++ b/db/armgddn.go @@ -0,0 +1,11 @@ +package db + +import "pcgamedb/model" + +func IsARMGDDNCrawled(flag string) bool { + return IsGameCrawled(flag, "armgddn") +} + +func GetARMGDDNGameDownloads() ([]*model.GameDownload, error) { + return GetGameDownloadsByAuthor("armgddn") +} diff --git a/db/chovka.go b/db/chovka.go new file mode 100644 index 0000000..9c16f28 --- /dev/null +++ b/db/chovka.go @@ -0,0 +1,5 @@ +package db + +func IsChovkaCrawled(flag string) bool { + return IsGameCrawled(flag, "chovka") +} diff --git a/db/custom_collection.go b/db/custom_collection.go new file mode 100644 index 0000000..1e38a9c --- /dev/null +++ b/db/custom_collection.go @@ -0,0 +1,77 @@ +package db + +import ( + "context" + "pcgamedb/config" + + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" +) + +type CustomCollection struct { + collName string + coll *mongo.Collection +} + +func (c *CustomCollection) Find(ctx context.Context, filter interface{}, + opts ...*options.FindOptions) (cur *mongo.Cursor, err error) { + CheckConnect() + if c.coll == nil { + c.coll = mongoDB.Database(config.Config.Database.Database).Collection(c.collName) + } + return c.coll.Find(ctx, filter, opts...) +} + +func (c *CustomCollection) FindOne(ctx context.Context, filter interface{}, + opts ...*options.FindOneOptions) *mongo.SingleResult { + CheckConnect() + if c.coll == nil { + c.coll = mongoDB.Database(config.Config.Database.Database).Collection(c.collName) + } + return c.coll.FindOne(ctx, filter, opts...) 
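// Every CustomCollection method follows the same pattern: CheckConnect()
// establishes the MongoDB client on first use, and the underlying
// *mongo.Collection handle is resolved lazily, so importing the db package
// does not open a connection by itself.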
+} + +func (c *CustomCollection) UpdateOne(ctx context.Context, filter interface{}, update interface{}, + opts ...*options.UpdateOptions) (*mongo.UpdateResult, error) { + CheckConnect() + if c.coll == nil { + c.coll = mongoDB.Database(config.Config.Database.Database).Collection(c.collName) + } + return c.coll.UpdateOne(ctx, filter, update, opts...) +} + +func (c *CustomCollection) Aggregate(ctx context.Context, pipeline interface{}, + opts ...*options.AggregateOptions) (*mongo.Cursor, error) { + CheckConnect() + if c.coll == nil { + c.coll = mongoDB.Database(config.Config.Database.Database).Collection(c.collName) + } + return c.coll.Aggregate(ctx, pipeline, opts...) +} + +func (c *CustomCollection) DeleteOne(ctx context.Context, filter interface{}, + opts ...*options.DeleteOptions) (*mongo.DeleteResult, error) { + CheckConnect() + if c.coll == nil { + c.coll = mongoDB.Database(config.Config.Database.Database).Collection(c.collName) + } + return c.coll.DeleteOne(ctx, filter, opts...) +} + +func (c *CustomCollection) DeleteMany(ctx context.Context, filter interface{}, + opts ...*options.DeleteOptions) (*mongo.DeleteResult, error) { + CheckConnect() + if c.coll == nil { + c.coll = mongoDB.Database(config.Config.Database.Database).Collection(c.collName) + } + return c.coll.DeleteMany(ctx, filter, opts...) +} + +func (c *CustomCollection) CountDocuments(ctx context.Context, filter interface{}, + opts ...*options.CountOptions) (int64, error) { + CheckConnect() + if c.coll == nil { + c.coll = mongoDB.Database(config.Config.Database.Database).Collection(c.collName) + } + return c.coll.CountDocuments(ctx, filter, opts...) +} diff --git a/db/db.go b/db/db.go new file mode 100644 index 0000000..8018f24 --- /dev/null +++ b/db/db.go @@ -0,0 +1,120 @@ +package db + +import ( + "context" + "fmt" + "pcgamedb/config" + "pcgamedb/log" + "sync" + "time" + + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" + "go.uber.org/zap" +) + +const ( + gameDownloadCollectionName = "game_downloads" + gameInfoCollectionName = "game_infos" +) + +var ( + mongoDB *mongo.Client + mutx = &sync.RWMutex{} + GameDownloadCollection = &CustomCollection{ + collName: gameDownloadCollectionName, + } + GameInfoCollection = &CustomCollection{ + collName: gameInfoCollectionName, + } +) + +func connect() { + if !config.Config.DatabaseAvaliable { + log.Logger.Panic("Missing database configuration information") + return + } + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + clientOptions := options.Client().ApplyURI(fmt.Sprintf( + "mongodb://%s:%s@%s:%v", + config.Config.Database.User, + config.Config.Database.Password, + config.Config.Database.Host, + config.Config.Database.Port, + )) + client, err := mongo.Connect(ctx, clientOptions) + if err != nil { + log.Logger.Panic("Failed to connect to MongoDB", zap.Error(err)) + } + ctx, cancel = context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + err = client.Ping(ctx, nil) + if err != nil { + log.Logger.Panic("Failed to ping MongoDB", zap.Error(err)) + } + log.Logger.Info("Connected to MongoDB") + mongoDB = client + + gameDownloadCollection := mongoDB.Database(config.Config.Database.Database).Collection(gameDownloadCollectionName) + gameInfoCollection := mongoDB.Database(config.Config.Database.Database).Collection(gameInfoCollectionName) + + nameIndex := mongo.IndexModel{ + Keys: bson.D{ + {Key: "name", Value: 1}, + }, + } + authorIndex := mongo.IndexModel{ + 
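// Single-field index on the uploader ("author") field, used by the
// author-filtered lookups in db/game.go (e.g. GetGameDownloadsByAuthor).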
Keys: bson.D{ + {Key: "author", Value: 1}, + }, + } + gamesIndex := mongo.IndexModel{ + Keys: bson.D{ + {Key: "games", Value: 1}, + }, + } + searchIndex := mongo.IndexModel{ + Keys: bson.D{{Key: "name", Value: "text"}, {Key: "aliases", Value: "text"}}, + } + ctx, cancel = context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + _, err = gameDownloadCollection.Indexes().CreateOne(ctx, nameIndex) + if err != nil { + log.Logger.Error("Failed to create index", zap.Error(err)) + } + _, err = gameDownloadCollection.Indexes().CreateOne(ctx, authorIndex) + if err != nil { + log.Logger.Error("Failed to create index", zap.Error(err)) + } + _, err = gameInfoCollection.Indexes().CreateOne(ctx, gamesIndex) + if err != nil { + log.Logger.Error("Failed to create index", zap.Error(err)) + } + _, err = gameInfoCollection.Indexes().CreateOne(ctx, searchIndex) + if err != nil { + log.Logger.Error("Failed to create index", zap.Error(err)) + } +} + +func CheckConnect() { + mutx.RLock() + if mongoDB != nil { + mutx.RUnlock() + return + } + mutx.RUnlock() + + mutx.Lock() + if mongoDB == nil { + connect() + } + mutx.Unlock() +} + +func HealthCheck() error { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + return mongoDB.Ping(ctx, nil) +} diff --git a/db/fitgirl.go b/db/fitgirl.go new file mode 100644 index 0000000..70702c5 --- /dev/null +++ b/db/fitgirl.go @@ -0,0 +1,11 @@ +package db + +import "pcgamedb/model" + +func GetFitgirlAllGameDownloads() ([]*model.GameDownload, error) { + return GetGameDownloadsByAuthor("fitgirl") +} + +func IsFitgirlCrawled(flag string) bool { + return IsGameCrawled(flag, "armgddn") +} diff --git a/db/freegog.go b/db/freegog.go new file mode 100644 index 0000000..c1fd8bc --- /dev/null +++ b/db/freegog.go @@ -0,0 +1,12 @@ +package db + +import ( + "pcgamedb/model" +) + +func GetFreeGOGGameDownloads() ([]*model.GameDownload, error) { + return GetGameDownloadsByAuthor("freegog") +} +func IsFreeGOGCrawled(flag string) bool { + return IsGameCrawled(flag, "freegog") +} diff --git a/db/game.go b/db/game.go new file mode 100644 index 0000000..b769b8d --- /dev/null +++ b/db/game.go @@ -0,0 +1,733 @@ +package db + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "pcgamedb/cache" + "pcgamedb/config" + "pcgamedb/model" + "regexp" + "slices" + "strings" + "time" + + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/primitive" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" +) + +var ( + removeDelimiter = regexp.MustCompile(`[:\-\+]`) + removeRepeatingSpacesRegex = regexp.MustCompile(`\s+`) +) + +func GetGameDownloadsByAuthor(regex string) ([]*model.GameDownload, error) { + var res []*model.GameDownload + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + filter := bson.D{{Key: "author", Value: primitive.Regex{Pattern: regex, Options: "i"}}} + cursor, err := GameDownloadCollection.Find(ctx, filter) + if err != nil { + return nil, err + } + defer cursor.Close(ctx) + if cursor.Err() != nil { + return nil, cursor.Err() + } + if err = cursor.All(ctx, &res); err != nil { + return nil, err + } + return res, err +} + +func GetGameDownloadsByAuthorPagination(regex string, page int, pageSize int) ([]*model.GameDownload, int, error) { + var res []*model.GameDownload + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + filter := bson.D{{Key: "author", Value: primitive.Regex{Pattern: regex, Options: 
"i"}}} + opts := options.Find() + opts.SetSkip(int64((page - 1) * pageSize)) + opts.SetLimit(int64(pageSize)) + totalCount, err := GameDownloadCollection.CountDocuments(ctx, filter) + if err != nil { + return nil, 0, err + } + totalPage := (totalCount + int64(pageSize) - 1) / int64(pageSize) + cursor, err := GameDownloadCollection.Find(ctx, filter, opts) + if err != nil { + return nil, 0, err + } + defer cursor.Close(ctx) + if cursor.Err() != nil { + return nil, 0, cursor.Err() + } + if err = cursor.All(ctx, &res); err != nil { + return nil, 0, err + } + return res, int(totalPage), err +} + +func IsGameCrawled(flag string, author string) bool { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + filter := bson.D{ + {Key: "author", Value: primitive.Regex{Pattern: author, Options: "i"}}, + {Key: "update_flag", Value: flag}, + } + var game model.GameDownload + err := GameDownloadCollection.FindOne(ctx, filter).Decode(&game) + if err != nil { + if errors.Is(mongo.ErrNoDocuments, err) { + return false + } + return false + } + return true +} + +func IsGameCrawledByURL(url string) bool { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + filter := bson.D{ + {Key: "url", Value: url}, + } + var game model.GameDownload + err := GameDownloadCollection.FindOne(ctx, filter).Decode(&game) + if err != nil { + if errors.Is(mongo.ErrNoDocuments, err) { + return false + } + return false + } + return true +} + +func SaveGameDownload(item *model.GameDownload) error { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + if item.ID.IsZero() { + item.ID = primitive.NewObjectID() + } + if item.CreatedAt.IsZero() { + item.CreatedAt = time.Now() + } + item.UpdatedAt = time.Now() + item.Size = strings.Replace(item.Size, "gb", "GB", -1) + item.Size = strings.Replace(item.Size, "mb", "MB", -1) + filter := bson.M{"_id": item.ID} + update := bson.M{"$set": item} + opts := options.Update().SetUpsert(true) + _, err := GameDownloadCollection.UpdateOne(ctx, filter, update, opts) + if err != nil { + return err + } + return nil +} + +func SaveGameInfo(item *model.GameInfo) error { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + if item.ID.IsZero() { + item.ID = primitive.NewObjectID() + } + if item.CreatedAt.IsZero() { + item.CreatedAt = time.Now() + } + item.UpdatedAt = time.Now() + filter := bson.M{"_id": item.ID} + update := bson.M{"$set": item} + opts := options.Update().SetUpsert(true) + _, err := GameInfoCollection.UpdateOne(ctx, filter, update, opts) + if err != nil { + return err + } + return nil +} + +func GetAllGameDownloads() ([]*model.GameDownload, error) { + var items []*model.GameDownload + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + cursor, err := GameDownloadCollection.Find(ctx, bson.D{}) + if err != nil { + return nil, err + } + defer cursor.Close(ctx) + for cursor.Next(ctx) { + var game model.GameDownload + if err = cursor.Decode(&game); err != nil { + return nil, err + } + items = append(items, &game) + } + if cursor.Err() != nil { + return nil, cursor.Err() + } + return items, err +} + +func GetGameDownloadByUrl(url string) (*model.GameDownload, error) { + var item model.GameDownload + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + filter := bson.M{"url": url} + err := GameDownloadCollection.FindOne(ctx, filter).Decode(&item) + if err != nil { + 
if errors.Is(mongo.ErrNoDocuments, err) { + return &model.GameDownload{}, nil + } + return nil, err + } + return &item, nil +} + +func GetGameDownloadByID(id primitive.ObjectID) (*model.GameDownload, error) { + var item model.GameDownload + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + filter := bson.M{"_id": id} + err := GameDownloadCollection.FindOne(ctx, filter).Decode(&item) + if err != nil { + return nil, err + } + return &item, nil +} + +func GetGameDownloadsByIDs(ids []primitive.ObjectID) ([]*model.GameDownload, error) { + var items []*model.GameDownload + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + cursor, err := GameDownloadCollection.Find(ctx, bson.M{"_id": bson.M{"$in": ids}}) + if err != nil { + return nil, err + } + defer cursor.Close(ctx) + for cursor.Next(ctx) { + var game model.GameDownload + if err = cursor.Decode(&game); err != nil { + return nil, err + } + items = append(items, &game) + } + if cursor.Err() != nil { + return nil, cursor.Err() + } + return items, err +} + +func SearchGameInfos(name string, page int, pageSize int) ([]*model.GameInfo, int, error) { + var items []*model.GameInfo + name = removeDelimiter.ReplaceAllString(name, " ") + name = removeRepeatingSpacesRegex.ReplaceAllString(name, " ") + name = strings.TrimSpace(name) + name = strings.Replace(name, " ", ".*", -1) + name = fmt.Sprintf("%s.*", name) + ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) + defer cancel() + + filter := bson.M{"$or": []interface{}{ + bson.M{"name": bson.M{"$regex": primitive.Regex{Pattern: name, Options: "i"}}}, + bson.M{"aliases": bson.M{"$regex": primitive.Regex{Pattern: name, Options: "i"}}}, + }} + totalCount, err := GameInfoCollection.CountDocuments(ctx, filter) + if err != nil { + return nil, 0, err + } + totalPage := (totalCount + int64(pageSize) - 1) / int64(pageSize) + findOpts := options.Find().SetSkip(int64((page - 1) * pageSize)).SetLimit(int64(pageSize)).SetSort(bson.D{{Key: "name", Value: 1}}) + + cursor, err := GameInfoCollection.Find(ctx, filter, findOpts) + if err != nil { + return nil, 0, err + } + defer cursor.Close(ctx) + for cursor.Next(ctx) { + var game model.GameInfo + if err = cursor.Decode(&game); err != nil { + return nil, 0, err + } + game.Games, err = GetGameDownloadsByIDs(game.GameIDs) + if err != nil { + return nil, 0, err + } + items = append(items, &game) + } + if err := cursor.Err(); err != nil { + return nil, 0, err + } + return items, int(totalPage), nil +} + +func SearchGameInfosCache(name string, page int, pageSize int) ([]*model.GameInfo, int, error) { + type res struct { + Items []*model.GameInfo + TotalPage int + } + name = strings.ToLower(name) + if config.Config.RedisAvaliable { + key := fmt.Sprintf("searchGameDetails:%s:%d:%d", name, page, pageSize) + val, exist := cache.Get(key) + if exist { + var data res + err := json.Unmarshal([]byte(val), &data) + if err != nil { + return nil, 0, err + } + return data.Items, data.TotalPage, nil + } else { + data, totalPage, err := SearchGameInfos(name, page, pageSize) + if err != nil { + return nil, 0, err + } + dataBytes, err := json.Marshal(res{Items: data, TotalPage: totalPage}) + if err != nil { + return nil, 0, err + } + _ = cache.AddWithExpire(key, string(dataBytes), 12*time.Hour) + return data, totalPage, nil + } + } else { + return SearchGameInfos(name, page, pageSize) + } +} + +func GetGameInfoByPlatformID(platform string, id int) (*model.GameInfo, error) { + ctx, cancel 
:= context.WithTimeout(context.Background(), 10*time.Second)
+ defer cancel()
+ var filter interface{}
+ switch platform {
+ case "steam":
+ filter = bson.M{"steam_id": id}
+ case "gog":
+ filter = bson.M{"gog_id": id}
+ case "igdb":
+ filter = bson.M{"igdb_id": id}
+ }
+ var game model.GameInfo
+ err := GameInfoCollection.FindOne(ctx, filter).Decode(&game)
+ if err != nil {
+ return nil, err
+ }
+ return &game, nil
+}
+
+func GetUnorganizedGameDownloads(num int) ([]*model.GameDownload, error) {
+ ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
+ defer cancel()
+ var gamesNotInDetails []*model.GameDownload
+ pipeline := mongo.Pipeline{
+ bson.D{{Key: "$lookup", Value: bson.D{
+ {Key: "from", Value: "game_infos"},
+ {Key: "localField", Value: "_id"},
+ {Key: "foreignField", Value: "games"},
+ {Key: "as", Value: "gameDetail"},
+ }}},
+ }
+ pipeline = append(pipeline,
+ bson.D{{Key: "$match", Value: bson.D{
+ {Key: "gameDetail", Value: bson.D{{Key: "$size", Value: 0}}},
+ }}},
+ bson.D{{Key: "$sort", Value: bson.D{{Key: "name", Value: 1}}}},
+ )
+ // apply the limit only after filtering, so up to num unorganized games are returned
+ if num != -1 && num > 0 {
+ pipeline = append(pipeline, bson.D{{Key: "$limit", Value: num}})
+ }
+
+ cursor, err := GameDownloadCollection.Aggregate(ctx, pipeline)
+ if err != nil {
+ return nil, err
+ }
+ defer cursor.Close(ctx)
+
+ for cursor.Next(ctx) {
+ var game model.GameDownload
+ if err := cursor.Decode(&game); err != nil {
+ return nil, err
+ }
+ gamesNotInDetails = append(gamesNotInDetails, &game)
+ }
+
+ if err := cursor.Err(); err != nil {
+ return nil, err
+ }
+
+ return gamesNotInDetails, nil
+}
+
+func GetGameInfoByID(id primitive.ObjectID) (*model.GameInfo, error) {
+ ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
+ defer cancel()
+ var game model.GameInfo
+ err := GameInfoCollection.FindOne(ctx, bson.M{"_id": id}).Decode(&game)
+ if err != nil {
+ return nil, err
+ }
+ return &game, nil
+}
+
+func DeduplicateGames() ([]primitive.ObjectID, error) {
+ type queryRes struct {
+ ID string `bson:"_id"`
+ Total int `bson:"total"`
+ IDs []primitive.ObjectID `bson:"ids"`
+ }
+
+ var res []primitive.ObjectID
+
+ ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
+ defer cancel()
+
+ var qres []queryRes
+ pipeline := mongo.Pipeline{
+ bson.D{{Key: "$group", Value: bson.D{
+ {Key: "_id", Value: "$download"},
+ {Key: "total", Value: bson.D{{Key: "$sum", Value: 1}}},
+ {Key: "ids", Value: bson.D{{Key: "$push", Value: "$_id"}}},
+ }}},
+ bson.D{{Key: "$match", Value: bson.D{
+ {Key: "total", Value: bson.D{{Key: "$gt", Value: 1}}},
+ }}},
+ }
+ cursor, err := GameDownloadCollection.Aggregate(ctx, pipeline)
+ if err != nil {
+ return nil, err
+ }
+ if err = cursor.All(ctx, &qres); err != nil {
+ return nil, err
+ }
+ for _, item := range qres {
+ idsToDelete := item.IDs[1:]
+ res = append(res, idsToDelete...)
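+ // delete the duplicate downloads below, then prune the deleted IDs from any
+ // game_infos that still reference them before saving those infos back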
+ _, err = GameDownloadCollection.DeleteMany(ctx, bson.D{{Key: "_id", Value: bson.D{{Key: "$in", Value: idsToDelete}}}}) + if err != nil { + return nil, err + } + cursor, err := GameInfoCollection.Find(ctx, bson.M{"games": bson.M{"$in": idsToDelete}}) + if err != nil { + return nil, err + } + var infos []*model.GameInfo + if err := cursor.All(ctx, &infos); err != nil { + return nil, err + } + for _, info := range infos { + newGames := make([]primitive.ObjectID, 0, len(info.GameIDs)) + for _, id := range info.GameIDs { + if !slices.Contains(idsToDelete, id) { + newGames = append(newGames, id) + } + } + info.GameIDs = newGames + if err := SaveGameInfo(info); err != nil { + return nil, err + } + } + } + _, _ = CleanOrphanGamesInGameInfos() + return res, nil +} + +func CleanOrphanGamesInGameInfos() (map[primitive.ObjectID]primitive.ObjectID, error) { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + pipeline := mongo.Pipeline{ + bson.D{{Key: "$unwind", Value: "$games"}}, + bson.D{{Key: "$lookup", Value: bson.D{ + {Key: "from", Value: "game_downloads"}, + {Key: "localField", Value: "games"}, + {Key: "foreignField", Value: "_id"}, + {Key: "as", Value: "gameDownloads"}, + }}}, + bson.D{{Key: "$match", Value: bson.D{ + {Key: "gameDownloads", Value: bson.D{{Key: "$size", Value: 0}}}, + }}}, + bson.D{{Key: "$project", Value: bson.D{ + {Key: "_id", Value: 1}, + {Key: "game", Value: "$games"}, + }}}, + } + cursor, err := GameInfoCollection.Aggregate(ctx, pipeline) + if err != nil { + return nil, err + } + qres := make([]struct { + ID primitive.ObjectID `bson:"_id"` + Game primitive.ObjectID `bson:"game"` + }, 0) + if err := cursor.All(ctx, &qres); err != nil { + return nil, err + } + var res = make(map[primitive.ObjectID]primitive.ObjectID) + for _, item := range qres { + info, err := GetGameInfoByID(item.ID) + if err != nil { + continue + } + newGames := make([]primitive.ObjectID, 0, len(info.GameIDs)) + for _, id := range info.GameIDs { + if id != item.Game { + newGames = append(newGames, id) + } + } + info.GameIDs = newGames + if err := SaveGameInfo(info); err != nil { + return nil, err + } + res[item.ID] = item.Game + } + return res, nil +} + +func CleanGameInfoWithEmptyGameIDs() ([]primitive.ObjectID, error) { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + filter := bson.M{"games": bson.M{"$size": 0}} + cursor, err := GameInfoCollection.Find(ctx, filter) + if err != nil { + return nil, err + } + var games []*model.GameInfo + var res []primitive.ObjectID + if err = cursor.All(ctx, &games); err != nil { + return nil, err + } + for _, item := range games { + res = append(res, item.ID) + } + _, err = GameInfoCollection.DeleteMany(ctx, filter) + if err != nil { + return nil, err + } + return res, nil +} + +func GetGameInfosByName(name string) ([]*model.GameInfo, error) { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + name = strings.TrimSpace(name) + name = fmt.Sprintf("^%s$", name) + filter := bson.M{"name": bson.M{"$regex": primitive.Regex{Pattern: name, Options: "i"}}} + cursor, err := GameInfoCollection.Find(ctx, filter) + if err != nil { + return nil, err + } + var games []*model.GameInfo + if err = cursor.All(ctx, &games); err != nil { + return nil, err + } + return games, nil +} + +func GetGameDownloadByRawName(name string) ([]*model.GameDownload, error) { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + name = 
strings.TrimSpace(name)
+ name = fmt.Sprintf("^%s$", name)
+ filter := bson.M{"raw_name": bson.M{"$regex": primitive.Regex{Pattern: name, Options: "i"}}}
+ cursor, err := GameDownloadCollection.Find(ctx, filter)
+ if err != nil {
+ return nil, err
+ }
+ var game []*model.GameDownload
+ if err = cursor.All(ctx, &game); err != nil {
+ return nil, err
+ }
+ return game, nil
+}
+
+func GetSameNameGameInfos() (map[string][]primitive.ObjectID, error) {
+ ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
+ defer cancel()
+ pipeline := mongo.Pipeline{
+ bson.D{{Key: "$group", Value: bson.D{
+ {Key: "_id", Value: "$name"},
+ {Key: "count", Value: bson.D{{Key: "$sum", Value: 1}}},
+ {Key: "ids", Value: bson.D{{Key: "$addToSet", Value: "$_id"}}},
+ }}},
+ bson.D{{Key: "$match", Value: bson.D{{Key: "count", Value: bson.D{{Key: "$gt", Value: 1}}}}}},
+ }
+ cursor, err := GameInfoCollection.Aggregate(ctx, pipeline)
+ if err != nil {
+ return nil, err
+ }
+ data := make([]struct {
+ Name string `bson:"_id"`
+ Count int `bson:"count"`
+ IDs []primitive.ObjectID `bson:"ids"`
+ }, 0)
+ if err := cursor.All(ctx, &data); err != nil {
+ return nil, err
+ }
+ res := make(map[string][]primitive.ObjectID)
+ for _, item := range data {
+ res[item.Name] = item.IDs
+ }
+ return res, nil
+}
+
+func MergeSameNameGameInfos() error {
+ games, err := GetSameNameGameInfos()
+ if err != nil {
+ return err
+ }
+ for _, ids := range games {
+ var IGDBItem *model.GameInfo = nil
+ otherPlatformItems := make([]*model.GameInfo, 0)
+ skip := false
+ for _, id := range ids {
+ item, err := GetGameInfoByID(id)
+ if err != nil {
+ continue
+ }
+ if item.IGDBID != 0 {
+ if IGDBItem == nil {
+ IGDBItem = item
+ } else {
+ // multiple items carry an IGDB ID and it is unclear which one
+ // is correct, so this group has to be resolved manually
+ skip = true
+ break
+ }
+ } else {
+ otherPlatformItems = append(otherPlatformItems, item)
+ }
+ }
+ if skip {
+ continue
+ }
+ if IGDBItem != nil {
+ for _, item := range otherPlatformItems {
+ IGDBItem.GameIDs = append(IGDBItem.GameIDs, item.GameIDs...)
+ }
+ if err := SaveGameInfo(IGDBItem); err != nil {
+ continue
+ }
+ // the merged infos are no longer needed once their downloads
+ // have been attached to the IGDB item
+ for _, item := range otherPlatformItems {
+ _ = DeleteGameInfoByID(item.ID)
+ }
+ }
+ }
+ return nil
+}
+
+func GetGameInfoCount() (int64, error) {
+ ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
+ defer cancel()
+ count, err := GameInfoCollection.CountDocuments(ctx, bson.M{})
+ if err != nil {
+ return 0, err
+ }
+ return count, nil
+}
+
+func GetGameDownloadCount() (int64, error) {
+ ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
+ defer cancel()
+ count, err := GameDownloadCollection.CountDocuments(ctx, bson.M{})
+ if err != nil {
+ return 0, err
+ }
+ return count, nil
+}
+
+func GetGameInfoWithSteamID() ([]*model.GameInfo, error) {
+ ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
+ defer cancel()
+ filter := bson.M{"$and": []bson.M{
+ {"steam_id": bson.M{"$exists": 1}},
+ {"steam_id": bson.M{"$ne": 0}},
+ }}
+
+ cursor, err := GameInfoCollection.Find(ctx, filter)
+ if err != nil {
+ return nil, err
+ }
+ var games []*model.GameInfo
+ if err = cursor.All(ctx, &games); err != nil {
+ return nil, err
+ }
+ return games, nil
+}
+
+func DeleteGameInfoByID(id primitive.ObjectID) error {
+ ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
+ defer cancel()
+ _, err := GameInfoCollection.DeleteOne(ctx, bson.M{"_id": id})
+ if err != nil {
+ return err
+ }
+ return nil
+}
+
+func DeleteGameDownloadByID(id primitive.ObjectID) error {
+ ctx, cancel :=
context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + _, err := GameDownloadCollection.DeleteOne(ctx, bson.M{"_id": id}) + if err != nil { + return err + } + filter := bson.M{"games": bson.M{"$in": []primitive.ObjectID{id}}} + cursor, err := GameInfoCollection.Find(ctx, filter) + if err != nil { + return err + } + var games []*model.GameInfo + if err = cursor.All(ctx, &games); err != nil { + return err + } + for _, game := range games { + newIDs := make([]primitive.ObjectID, 0) + for _, gameID := range game.GameIDs { + if gameID != id { + newIDs = append(newIDs, gameID) + } + } + game.GameIDs = newIDs + if err := SaveGameInfo(game); err != nil { + continue + } + } + return nil +} + +func GetAllAuthors() ([]string, error) { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + pipeline := mongo.Pipeline{ + bson.D{{Key: "$group", Value: bson.D{ + {Key: "_id", Value: "$author"}, + }}}, + } + + cursor, err := GameDownloadCollection.Aggregate(ctx, pipeline) + if err != nil { + return nil, err + } + var authors []struct { + Author string `bson:"_id"` + } + if err = cursor.All(ctx, &authors); err != nil { + return nil, err + } + var res []string + for _, author := range authors { + res = append(res, author.Author) + } + return res, nil +} + +func GetAllGameInfos() ([]*model.GameInfo, error) { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + cursor, err := GameInfoCollection.Find(ctx, bson.M{}) + if err != nil { + return nil, err + } + var res []*model.GameInfo + if err = cursor.All(ctx, &res); err != nil { + return nil, err + } + return res, nil +} diff --git a/db/gnarly.go b/db/gnarly.go new file mode 100644 index 0000000..b1b57a0 --- /dev/null +++ b/db/gnarly.go @@ -0,0 +1,5 @@ +package db + +func IsGnarlyCrawled(flag string) bool { + return IsGameCrawled(flag, "gnarly") +} diff --git a/db/onlinefix.go b/db/onlinefix.go new file mode 100644 index 0000000..db84166 --- /dev/null +++ b/db/onlinefix.go @@ -0,0 +1,13 @@ +package db + +import ( + "pcgamedb/model" +) + +func GetOnlineFixGameDownloads() ([]*model.GameDownload, error) { + return GetGameDownloadsByAuthor("onlinefix") +} + +func IsOnlineFixCrawled(flag string) bool { + return IsGameCrawled(flag, "onlinefix") +} diff --git a/db/steamrip.go b/db/steamrip.go new file mode 100644 index 0000000..5e8f0bc --- /dev/null +++ b/db/steamrip.go @@ -0,0 +1,5 @@ +package db + +func IsSteamRIPCrawled(flag string) bool { + return IsGameCrawled(flag, "SteamRIP") +} diff --git a/db/xatab.go b/db/xatab.go new file mode 100644 index 0000000..4d737a9 --- /dev/null +++ b/db/xatab.go @@ -0,0 +1,13 @@ +package db + +import ( + "pcgamedb/model" +) + +func GetXatabGameDownloads() ([]*model.GameDownload, error) { + return GetGameDownloadsByAuthor("xatab") +} + +func IsXatabCrawled(flag string) bool { + return IsGameCrawled(flag, "xatab") +} diff --git a/docker-compose.yaml b/docker-compose.yaml new file mode 100644 index 0000000..d4e3c0e --- /dev/null +++ b/docker-compose.yaml @@ -0,0 +1,35 @@ +services: + pcgamedb: + build: . 
+ container_name: pcgamedb + restart: unless-stopped + ports: + - 127.0.0.1:8080:8080 + environment: + - LOG_LEVEL=info + - SERVER_PORT=8080 + - DATABASE_HOST=pcgamedb-mongodb + - DATABASE_PORT=27017 + - DATABASE_USER=root + - DATABASE_PASSWORD=password + - DATABASE_NAME=pcgamedb + - REDIS_HOST=pcgamedb-redis + - REDIS_PORT=6379 + - REDIS_DB=0 + # Read more about environment variables: config/config.go + pcgamedb-mongodb: + container_name: pcgamedb-mongodb + image: mongo:latest + restart: unless-stopped + environment: + MONGO_INITDB_ROOT_USERNAME: root + MONGO_INITDB_ROOT_PASSWORD: password + volumes: + - ./mongodb:/data/db + pcgamedb-redis: + image: redis:latest + container_name: pcgamedb-redis + volumes: + - ./redis:/data + command: redis-server --appendonly yes + restart: unless-stopped diff --git a/go.mod b/go.mod new file mode 100644 index 0000000..a9a04a1 --- /dev/null +++ b/go.mod @@ -0,0 +1,85 @@ +module pcgamedb + +go 1.21.5 + +require ( + github.com/PuerkitoBio/goquery v1.9.2 + github.com/anacrolix/torrent v1.55.0 + github.com/bogdanfinn/fhttp v0.5.28 + github.com/bogdanfinn/tls-client v1.7.5 + github.com/btcsuite/btcutil v1.0.2 + github.com/gin-contrib/cors v1.7.2 + github.com/gin-gonic/gin v1.10.0 + github.com/jlaffaye/ftp v0.2.0 + github.com/redis/go-redis/v9 v9.5.2 + github.com/robfig/cron/v3 v3.0.0 + github.com/spf13/cobra v1.8.0 + github.com/swaggo/files v1.0.1 + github.com/swaggo/gin-swagger v1.6.0 + github.com/swaggo/swag v1.16.3 + go.mongodb.org/mongo-driver v1.16.0 + go.uber.org/zap v1.27.0 + golang.org/x/crypto v0.24.0 + golang.org/x/net v0.26.0 + gopkg.in/natefinch/lumberjack.v2 v2.2.1 +) + +require ( + github.com/KyleBanks/depth v1.2.1 // indirect + github.com/anacrolix/missinggo v1.3.0 // indirect + github.com/anacrolix/missinggo/v2 v2.7.3 // indirect + github.com/andybalholm/brotli v1.0.5 // indirect + github.com/andybalholm/cascadia v1.3.2 // indirect + github.com/bogdanfinn/utls v1.6.1 // indirect + github.com/bradfitz/iter v0.0.0-20191230175014-e8f45d346db8 // indirect + github.com/bytedance/sonic v1.11.9 // indirect + github.com/bytedance/sonic/loader v0.1.1 // indirect + github.com/cespare/xxhash/v2 v2.2.0 // indirect + github.com/cloudflare/circl v1.3.6 // indirect + github.com/cloudwego/base64x v0.1.4 // indirect + github.com/cloudwego/iasm v0.2.0 // indirect + github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect + github.com/gabriel-vasile/mimetype v1.4.4 // indirect + github.com/gin-contrib/sse v0.1.0 // indirect + github.com/go-openapi/jsonpointer v0.21.0 // indirect + github.com/go-openapi/jsonreference v0.21.0 // indirect + github.com/go-openapi/spec v0.21.0 // indirect + github.com/go-openapi/swag v0.23.0 // indirect + github.com/go-playground/locales v0.14.1 // indirect + github.com/go-playground/universal-translator v0.18.1 // indirect + github.com/go-playground/validator/v10 v10.22.0 // indirect + github.com/goccy/go-json v0.10.3 // indirect + github.com/golang/snappy v0.0.4 // indirect + github.com/hashicorp/errwrap v1.1.0 // indirect + github.com/hashicorp/go-multierror v1.1.1 // indirect + github.com/huandu/xstrings v1.3.2 // indirect + github.com/inconshreveable/mousetrap v1.1.0 // indirect + github.com/josharian/intern v1.0.0 // indirect + github.com/json-iterator/go v1.1.12 // indirect + github.com/klauspost/compress v1.16.7 // indirect + github.com/klauspost/cpuid/v2 v2.2.8 // indirect + github.com/leodido/go-urn v1.4.0 // indirect + github.com/mailru/easyjson v0.7.7 // indirect + github.com/mattn/go-isatty v0.0.20 
// indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/montanaflynn/stats v0.7.1 // indirect + github.com/pelletier/go-toml/v2 v2.2.2 // indirect + github.com/quic-go/quic-go v0.37.4 // indirect + github.com/spf13/pflag v1.0.5 // indirect + github.com/tam7t/hpkp v0.0.0-20160821193359-2b70b4024ed5 // indirect + github.com/twitchyliquid64/golang-asm v0.15.1 // indirect + github.com/ugorji/go/codec v1.2.12 // indirect + github.com/xdg-go/pbkdf2 v1.0.0 // indirect + github.com/xdg-go/scram v1.1.2 // indirect + github.com/xdg-go/stringprep v1.0.4 // indirect + github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d // indirect + go.uber.org/multierr v1.11.0 // indirect + golang.org/x/arch v0.8.0 // indirect + golang.org/x/sync v0.7.0 // indirect + golang.org/x/sys v0.22.0 // indirect + golang.org/x/text v0.16.0 // indirect + golang.org/x/tools v0.22.0 // indirect + google.golang.org/protobuf v1.34.2 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..16ae865 --- /dev/null +++ b/go.sum @@ -0,0 +1,487 @@ +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +crawshaw.io/iox v0.0.0-20181124134642-c51c3df30797/go.mod h1:sXBiorCo8c46JlQV3oXPKINnZ8mcqnye1EkVkqsectk= +crawshaw.io/sqlite v0.3.2/go.mod h1:igAO5JulrQ1DbdZdtVq48mnZUBAPOeFzer7VhDWNtW4= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc= +github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE= +github.com/PuerkitoBio/goquery v1.9.2 h1:4/wZksC3KgkQw7SQgkKotmKljk0M6V8TUvA8Wb4yPeE= +github.com/PuerkitoBio/goquery v1.9.2/go.mod h1:GHPCaP0ODyyxqcNoFGYlAprUFH81NuRPd0GX3Zu2Mvk= +github.com/RoaringBitmap/roaring v0.4.7/go.mod h1:8khRDP4HmeXns4xIj9oGrKSz7XTQiJx2zgh7AcNke4w= +github.com/RoaringBitmap/roaring v0.4.17/go.mod h1:D3qVegWTmfCaX4Bl5CrBE9hfrSrrXIr8KVNvRsDi1NI= +github.com/RoaringBitmap/roaring v0.4.23/go.mod h1:D0gp8kJQgE1A4LQ5wFLggQEyvDi06Mq5mKs52e1TwOo= +github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo= +github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI= +github.com/aead/siphash v1.0.1/go.mod h1:Nywa3cDsYNNK3gaciGTWPwHt0wlpNV15vwmswBAUSII= +github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/anacrolix/dht/v2 v2.19.2-0.20221121215055-066ad8494444 h1:8V0K09lrGoeT2KRJNOtspA7q+OMxGwQqK/Ug0IiaaRE= +github.com/anacrolix/dht/v2 v2.19.2-0.20221121215055-066ad8494444/go.mod h1:MctKM1HS5YYDb3F30NGJxLE+QPuqWoT5ReW/4jt8xew= +github.com/anacrolix/envpprof v0.0.0-20180404065416-323002cec2fa/go.mod h1:KgHhUaQMc8cC0+cEflSgCFNFbKwi5h54gqtVn8yhP7c= +github.com/anacrolix/envpprof v1.0.0/go.mod h1:KgHhUaQMc8cC0+cEflSgCFNFbKwi5h54gqtVn8yhP7c= +github.com/anacrolix/envpprof v1.1.0/go.mod 
h1:My7T5oSqVfEn4MD4Meczkw/f5lSIndGAKu/0SM/rkf4= +github.com/anacrolix/log v0.3.0/go.mod h1:lWvLTqzAnCWPJA08T2HCstZi0L1y2Wyvm3FJgwU9jwU= +github.com/anacrolix/log v0.6.0/go.mod h1:lWvLTqzAnCWPJA08T2HCstZi0L1y2Wyvm3FJgwU9jwU= +github.com/anacrolix/missinggo v1.1.0/go.mod h1:MBJu3Sk/k3ZfGYcS7z18gwfu72Ey/xopPFJJbTi5yIo= +github.com/anacrolix/missinggo v1.1.2-0.20190815015349-b888af804467/go.mod h1:MBJu3Sk/k3ZfGYcS7z18gwfu72Ey/xopPFJJbTi5yIo= +github.com/anacrolix/missinggo v1.2.1/go.mod h1:J5cMhif8jPmFoC3+Uvob3OXXNIhOUikzMt+uUjeM21Y= +github.com/anacrolix/missinggo v1.3.0 h1:06HlMsudotL7BAELRZs0yDZ4yVXsHXGi323QBjAVASw= +github.com/anacrolix/missinggo v1.3.0/go.mod h1:bqHm8cE8xr+15uVfMG3BFui/TxyB6//H5fwlq/TeqMc= +github.com/anacrolix/missinggo/perf v1.0.0/go.mod h1:ljAFWkBuzkO12MQclXzZrosP5urunoLS0Cbvb4V0uMQ= +github.com/anacrolix/missinggo/v2 v2.2.0/go.mod h1:o0jgJoYOyaoYQ4E2ZMISVa9c88BbUBVQQW4QeRkNCGY= +github.com/anacrolix/missinggo/v2 v2.5.1/go.mod h1:WEjqh2rmKECd0t1VhQkLGTdIWXO6f6NLjp5GlMZ+6FA= +github.com/anacrolix/missinggo/v2 v2.7.3 h1:Ee//CmZBMadeNiYB/hHo9ly2PFOEZ4Fhsbnug3rDAIE= +github.com/anacrolix/missinggo/v2 v2.7.3/go.mod h1:mIEtp9pgaXqt8VQ3NQxFOod/eQ1H0D1XsZzKUQfwtac= +github.com/anacrolix/stm v0.2.0/go.mod h1:zoVQRvSiGjGoTmbM0vSLIiaKjWtNPeTvXUSdJQA4hsg= +github.com/anacrolix/tagflag v0.0.0-20180109131632-2146c8d41bf0/go.mod h1:1m2U/K6ZT+JZG0+bdMK6qauP49QT4wE5pmhJXOKKCHw= +github.com/anacrolix/tagflag v1.0.0/go.mod h1:1m2U/K6ZT+JZG0+bdMK6qauP49QT4wE5pmhJXOKKCHw= +github.com/anacrolix/tagflag v1.1.0/go.mod h1:Scxs9CV10NQatSmbyjqmqmeQNwGzlNe0CMUMIxqHIG8= +github.com/anacrolix/torrent v1.55.0 h1:s9yh/YGdPmbN9dTa+0Inh2dLdrLQRvEAj1jdFW/Hdd8= +github.com/anacrolix/torrent v1.55.0/go.mod h1:sBdZHBSZNj4de0m+EbYg7vvs/G/STubxu/GzzNbojsE= +github.com/andybalholm/brotli v1.0.5 h1:8uQZIdzKmjc/iuPu7O2ioW48L81FgatrcpfFmiq/cCs= +github.com/andybalholm/brotli v1.0.5/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= +github.com/andybalholm/cascadia v1.3.2 h1:3Xi6Dw5lHF15JtdcmAHD3i1+T8plmv7BQ/nsViSLyss= +github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU= +github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= +github.com/benbjohnson/immutable v0.2.0/go.mod h1:uc6OHo6PN2++n98KHLxW8ef4W42ylHiQSENghE1ezxI= +github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= +github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/bogdanfinn/fhttp v0.5.28 h1:G6thT8s8v6z1IuvXMUsX9QKy3ZHseTQTzxuIhSiaaAw= +github.com/bogdanfinn/fhttp v0.5.28/go.mod h1:oJiYPG3jQTKzk/VFmogH8jxjH5yiv2rrOH48Xso2lrE= +github.com/bogdanfinn/tls-client v1.7.5 h1:R1aTwe5oja5niLnQggzbWnzJEssw9n+3O4kR0H/Tjl4= +github.com/bogdanfinn/tls-client v1.7.5/go.mod h1:pQwF0eqfL0gf0mu8hikvu6deZ3ijSPruJDzEKEnnXjU= +github.com/bogdanfinn/utls v1.6.1 h1:dKDYAcXEyFFJ3GaWaN89DEyjyRraD1qb4osdEK89ass= +github.com/bogdanfinn/utls v1.6.1/go.mod h1:VXIbRZaiY/wHZc6Hu+DZ4O2CgTzjhjCg/Ou3V4r/39Y= +github.com/bradfitz/iter v0.0.0-20140124041915-454541ec3da2/go.mod h1:PyRFw1Lt2wKX4ZVSQ2mk+PeDa1rxyObEDlApuIsUKuo= +github.com/bradfitz/iter v0.0.0-20190303215204-33e6a9893b0c/go.mod h1:PyRFw1Lt2wKX4ZVSQ2mk+PeDa1rxyObEDlApuIsUKuo= +github.com/bradfitz/iter v0.0.0-20191230175014-e8f45d346db8 h1:GKTyiRCL6zVf5wWaqKnf+7Qs6GbEPfd4iMOitWzXJx8= +github.com/bradfitz/iter v0.0.0-20191230175014-e8f45d346db8/go.mod 
h1:spo1JLcs67NmW1aVLEgtA8Yy1elc+X8y5SRW1sFW4Og= +github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs= +github.com/bsm/ginkgo/v2 v2.12.0/go.mod h1:SwYbGRRDovPVboqFv0tPTcG1sN61LM1Z4ARdbAV9g4c= +github.com/bsm/gomega v1.27.10 h1:yeMWxP2pV2fG3FgAODIY8EiRE3dy0aeFYt4l7wh6yKA= +github.com/bsm/gomega v1.27.10/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0= +github.com/btcsuite/btcd v0.20.1-beta/go.mod h1:wVuoA8VJLEcwgqHBwHmzLRazpKxTv13Px/pDuV7OomQ= +github.com/btcsuite/btclog v0.0.0-20170628155309-84c8d2346e9f/go.mod h1:TdznJufoqS23FtqVCzL0ZqgP5MqXbb4fg/WgDys70nA= +github.com/btcsuite/btcutil v0.0.0-20190425235716-9e5f4b9a998d/go.mod h1:+5NJ2+qvTyV9exUAL/rxXi3DcLg2Ts+ymUAY5y4NvMg= +github.com/btcsuite/btcutil v1.0.2 h1:9iZ1Terx9fMIOtq1VrwdqfsATL9MC2l8ZrUY6YZ2uts= +github.com/btcsuite/btcutil v1.0.2/go.mod h1:j9HUFwoQRsZL3V4n+qG+CUnEGHOarIxfC3Le2Yhbcts= +github.com/btcsuite/go-socks v0.0.0-20170105172521-4720035b7bfd/go.mod h1:HHNXQzUsZCxOoE+CPiyCTO6x34Zs86zZUiwtpXoGdtg= +github.com/btcsuite/goleveldb v0.0.0-20160330041536-7834afc9e8cd/go.mod h1:F+uVaaLLH7j4eDXPRvw78tMflu7Ie2bzYOH4Y8rRKBY= +github.com/btcsuite/snappy-go v0.0.0-20151229074030-0bdef8d06723/go.mod h1:8woku9dyThutzjeg+3xrA5iCpBRH8XEEg3lh6TiUghc= +github.com/btcsuite/websocket v0.0.0-20150119174127-31079b680792/go.mod h1:ghJtEyQwv5/p4Mg4C0fgbePVuGr935/5ddU9Z3TmDRY= +github.com/btcsuite/winsvc v1.0.0/go.mod h1:jsenWakMcC0zFBFurPLEAyrnc/teJEM1O46fmI40EZs= +github.com/bytedance/sonic v1.11.9 h1:LFHENlIY/SLzDWverzdOvgMztTxcfcF+cqNsz9pK5zg= +github.com/bytedance/sonic v1.11.9/go.mod h1:LysEHSvpvDySVdC2f87zGWf6CIKJcAvqab1ZaiQtds4= +github.com/bytedance/sonic/loader v0.1.1 h1:c+e5Pt1k/cy5wMveRDyk2X4B9hF4g7an8N3zCYjJFNM= +github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU= +github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= +github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cloudflare/circl v1.3.6 h1:/xbKIqSHbZXHwkhbrhrt2YOHIwYJlXH94E3tI/gDlUg= +github.com/cloudflare/circl v1.3.6/go.mod h1:5XYMA4rFBvNIrhs50XuiBJ15vF2pZn4nnUKZrLbUZFA= +github.com/cloudwego/base64x v0.1.4 h1:jwCgWpFanWmN8xoIUHa2rtzmkd5J2plF/dnLS6Xd/0Y= +github.com/cloudwego/base64x v0.1.4/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w= +github.com/cloudwego/iasm v0.2.0 h1:1KNIy1I1H9hNNFEEH3DVnI4UujN+1zjpuk6gwHLTssg= +github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY= +github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/davecgh/go-spew v0.0.0-20171005155431-ecdeabc65495/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78= +github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc= +github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE= 
+github.com/dustin/go-humanize v0.0.0-20180421182945-02af3965c54e/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= +github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= +github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs= +github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU= +github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I= +github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8= +github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= +github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= +github.com/gabriel-vasile/mimetype v1.4.4 h1:QjV6pZ7/XZ7ryI2KuyeEDE8wnh7fHP9YnQy+R0LnH8I= +github.com/gabriel-vasile/mimetype v1.4.4/go.mod h1:JwLei5XPtWdGiMFB5Pjle1oEeoSeEuJfJE+TtfvdB/s= +github.com/gin-contrib/cors v1.7.2 h1:oLDHxdg8W/XDoN/8zamqk/Drgt4oVZDvaV0YmvVICQw= +github.com/gin-contrib/cors v1.7.2/go.mod h1:SUJVARKgQ40dmrzgXEVxj2m7Ig1v1qIboQkPDTQ9t2E= +github.com/gin-contrib/gzip v0.0.6 h1:NjcunTcGAj5CO1gn4N8jHOSIeRFHIbn51z6K+xaN4d4= +github.com/gin-contrib/gzip v0.0.6/go.mod h1:QOJlmV2xmayAjkNS2Y8NQsMneuRShOU/kjovCXNuzzk= +github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= +github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= +github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU= +github.com/gin-gonic/gin v1.10.0/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y= +github.com/glycerine/go-unsnap-stream v0.0.0-20180323001048-9f0cb55181dd/go.mod h1:/20jfyN9Y5QPEAprSgKAUr+glWDY39ZiUEAYOEv5dsE= +github.com/glycerine/go-unsnap-stream v0.0.0-20181221182339-f9677308dec2/go.mod h1:/20jfyN9Y5QPEAprSgKAUr+glWDY39ZiUEAYOEv5dsE= +github.com/glycerine/go-unsnap-stream v0.0.0-20190901134440-81cf024a9e0a/go.mod h1:/20jfyN9Y5QPEAprSgKAUr+glWDY39ZiUEAYOEv5dsE= +github.com/glycerine/goconvey v0.0.0-20180728074245-46e3a41ad493/go.mod h1:Ogl1Tioa0aV7gstGFO7KhffUsb9M4ydbEbbxpcEDc24= +github.com/glycerine/goconvey v0.0.0-20190315024820-982ee783a72e/go.mod h1:Ogl1Tioa0aV7gstGFO7KhffUsb9M4ydbEbbxpcEDc24= +github.com/glycerine/goconvey v0.0.0-20190410193231-58a59202ab31/go.mod h1:Ogl1Tioa0aV7gstGFO7KhffUsb9M4ydbEbbxpcEDc24= +github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= +github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= +github.com/go-logr/logr v1.2.4 h1:g01GSCwiDw2xSZfjJ2/T9M+S6pFdcNtFYsp+Y43HYDQ= +github.com/go-logr/logr v1.2.4/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-openapi/jsonpointer v0.21.0 h1:YgdVicSA9vH5RiHs9TZW5oyafXZFc6+2Vc1rr/O9oNQ= +github.com/go-openapi/jsonpointer v0.21.0/go.mod h1:IUyH9l/+uyhIYQ/PXVA41Rexl+kOkAPDdXEYns6fzUY= +github.com/go-openapi/jsonreference v0.21.0 h1:Rs+Y7hSXT83Jacb7kFyjn4ijOuVGSvOdF2+tg1TRrwQ= +github.com/go-openapi/jsonreference v0.21.0/go.mod h1:LmZmgsrTkVg9LG4EaHeY8cBDslNPMo06cago5JNLkm4= +github.com/go-openapi/spec v0.21.0 h1:LTVzPc3p/RzRnkQqLRndbAzjY0d0BCL72A6j3CdL9ZY= +github.com/go-openapi/spec v0.21.0/go.mod h1:78u6VdPw81XU44qEWGhtr982gJ5BWg2c0I5XwVMotYk= +github.com/go-openapi/swag v0.23.0 
h1:vsEVJDUo2hPJ2tu0/Xc+4noaxyEffXNIs3cOULZ+GrE= +github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ577vPjgQ= +github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= +github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= +github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= +github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= +github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= +github.com/go-playground/validator/v10 v10.22.0 h1:k6HsTZ0sTnROkhS//R0O+55JgM8C4Bx7ia+JlgcnOao= +github.com/go-playground/validator/v10 v10.22.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI= +github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572/go.mod h1:9Pwr4B2jHnOSGXyyzV8ROjYa2ojvAY6HCGYYfMoC3Ls= +github.com/goccy/go-json v0.10.3 h1:KZ5WoDbxAIgm2HNbYckL0se1fHD6rz5j4ywS6ebzDqA= +github.com/goccy/go-json v0.10.3/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= +github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= +github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/btree v0.0.0-20180124185431-e89373fe6b4a/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod 
h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38 h1:yAJXTCF9TqKcTiHJAE8dj7HMvPfh66eeA2JYW7eFpSE= +github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/gopherjs/gopherjs v0.0.0-20181103185306-d547d1d9531e/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/gopherjs/gopherjs v0.0.0-20190309154008-847fc94819f9/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/gopherjs/gopherjs v0.0.0-20190910122728-9d188e94fb99/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg= +github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= +github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/huandu/xstrings v1.0.0/go.mod h1:4qWG/gcEcfX4z/mBDHJ++3ReCw9ibxbsNJbcucJdbSo= +github.com/huandu/xstrings v1.2.0/go.mod h1:DvyZB1rfVYsBIigL8HwpZgxHwXozlTgGqn63UyNX5k4= +github.com/huandu/xstrings v1.3.1/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= +github.com/huandu/xstrings v1.3.2 h1:L18LIDzqlW6xN2rEkpdV8+oL/IXWJ1APd+vsdYy4Wdw= +github.com/huandu/xstrings v1.3.2/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= +github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= +github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/jessevdk/go-flags v0.0.0-20141203071132-1679536dcc89/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= +github.com/jlaffaye/ftp v0.2.0 h1:lXNvW7cBu7R/68bknOX3MrRIIqZ61zELs1P2RAiA3lg= +github.com/jlaffaye/ftp v0.2.0/go.mod h1:is2Ds5qkhceAPy2xD6RLI6hmp/qysSoymZ+Z2uTnspI= +github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= +github.com/jrick/logrotate v1.0.0/go.mod h1:LNinyqDIJnpAur+b8yyulnQw/wDuN1+BYKlTRt3OuAQ= +github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= 
+github.com/jtolds/gls v4.2.1+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= +github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= +github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/kkdai/bstream v0.0.0-20161212061736-f391b8402d23/go.mod h1:J+Gs4SYgM6CZQHDETBtE9HaSEkGmuNXF86RwHhHUvq4= +github.com/klauspost/compress v1.16.7 h1:2mk3MPGNzKyxErAw8YaohYh69+pa4sIQSC0fPGCFR9I= +github.com/klauspost/compress v1.16.7/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= +github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= +github.com/klauspost/cpuid/v2 v2.2.8 h1:+StwCXwm9PdpiEkPyzBXIy+M9KUb4ODm0Zarf1kS5BM= +github.com/klauspost/cpuid/v2 v2.2.8/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= +github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= +github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= +github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= +github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8eaE= +github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow= +github.com/mschoch/smat v0.0.0-20160514031455-90eadee771ae/go.mod h1:qAyveg+e4CE+eKJXWVjKXM4ck2QobLqTDytGJbLLhJg= +github.com/mschoch/smat v0.2.0/go.mod 
h1:kc9mz7DoBKqDyiRL7VZN8KvXQMWeTaVnttLRXOlotKw= +github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.7.0 h1:WSHQ+IS43OoUrWtD1/bbclrwK8TTH5hzp+umCiuxHgs= +github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo/v2 v2.9.5 h1:+6Hr4uxzP4XIUyAkg61dWBw8lb/gc4/X5luuxN/EC+Q= +github.com/onsi/ginkgo/v2 v2.9.5/go.mod h1:tvAoo1QUJwNEU2ITftXTpR7R1RbCzoZUOs3RonqW57k= +github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= +github.com/onsi/gomega v1.27.6 h1:ENqfyGeS5AX/rlXDd/ETokDz93u0YufY1Pgxuy/PvWE= +github.com/onsi/gomega v1.27.6/go.mod h1:PIQNjfQwkP3aQAH7lf7j87O/5FiNr+ZR8+ipb+qQlhg= +github.com/openzipkin/zipkin-go v0.1.6/go.mod h1:QgAqvLzwWbR/WpD4A3cGpPtJrZXNIiJc5AZX7/PBEpw= +github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM= +github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs= +github.com/philhofer/fwd v1.0.0/go.mod h1:gk3iGcWd9+svBvR0sR+KPcfE+RNWozjowpeBVG3ZVNU= +github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v0.9.3-0.20190127221311-3c4408c8b829/go.mod h1:p2iRAGwDERtqlqzRXnrOVns+ignqQo//hLXqYxZYVNs= +github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= +github.com/prometheus/client_golang v1.5.1/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU= +github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/common v0.2.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4= +github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A= +github.com/prometheus/procfs v0.0.11/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= +github.com/quic-go/quic-go v0.37.4 h1:ke8B73yMCWGq9MfrCCAw0Uzdm7GaViC3i39dsIdDlH4= +github.com/quic-go/quic-go 
v0.37.4/go.mod h1:YsbH1r4mSHPJcLF4k4zruUkLBqctEMBDR6VPvcYjIsU= +github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= +github.com/redis/go-redis/v9 v9.5.2 h1:L0L3fcSNReTRGyZ6AqAEN0K56wYeYAwapBIhkvh0f3E= +github.com/redis/go-redis/v9 v9.5.2/go.mod h1:hdY0cQFCN4fnSYT6TkisLufl/4W5UIXyv0b/CLO2V2M= +github.com/robfig/cron/v3 v3.0.0 h1:kQ6Cb7aHOHTSzNVNEhmp8EcWKLb4CbiMW9h9VyIhO4E= +github.com/robfig/cron/v3 v3.0.0/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro= +github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M= +github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/ryszard/goskiplist v0.0.0-20150312221310-2dfbae5fcf46/go.mod h1:uAQ5PCi+MFsC7HjREoAz1BU+Mq60+05gifQSsHSDG/8= +github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= +github.com/smartystreets/assertions v0.0.0-20190215210624-980c5ac6f3ac/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= +github.com/smartystreets/goconvey v0.0.0-20181108003508-044398e4856c/go.mod h1:XDJAKZRPZ1CvBcN2aX5YOUTYGHki24fSF0Iv48Ibg0s= +github.com/smartystreets/goconvey v0.0.0-20190306220146-200a235640ff/go.mod h1:KSQcGKpxUMHk3nbYzs/tIBAM2iDooCn0BmttHOJEbLs= +github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0= +github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho= +github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= +github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= +github.com/stretchr/testify v1.2.1/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/swaggo/files v1.0.1 h1:J1bVJ4XHZNq0I46UU90611i9/YzdrF7x92oX1ig5IdE= +github.com/swaggo/files v1.0.1/go.mod h1:0qXmMNH6sXNf+73t65aKeB+ApmgxdnkQzVTAj2uaMUg= 
+github.com/swaggo/gin-swagger v1.6.0 h1:y8sxvQ3E20/RCyrXeFfg60r6H0Z+SwpTjMYsMm+zy8M= +github.com/swaggo/gin-swagger v1.6.0/go.mod h1:BG00cCEy294xtVpyIAHG6+e2Qzj/xKlRdOqDkvq0uzo= +github.com/swaggo/swag v1.16.3 h1:PnCYjPCah8FK4I26l2F/KQ4yz3sILcVUN3cTlBFA9Pg= +github.com/swaggo/swag v1.16.3/go.mod h1:DImHIuOFXKpMFAQjcC7FG4m3Dg4+QuUgUzJmKjI/gRk= +github.com/tam7t/hpkp v0.0.0-20160821193359-2b70b4024ed5 h1:YqAladjX7xpA6BM04leXMWAEjS0mTZ5kUU9KRBriQJc= +github.com/tam7t/hpkp v0.0.0-20160821193359-2b70b4024ed5/go.mod h1:2JjD2zLQYH5HO74y5+aE3remJQvl6q4Sn6aWA2wD1Ng= +github.com/tinylib/msgp v1.0.2/go.mod h1:+d+yLhGm8mzTaHzB+wgMYrodPfmZrzkirds8fDWklFE= +github.com/tinylib/msgp v1.1.0/go.mod h1:+d+yLhGm8mzTaHzB+wgMYrodPfmZrzkirds8fDWklFE= +github.com/tinylib/msgp v1.1.2/go.mod h1:+d+yLhGm8mzTaHzB+wgMYrodPfmZrzkirds8fDWklFE= +github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= +github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= +github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE= +github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= +github.com/willf/bitset v1.1.9/go.mod h1:RjeCKbqT1RxIR/KWY6phxZiaY1IyutSBfGjNPySAYV4= +github.com/willf/bitset v1.1.10/go.mod h1:RjeCKbqT1RxIR/KWY6phxZiaY1IyutSBfGjNPySAYV4= +github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c= +github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= +github.com/xdg-go/scram v1.1.2 h1:FHX5I5B4i4hKRVRBCFRxq1iQRej7WO3hhBuJf+UUySY= +github.com/xdg-go/scram v1.1.2/go.mod h1:RT/sEzTbU5y00aCK8UOx6R7YryM0iF1N2MOmC3kKLN4= +github.com/xdg-go/stringprep v1.0.4 h1:XLI/Ng3O1Atzq0oBs3TWm+5ZVgkq2aqdlvP9JtoZ6c8= +github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM= +github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d h1:splanxYIlg+5LfHAM6xpdFEAYOk8iySO56hMFq6uLyA= +github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +go.mongodb.org/mongo-driver v1.16.0 h1:tpRsfBJMROVHKpdGyc1BBEzzjDUWjItxbVSZ8Ls4BQ4= +go.mongodb.org/mongo-driver v1.16.0/go.mod h1:oB6AhJQvFQL4LEHyXi6aJzQJtBiTQHiAd83l0GdFaiw= +go.opencensus.io v0.20.1/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk= +go.opencensus.io v0.20.2/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk= +go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= +go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= +go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= +go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= +go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8= +go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E= +golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= +golang.org/x/arch v0.8.0 h1:3wRIsP3pM4yUptoR96otTUOXI367OS0+c9eeRi9doIc= +golang.org/x/arch v0.8.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys= +golang.org/x/crypto v0.0.0-20170930174604-9419663f5a44/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod 
h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20200115085410-6d4e4cb37c7d/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.24.0 h1:mnl8DM0o513X8fdIkmyFE/5hTYxbwYOjDS/+rK6qpRI= +golang.org/x/crypto v0.24.0/go.mod h1:Z1PMYSOR5nyMcyAVAIQSKCDwalqy85Aqn1x3Ws4L5DM= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.18.0 h1:5+9lSbEzPSdWkH32vYPBwEpX8KwDbM52Ud9xBUvNlb0= +golang.org/x/mod v0.18.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190125091013-d26f9f9a57f3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns= +golang.org/x/net v0.26.0 h1:soB7SVo0PWrY4vPW/+ay0jKDNScG2X9wFeYlXIvJsOQ= +golang.org/x/net v0.26.0/go.mod h1:5YKkiSynbBIh3p6iOc/vibscux0x38BZDkn8sCUPxHE= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M= +golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200413165638-669c56c373c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.22.0 h1:RI27ohtqKCnwULzJLqkv897zojh5/DwS/ENaMzUOaWI= +golang.org/x/sys v0.22.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod 
h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.16.0 h1:a94ExnEXNtEwYLGJSIUxnWoxoRz/ZcCsV63ROupILh4= +golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI= +golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/tools v0.22.0 h1:gqSGLZqv+AI9lIQzniJ0nZDRG5GBPsSi+DRNHWNz6yA= +golang.org/x/tools v0.22.0/go.mod h1:aCwcsjqvq7Yqt6TNyX7QMU2enbQ/Gt0bo6krSeEri+c= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg= +google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw= 
+gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= +gopkg.in/natefinch/lumberjack.v2 v2.2.1 h1:bBRl1b0OH9s/DuPhuXpNl+VtCaJXFZ5/uEFST95x9zc= +gopkg.in/natefinch/lumberjack.v2 v2.2.1/go.mod h1:YD8tP3GAjkrDg1eZH7EGmyESg/lsYskCTPBJVb9jqSc= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50= +rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4= diff --git a/log/log.go b/log/log.go new file mode 100644 index 0000000..812bf77 --- /dev/null +++ b/log/log.go @@ -0,0 +1,68 @@ +package log + +import ( + "os" + "pcgamedb/config" + "strings" + + "go.uber.org/zap" + "go.uber.org/zap/zapcore" + "gopkg.in/natefinch/lumberjack.v2" +) + +var Logger *zap.Logger +var ConsoleLogger *zap.Logger +var FileLogger *zap.Logger +var TaskLogger *zap.Logger + +func init() { + fileCore, consoleCore, combinedCore, taskCore := buildZapCore(getZapLogLevel(config.Config.LogLevel)) + FileLogger = zap.New(fileCore, zap.AddCaller()) + ConsoleLogger = zap.New(consoleCore, zap.AddCaller()) + Logger = zap.New(combinedCore, zap.AddCaller()) + TaskLogger = zap.New(taskCore, zap.AddCaller()) +} + +func buildZapCore(logLevel zapcore.Level) (fileCore zapcore.Core, consoleCore zapcore.Core, combinedCore zapcore.Core, taskCore zapcore.Core) { + fileWriter := zapcore.AddSync(&lumberjack.Logger{ + Filename: "logs/app.log", + MaxSize: 500, + MaxBackups: 3, + MaxAge: 28, + Compress: true, + }) + taskFileWriter := zapcore.AddSync(&lumberjack.Logger{ + Filename: "logs/task.log", + MaxSize: 500, + MaxBackups: 3, + MaxAge: 28, + Compress: true, + }) + consoleWriter := zapcore.AddSync(os.Stdout) + + encoderConfig := zap.NewProductionEncoderConfig() + encoderConfig.EncodeLevel = zapcore.CapitalLevelEncoder + encoderConfig.EncodeTime = zapcore.ISO8601TimeEncoder + encoderConfig.EncodeCaller = zapcore.ShortCallerEncoder + + fileCore = zapcore.NewCore(zapcore.NewConsoleEncoder(encoderConfig), fileWriter, logLevel) + consoleCore = 
zapcore.NewCore(zapcore.NewConsoleEncoder(encoderConfig), consoleWriter, logLevel) + combinedCore = zapcore.NewTee(fileCore, consoleCore) + taskCore = zapcore.NewCore(zapcore.NewConsoleEncoder(encoderConfig), taskFileWriter, logLevel) + return +} + +func getZapLogLevel(logLevel string) zapcore.Level { + switch strings.ToLower(logLevel) { + case "debug": + return zap.DebugLevel + case "warn": + return zap.WarnLevel + case "error": + return zap.ErrorLevel + case "info": + return zap.InfoLevel + default: + return zap.InfoLevel + } +} diff --git a/main.go b/main.go new file mode 100644 index 0000000..c00c3da --- /dev/null +++ b/main.go @@ -0,0 +1,17 @@ +package main + +import ( + "pcgamedb/cmd" + "pcgamedb/log" + "strings" + + "go.uber.org/zap" +) + +func main() { + if err := cmd.RootCmd.Execute(); err != nil { + if !strings.Contains(err.Error(), "unknown command") { + log.Logger.Error("Failed to execute command", zap.Error(err)) + } + } +} diff --git a/model/game.go b/model/game.go new file mode 100644 index 0000000..af6b0e6 --- /dev/null +++ b/model/game.go @@ -0,0 +1,40 @@ +package model + +import ( + "time" + + "go.mongodb.org/mongo-driver/bson/primitive" +) + +type GameInfo struct { + ID primitive.ObjectID `json:"id" bson:"_id"` + Name string `json:"name" bson:"name"` + Description string `json:"description" bson:"description"` + Aliases []string `json:"aliases" bson:"aliases"` + Developers []string `json:"developers" bson:"developers"` + Publishers []string `json:"publishers" bson:"publishers"` + IGDBID int `json:"igdb_id" bson:"igdb_id"` + SteamID int `json:"steam_id" bson:"steam_id"` + GOGID int `json:"-" bson:"gog_id"` + Cover string `json:"cover" bson:"cover"` + Languages []string `json:"languages" bson:"languages"` + Screenshots []string `json:"screenshots" bson:"screenshots"` + GameIDs []primitive.ObjectID `json:"game_ids" bson:"games"` + Games []*GameDownload `json:"game_downloads" bson:"-"` + CreatedAt time.Time `json:"created_at" bson:"created_at"` + UpdatedAt time.Time `json:"updated_at" bson:"updated_at"` +} + +type GameDownload struct { + ID primitive.ObjectID `json:"id" bson:"_id"` + Name string `json:"speculative_name" bson:"name"` + RawName string `json:"raw_name,omitempty" bson:"raw_name"` + Download string `json:"download_link,omitempty" bson:"download"` + Size string `json:"size,omitempty" bson:"size"` + Url string `json:"url" bson:"url"` + Password string `json:"password,omitempty" bson:"password"` + Author string `json:"author,omitempty" bson:"author"` + UpdateFlag string `json:"-" bson:"update_flag,omitempty"` + CreatedAt time.Time `json:"created_at" bson:"created_at"` + UpdatedAt time.Time `json:"updated_at" bson:"updated_at"` +} diff --git a/model/gog.go b/model/gog.go new file mode 100644 index 0000000..e6cd18c --- /dev/null +++ b/model/gog.go @@ -0,0 +1,171 @@ +package model + +type GOGAppDetail struct { + ID int `json:"id"` + Title string `json:"title"` + PurchaseLink string `json:"purchase_link"` + Slug string `json:"slug"` + ContentSystemCompatibility struct { + Windows bool `json:"windows"` + Osx bool `json:"osx"` + Linux bool `json:"linux"` + } `json:"content_system_compatibility"` + Languages map[string]string `json:"languages"` + Links struct { + PurchaseLink string `json:"purchase_link"` + ProductCard string `json:"product_card"` + Support string `json:"support"` + Forum string `json:"forum"` + } `json:"links"` + InDevelopment struct { + Active bool `json:"active"` + Until interface{} `json:"until"` + } `json:"in_development"` + IsSecret bool 
`json:"is_secret"` + IsInstallable bool `json:"is_installable"` + GameType string `json:"game_type"` + IsPreOrder bool `json:"is_pre_order"` + ReleaseDate string `json:"release_date"` + Images struct { + Background string `json:"background"` + Logo string `json:"logo"` + Logo2X string `json:"logo2x"` + Icon string `json:"icon"` + SidebarIcon string `json:"sidebarIcon"` + SidebarIcon2X string `json:"sidebarIcon2x"` + MenuNotificationAv string `json:"menuNotificationAv"` + MenuNotificationAv2 string `json:"menuNotificationAv2"` + } `json:"images"` + Dlcs any `json:"dlcs"` + Downloads struct { + Installers []struct { + ID string `json:"id"` + Name string `json:"name"` + Os string `json:"os"` + Language string `json:"language"` + LanguageFull string `json:"language_full"` + Version string `json:"version"` + TotalSize int `json:"total_size"` + Files []struct { + ID string `json:"id"` + Size int `json:"size"` + Downlink string `json:"downlink"` + } `json:"files"` + } `json:"installers"` + Patches []interface{} `json:"patches"` + LanguagePacks []interface{} `json:"language_packs"` + BonusContent []struct { + ID int `json:"id"` + Name string `json:"name"` + Type string `json:"type"` + Count int `json:"count"` + TotalSize int `json:"total_size"` + Files []struct { + ID int `json:"id"` + Size int `json:"size"` + Downlink string `json:"downlink"` + } `json:"files"` + } `json:"bonus_content"` + } `json:"downloads"` + ExpandedDlcs []interface{} `json:"expanded_dlcs"` + Description struct { + Lead string `json:"lead"` + Full string `json:"full"` + WhatsCoolAboutIt string `json:"whats_cool_about_it"` + } `json:"description"` + Screenshots []struct { + ImageID string `json:"image_id"` + FormatterTemplateURL string `json:"formatter_template_url"` + FormattedImages []struct { + FormatterName string `json:"formatter_name"` + ImageURL string `json:"image_url"` + } `json:"formatted_images"` + } `json:"screenshots"` + Videos []interface{} `json:"videos"` + RelatedProducts []interface{} `json:"related_products"` + Changelog string `json:"changelog"` +} + +type GOGSearch struct { + Products []struct { + CustomAttributes []interface{} `json:"customAttributes"` + Developer string `json:"developer"` + Publisher string `json:"publisher"` + Gallery []string `json:"gallery"` + Video struct { + ID string `json:"id"` + Provider string `json:"provider"` + } `json:"video"` + SupportedOperatingSystems []string `json:"supportedOperatingSystems"` + Genres []string `json:"genres"` + GlobalReleaseDate interface{} `json:"globalReleaseDate"` + IsTBA bool `json:"isTBA"` + Price struct { + Currency string `json:"currency"` + Amount string `json:"amount"` + BaseAmount string `json:"baseAmount"` + FinalAmount string `json:"finalAmount"` + IsDiscounted bool `json:"isDiscounted"` + DiscountPercentage int `json:"discountPercentage"` + DiscountDifference string `json:"discountDifference"` + Symbol string `json:"symbol"` + IsFree bool `json:"isFree"` + Discount int `json:"discount"` + IsBonusStoreCreditIncluded bool `json:"isBonusStoreCreditIncluded"` + BonusStoreCreditAmount string `json:"bonusStoreCreditAmount"` + PromoID interface{} `json:"promoId"` + } `json:"price"` + IsDiscounted bool `json:"isDiscounted"` + IsInDevelopment bool `json:"isInDevelopment"` + ID int `json:"id"` + ReleaseDate interface{} `json:"releaseDate"` + Availability struct { + IsAvailable bool `json:"isAvailable"` + IsAvailableInAccount bool `json:"isAvailableInAccount"` + } `json:"availability"` + SalesVisibility struct { + IsActive bool `json:"isActive"` + 
FromObject struct { + Date string `json:"date"` + TimezoneType int `json:"timezone_type"` + Timezone string `json:"timezone"` + } `json:"fromObject"` + From int `json:"from"` + ToObject struct { + Date string `json:"date"` + TimezoneType int `json:"timezone_type"` + Timezone string `json:"timezone"` + } `json:"toObject"` + To int `json:"to"` + } `json:"salesVisibility"` + Buyable bool `json:"buyable"` + Title string `json:"title"` + Image string `json:"image"` + URL string `json:"url"` + SupportURL string `json:"supportUrl"` + ForumURL string `json:"forumUrl"` + WorksOn struct { + Windows bool `json:"Windows"` + Mac bool `json:"Mac"` + Linux bool `json:"Linux"` + } `json:"worksOn"` + Category string `json:"category"` + OriginalCategory string `json:"originalCategory"` + Rating int `json:"rating"` + Type int `json:"type"` + IsComingSoon bool `json:"isComingSoon"` + IsPriceVisible bool `json:"isPriceVisible"` + IsMovie bool `json:"isMovie"` + IsGame bool `json:"isGame"` + Slug string `json:"slug"` + IsWishlistable bool `json:"isWishlistable"` + ExtraInfo []interface{} `json:"extraInfo"` + AgeLimit int `json:"ageLimit"` + } `json:"products"` + Ts interface{} `json:"ts"` + Page int `json:"page"` + TotalPages int `json:"totalPages"` + TotalResults string `json:"totalResults"` + TotalGamesFound int `json:"totalGamesFound"` + TotalMoviesFound int `json:"totalMoviesFound"` +} diff --git a/model/igdb.go b/model/igdb.go new file mode 100644 index 0000000..1b685a0 --- /dev/null +++ b/model/igdb.go @@ -0,0 +1,98 @@ +package model + +type IGDBGameDetail struct { + ID int `json:"id,omitempty"` + ParentGame int `json:"parent_game,omitempty"` + AgeRatings []int `json:"age_ratings,omitempty"` + AlternativeNames []struct { + Name string `json:"name,omitempty"` + } `json:"alternative_names,omitempty"` + Category int `json:"category,omitempty"` + Cover struct { + URL string `json:"url,omitempty"` + } `json:"cover,omitempty"` + CreatedAt int `json:"created_at,omitempty"` + ExternalGames []int `json:"external_games,omitempty"` + FirstReleaseDate int `json:"first_release_date,omitempty"` + Franchises []int `json:"franchises,omitempty"` + GameModes []int `json:"game_modes,omitempty"` + Genres []int `json:"genres,omitempty"` + InvolvedCompanies []struct { + Company int `json:"company,omitempty"` + Developer bool `json:"developer,omitempty"` + Publisher bool `json:"publisher,omitempty"` + } `json:"involved_companies,omitempty"` + Name string `json:"name,omitempty"` + Platforms []int `json:"platforms,omitempty"` + PlayerPerspectives []int `json:"player_perspectives,omitempty"` + Rating float64 `json:"rating,omitempty"` + RatingCount int `json:"rating_count,omitempty"` + ReleaseDates []int `json:"release_dates,omitempty"` + Screenshots []struct { + URL string `json:"url,omitempty"` + } `json:"screenshots,omitempty"` + SimilarGames []int `json:"similar_games,omitempty"` + Slug string `json:"slug,omitempty"` + Summary string `json:"summary,omitempty"` + Tags []int `json:"tags,omitempty"` + Themes []int `json:"themes,omitempty"` + TotalRating float64 `json:"total_rating,omitempty"` + TotalRatingCount int `json:"total_rating_count,omitempty"` + UpdatedAt int `json:"updated_at,omitempty"` + URL string `json:"url,omitempty"` + VersionParent int `json:"version_parent,omitempty"` + VersionTitle string `json:"version_title,omitempty"` + Checksum string `json:"checksum,omitempty"` + Websites []int `json:"websites,omitempty"` + GameLocalizations []int `json:"game_localizations,omitempty"` + AggregatedRating float64 
`json:"aggregated_rating,omitempty"` + AggregatedRatingCount int `json:"aggregated_rating_count,omitempty"` + Artworks []int `json:"artworks,omitempty"` + Bundles []int `json:"bundles,omitempty"` + Collection int `json:"collection,omitempty"` + GameEngines []int `json:"game_engines,omitempty"` + Keywords []int `json:"keywords,omitempty"` + MultiplayerModes []int `json:"multiplayer_modes,omitempty"` + StandaloneExpansions []int `json:"standalone_expansions,omitempty"` + Storyline string `json:"storyline,omitempty"` + Videos []int `json:"videos,omitempty"` + LanguageSupports []struct { + Language int `json:"language,omitempty"` + LanguageSupportType int `json:"language_support_type,omitempty"` + } `json:"language_supports,omitempty"` + Collections []int `json:"collections,omitempty"` +} + +type IGDBGameDetails []*IGDBGameDetail + +type IGDBCompany struct { + ID int `json:"id"` + ChangeDateCategory int `json:"change_date_category"` + Country int `json:"country"` + CreatedAt int `json:"created_at"` + Description string `json:"description"` + Developed []int `json:"developed"` + Logo int `json:"logo"` + Name string `json:"name"` + Parent int `json:"parent"` + Published []int `json:"published"` + Slug string `json:"slug"` + StartDate int `json:"start_date"` + StartDateCategory int `json:"start_date_category"` + UpdatedAt int `json:"updated_at"` + URL string `json:"url"` + Websites []int `json:"websites"` + Checksum string `json:"checksum"` +} + +type IGDBCompanies []*IGDBCompany + +type IGDBSearch struct { + ID int `json:"id"` + AlternativeName string `json:"alternative_name"` + Game int `json:"game"` + Name string `json:"name"` + PublishedAt int `json:"published_at"` +} + +type IGDBSearches []*IGDBSearch diff --git a/model/steam.go b/model/steam.go new file mode 100644 index 0000000..08cff6d --- /dev/null +++ b/model/steam.go @@ -0,0 +1,138 @@ +package model + +type SteamAppDetail struct { + Success bool `json:"success"` + Data struct { + Type string `json:"type"` + Name string `json:"name"` + SteamAppid int `json:"steam_appid"` + RequiredAge any `json:"required_age"` + IsFree bool `json:"is_free"` + ControllerSupport string `json:"controller_support"` + DetailedDescription string `json:"detailed_description"` + AboutTheGame string `json:"about_the_game"` + ShortDescription string `json:"short_description"` + SupportedLanguages string `json:"supported_languages"` + HeaderImage string `json:"header_image"` + CapsuleImage string `json:"capsule_image"` + CapsuleImagev5 string `json:"capsule_imagev5"` + Website string `json:"website"` + PcRequirements any `json:"pc_requirements"` + MacRequirements any `json:"mac_requirements"` + LinuxRequirements any `json:"linux_requirements"` + LegalNotice string `json:"legal_notice"` + Developers []string `json:"developers"` + Publishers []string `json:"publishers"` + PackageGroups []interface{} `json:"package_groups"` + Platforms struct { + Windows bool `json:"windows"` + Mac bool `json:"mac"` + Linux bool `json:"linux"` + } `json:"platforms"` + Metacritic struct { + Score int `json:"score"` + URL string `json:"url"` + } `json:"metacritic"` + Categories []struct { + ID int `json:"id"` + Description string `json:"description"` + } `json:"categories"` + Genres []struct { + ID string `json:"id"` + Description string `json:"description"` + } `json:"genres"` + Screenshots []struct { + ID int `json:"id"` + PathThumbnail string `json:"path_thumbnail"` + PathFull string `json:"path_full"` + } `json:"screenshots"` + Movies []struct { + ID int `json:"id"` + Name string 
`json:"name"` + Thumbnail string `json:"thumbnail"` + Webm struct { + Num480 string `json:"480"` + Max string `json:"max"` + } `json:"webm"` + Mp4 struct { + Num480 string `json:"480"` + Max string `json:"max"` + } `json:"mp4"` + Highlight bool `json:"highlight"` + } `json:"movies"` + Recommendations struct { + Total int `json:"total"` + } `json:"recommendations"` + Achievements struct { + Total int `json:"total"` + Highlighted []struct { + Name string `json:"name"` + Path string `json:"path"` + } `json:"highlighted"` + } `json:"achievements"` + ReleaseDate struct { + ComingSoon bool `json:"coming_soon"` + Date string `json:"date"` + } `json:"release_date"` + SupportInfo struct { + URL string `json:"url"` + Email string `json:"email"` + } `json:"support_info"` + Background string `json:"background"` + BackgroundRaw string `json:"background_raw"` + ContentDescriptors struct { + Ids []interface{} `json:"ids"` + Notes interface{} `json:"notes"` + } `json:"content_descriptors"` + Ratings struct { + Esrb struct { + Rating string `json:"rating"` + Descriptors string `json:"descriptors"` + UseAgeGate string `json:"use_age_gate"` + RequiredAge string `json:"required_age"` + } `json:"esrb"` + Pegi struct { + Rating string `json:"rating"` + Descriptors string `json:"descriptors"` + } `json:"pegi"` + Oflc struct { + Rating string `json:"rating"` + Descriptors string `json:"descriptors"` + } `json:"oflc"` + } `json:"ratings"` + } `json:"data"` +} + +type SteamPackageDetail struct { + Success bool `json:"success"` + Data struct { + Name string `json:"name"` + PageContent string `json:"page_content"` + PageImage string `json:"page_image"` + HeaderImage string `json:"header_image"` + SmallLogo string `json:"small_logo"` + Apps []struct { + ID int `json:"id"` + Name string `json:"name"` + } `json:"apps"` + Price struct { + Currency string `json:"currency"` + Initial int `json:"initial"` + Final int `json:"final"` + DiscountPercent int `json:"discount_percent"` + Individual int `json:"individual"` + } `json:"price"` + Platforms struct { + Windows bool `json:"windows"` + Mac bool `json:"mac"` + Linux bool `json:"linux"` + } `json:"platforms"` + Controller struct { + FullGamepad bool `json:"full_gamepad"` + } `json:"controller"` + ReleaseDate struct { + ComingSoon bool `json:"coming_soon"` + Date string `json:"date"` + } `json:"release_date"` + } `json:"data"` +} diff --git a/model/steam250.go b/model/steam250.go new file mode 100644 index 0000000..7526053 --- /dev/null +++ b/model/steam250.go @@ -0,0 +1,6 @@ +package model + +type Steam250Item struct { + Name string + SteamID int +} diff --git a/server/handler/clean_game.go b/server/handler/clean_game.go new file mode 100644 index 0000000..50176ab --- /dev/null +++ b/server/handler/clean_game.go @@ -0,0 +1,14 @@ +package handler + +import ( + "net/http" + "pcgamedb/log" + "pcgamedb/task" + + "github.com/gin-gonic/gin" +) + +func CleanGameHandler(ctx *gin.Context) { + task.Clean(log.TaskLogger) + ctx.JSON(http.StatusOK, gin.H{"status": "ok"}) +} diff --git a/server/handler/get_all_authors.go b/server/handler/get_all_authors.go new file mode 100644 index 0000000..2eeb5cd --- /dev/null +++ b/server/handler/get_all_authors.go @@ -0,0 +1,45 @@ +package handler + +import ( + "net/http" + "pcgamedb/db" + + "github.com/gin-gonic/gin" +) + +type GetAllAuthorsResponse struct { + Status string `json:"status"` + Message string `json:"message,omitempty"` + Authors []string `json:"authors,omitempty"` +} + +// GetAllAuthorsHandler returns all authors +// @Summary Get all 
authors +// @Description Get all authors +// @Tags author +// @Accept json +// @Produce json +// @Success 200 {object} GetAllAuthorsResponse +// @Failure 500 {object} GetAllAuthorsResponse +// @Router /author [get] +func GetAllAuthorsHandler(ctx *gin.Context) { + authors, err := db.GetAllAuthors() + if err != nil { + ctx.JSON(http.StatusInternalServerError, GetAllAuthorsResponse{ + Status: "error", + Message: err.Error(), + }) + return + } + if len(authors) == 0 { + ctx.JSON(http.StatusOK, GetAllAuthorsResponse{ + Status: "ok", + Message: "No authors found", + }) + return + } + ctx.JSON(http.StatusOK, GetAllAuthorsResponse{ + Status: "ok", + Authors: authors, + }) +} diff --git a/server/handler/get_game_Infos_by_name.go b/server/handler/get_game_Infos_by_name.go new file mode 100644 index 0000000..60c0ccc --- /dev/null +++ b/server/handler/get_game_Infos_by_name.go @@ -0,0 +1,60 @@ +package handler + +import ( + "net/http" + "pcgamedb/db" + "pcgamedb/model" + + "github.com/gin-gonic/gin" +) + +type GetGameInfosByNameRequest struct { + Name string `uri:"name" binding:"required"` +} + +type GetGameInfosByNameResponse struct { + Status string `json:"status"` + Message string `json:"message,omitempty"` + GameInfos []*model.GameInfo `json:"game_infos,omitempty"` +} + +// GetGameInfosByName retrieves game information by game name. +// @Summary Retrieve game info by name +// @Description Retrieves game information details by game name +// @Tags game +// @Accept json +// @Produce json +// @Param name path string true "Game Name" +// @Success 200 {object} GetGameInfosByNameResponse +// @Failure 400 {object} GetGameInfosByNameResponse +// @Failure 500 {object} GetGameInfosByNameResponse +// @Router /game/name/{name} [get] +func GetGameInfosByNameHandler(c *gin.Context) { + var req GetGameInfosByNameRequest + if err := c.ShouldBindUri(&req); err != nil { + c.JSON(http.StatusBadRequest, GetGameInfosByNameResponse{ + Status: "error", + Message: err.Error(), + }) + return + } + games, err := db.GetGameInfosByName(req.Name) + if err != nil { + c.JSON(http.StatusInternalServerError, GetGameInfosByNameResponse{ + Status: "error", + Message: err.Error(), + }) + return + } + if len(games) == 0 { + c.JSON(http.StatusOK, GetGameInfosByNameResponse{ + Status: "ok", + Message: "No results found", + }) + return + } + c.JSON(http.StatusOK, GetGameInfosByNameResponse{ + Status: "ok", + GameInfos: games, + }) +} diff --git a/server/handler/get_game_download_by_id.go b/server/handler/get_game_download_by_id.go new file mode 100644 index 0000000..95d527a --- /dev/null +++ b/server/handler/get_game_download_by_id.go @@ -0,0 +1,70 @@ +package handler + +import ( + "net/http" + "pcgamedb/db" + "pcgamedb/model" + + "github.com/gin-gonic/gin" + "go.mongodb.org/mongo-driver/bson/primitive" + "go.mongodb.org/mongo-driver/mongo" +) + +type GetGameDownloadByIDRequest struct { + ID string `uri:"id" binding:"required"` +} + +type GetGameDownloadByIDResponse struct { + Status string `json:"status"` + Message string `json:"message,omitempty"` + Game *model.GameDownload `json:"game,omitempty"` +} + +// GetGameDownloadByID retrieves game download details by ID. 
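+// Illustrative usage (added comment; the ID is hypothetical): GET /game/raw/id/66f1a2b3c4d5e6f7a8b9c0d1 expects a 24-character hex ObjectID
+// and answers {"status":"ok","game":{...}} on a match, or {"status":"ok","message":"No results found"} when nothing is stored under that ID.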
+// @Summary Retrieve game download by ID +// @Description Retrieves details of a game download by game ID +// @Tags game +// @Accept json +// @Produce json +// @Param id path string true "Game Download ID" +// @Success 200 {object} GetGameDownloadByIDResponse +// @Failure 400 {object} GetGameDownloadByIDResponse +// @Failure 500 {object} GetGameDownloadByIDResponse +// @Router /game/raw/id/{id} [get] +func GetGameDownloadByIDHanlder(c *gin.Context) { + var req GetGameDownloadByIDRequest + if err := c.ShouldBindUri(&req); err != nil { + c.JSON(http.StatusBadRequest, GetGameDownloadByIDResponse{ + Status: "error", + Message: err.Error(), + }) + return + } + id, err := primitive.ObjectIDFromHex(req.ID) + if err != nil { + c.JSON(http.StatusBadRequest, GetGameDownloadByIDResponse{ + Status: "error", + Message: err.Error(), + }) + return + } + game, err := db.GetGameDownloadByID(id) + if err != nil { + if err == mongo.ErrNoDocuments { + c.JSON(http.StatusOK, GetGameDownloadByIDResponse{ + Status: "ok", + Message: "No results found", + }) + return + } + c.JSON(http.StatusInternalServerError, GetGameDownloadByIDResponse{ + Status: "error", + Message: err.Error(), + }) + return + } + c.JSON(http.StatusOK, GetGameDownloadByIDResponse{ + Status: "ok", + Game: game, + }) +} diff --git a/server/handler/get_game_download_by_raw_name.go b/server/handler/get_game_download_by_raw_name.go new file mode 100644 index 0000000..a6c6430 --- /dev/null +++ b/server/handler/get_game_download_by_raw_name.go @@ -0,0 +1,68 @@ +package handler + +import ( + "net/http" + "pcgamedb/db" + "pcgamedb/model" + + "github.com/gin-gonic/gin" + "go.mongodb.org/mongo-driver/mongo" +) + +type GetGameDownloadByRawNameRequest struct { + Name string `uri:"name" binding:"required"` +} + +type GetGameDownloadByRawNameResponse struct { + Status string `json:"status"` + Message string `json:"message,omitempty"` + GameDownload []*model.GameDownload `json:"game_downloads,omitempty"` +} + +// GetGameDownloadByRawName retrieves game download details by raw name. 
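+// Illustrative usage (added comment; the raw name is hypothetical): GET /game/raw/name/Some.Game.v1.0-REPACK
+// returns {"status":"ok","game_downloads":[...]} for a matching raw name, otherwise {"status":"ok","message":"No results found"}.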
+// @Summary Retrieve game download by raw name +// @Description Retrieves details of a game download by its raw name +// @Tags game +// @Accept json +// @Produce json +// @Param name path string true "Game Download Raw Name" +// @Success 200 {object} GetGameDownloadByRawNameResponse +// @Failure 400 {object} GetGameDownloadByRawNameResponse +// @Failure 500 {object} GetGameDownloadByRawNameResponse +// @Router /game/raw/name/{name} [get] +func GetGameDownloadByRawNameHandler(c *gin.Context) { + var req GetGameDownloadByRawNameRequest + if err := c.ShouldBindUri(&req); err != nil { + c.JSON(http.StatusBadRequest, GetGameDownloadByRawNameResponse{ + Status: "error", + Message: err.Error(), + }) + return + } + gameDownload, err := db.GetGameDownloadByRawName(req.Name) + if err != nil { + if err == mongo.ErrNoDocuments { + c.JSON(http.StatusOK, GetGameDownloadByRawNameResponse{ + Status: "ok", + Message: "No results found", + }) + return + } + c.JSON(http.StatusInternalServerError, GetGameDownloadByRawNameResponse{ + Status: "error", + Message: err.Error(), + }) + return + } + if gameDownload == nil { + c.JSON(http.StatusOK, GetGameDownloadByRawNameResponse{ + Status: "ok", + Message: "No results found", + }) + return + } + c.JSON(http.StatusOK, GetGameDownloadByRawNameResponse{ + Status: "ok", + GameDownload: gameDownload, + }) +} diff --git a/server/handler/get_game_downloads_by_author.go b/server/handler/get_game_downloads_by_author.go new file mode 100644 index 0000000..3a9d213 --- /dev/null +++ b/server/handler/get_game_downloads_by_author.go @@ -0,0 +1,82 @@ +package handler + +import ( + "net/http" + "pcgamedb/db" + "pcgamedb/model" + + "github.com/gin-gonic/gin" +) + +type GetGameDownloadsByAuthorRequest struct { + Author string `uri:"author" binding:"required"` + Page int `form:"page" json:"page"` + PageSize int `form:"page_size" json:"page_size"` +} + +type GetGameDownloadsByAuthorResponse struct { + Status string `json:"status"` + Message string `json:"message,omitempty"` + TotalPage int `json:"total_page"` + GameDownloads []*model.GameDownload `json:"game_downloads,omitempty"` +} + +// GetGameDownloadsByAuthorHandler returns all game downloads by author +// @Summary Get game downloads by author +// @Description Get game downloads by author +// @Tags game +// @Accept json +// @Produce json +// @Param author path string true "Author" +// @Param page query int false "Page" +// @Param page_size query int false "Page Size" +// @Success 200 {object} GetGameDownloadsByAuthorResponse +// @Failure 400 {object} GetGameDownloadsByAuthorResponse +// @Failure 500 {object} GetGameDownloadsByAuthorResponse +// @Router /game/raw/author/{author} [get] +func GetGameDownloadsByAuthorHandler(ctx *gin.Context) { + var req GetGameDownloadsByAuthorRequest + if err := ctx.ShouldBindUri(&req); err != nil { + ctx.JSON(http.StatusBadRequest, GetGameDownloadsByAuthorResponse{ + Status: "error", + Message: err.Error(), + }) + return + } + if err := ctx.ShouldBind(&req); err != nil { + ctx.JSON(http.StatusBadRequest, GetGameDownloadsByAuthorResponse{ + Status: "error", + Message: err.Error(), + }) + return + } + if req.Page == 0 || req.Page < 0 { + req.Page = 1 + } + if req.PageSize == 0 || req.PageSize < 0 { + req.PageSize = 10 + } + if req.PageSize > 10 { + req.PageSize = 10 + } + downloads, totalPage, err := db.GetGameDownloadsByAuthorPagination(req.Author, req.Page, req.PageSize) + if err != nil { + ctx.JSON(http.StatusInternalServerError, GetGameDownloadsByAuthorResponse{ + Status: "error", + Message: 
err.Error(), + }) + return + } + if len(downloads) == 0 { + ctx.JSON(http.StatusOK, GetGameDownloadsByAuthorResponse{ + Status: "ok", + Message: "No results found", + }) + return + } + ctx.JSON(http.StatusOK, GetGameDownloadsByAuthorResponse{ + Status: "ok", + TotalPage: totalPage, + GameDownloads: downloads, + }) +} diff --git a/server/handler/get_game_info_by_id.go b/server/handler/get_game_info_by_id.go new file mode 100644 index 0000000..86e97f8 --- /dev/null +++ b/server/handler/get_game_info_by_id.go @@ -0,0 +1,78 @@ +package handler + +import ( + "net/http" + "pcgamedb/db" + "pcgamedb/model" + + "github.com/gin-gonic/gin" + "go.mongodb.org/mongo-driver/bson/primitive" + "go.mongodb.org/mongo-driver/mongo" +) + +type GetGameInfoByIDRequest struct { + ID string `uri:"id" binding:"required"` +} + +type GetGameInfoByIDResponse struct { + Status string `json:"status"` + Message string `json:"message,omitempty"` + GameInfo *model.GameInfo `json:"game_info,omitempty"` +} + +// GetGameInfoByID retrieves game information by ID. +// @Summary Retrieve game info by ID +// @Description Retrieves details of a game by game ID +// @Tags game +// @Accept json +// @Produce json +// @Param id path string true "Game ID" +// @Success 200 {object} GetGameInfoByIDResponse +// @Failure 400 {object} GetGameInfoByIDResponse +// @Failure 500 {object} GetGameInfoByIDResponse +// @Router /game/id/{id} [get] +func GetGameInfoByIDHandler(c *gin.Context) { + var req GetGameDownloadByIDRequest + if err := c.ShouldBindUri(&req); err != nil { + c.JSON(http.StatusBadRequest, GetGameInfoByIDResponse{ + Status: "error", + Message: err.Error(), + }) + return + } + id, err := primitive.ObjectIDFromHex(req.ID) + if err != nil { + c.JSON(http.StatusBadRequest, GetGameInfoByIDResponse{ + Status: "error", + Message: err.Error(), + }) + return + } + gameInfo, err := db.GetGameInfoByID(id) + if err != nil { + if err == mongo.ErrNoDocuments { + c.JSON(http.StatusOK, GetGameInfoByIDResponse{ + Status: "ok", + Message: "No results found", + }) + return + } + c.JSON(http.StatusInternalServerError, GetGameInfoByIDResponse{ + Status: "error", + Message: err.Error(), + }) + return + } + gameInfo.Games, err = db.GetGameDownloadsByIDs(gameInfo.GameIDs) + if err != nil { + c.JSON(http.StatusInternalServerError, GetGameInfoByIDResponse{ + Status: "error", + Message: err.Error(), + }) + return + } + c.JSON(http.StatusOK, GetGameInfoByIDResponse{ + Status: "ok", + GameInfo: gameInfo, + }) +} diff --git a/server/handler/get_game_info_by_platform_id.go b/server/handler/get_game_info_by_platform_id.go new file mode 100644 index 0000000..cae7148 --- /dev/null +++ b/server/handler/get_game_info_by_platform_id.go @@ -0,0 +1,63 @@ +package handler + +import ( + "net/http" + "pcgamedb/db" + "pcgamedb/model" + + "github.com/gin-gonic/gin" + "go.mongodb.org/mongo-driver/mongo" +) + +type GetGameInfoByPlatformIDRequest struct { + PlatformType string `uri:"platform_type" binding:"required"` + PlatformID int `uri:"platform_id" binding:"required"` +} + +type GetGameInfoByPlatformIDResponse struct { + Status string `json:"status"` + Message string `json:"message,omitempty"` + GameInfo *model.GameInfo `json:"game_info,omitempty"` +} + +// GetGameInfoByPlatformID retrieves game information by platform and ID. 
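+// Illustrative usage (added comment; IDs are hypothetical): GET /game/platform/steam/271590 resolves the GameInfo linked to that platform ID;
+// the platform_type values used elsewhere in this codebase are steam, igdb and gog.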
+// @Summary Retrieve game info by platform ID +// @Description Retrieves game information based on a platform type and platform ID +// @Tags game +// @Accept json +// @Produce json +// @Param platform_type path string true "Platform Type" +// @Param platform_id path int true "Platform ID" +// @Success 200 {object} GetGameInfoByPlatformIDResponse +// @Failure 400 {object} GetGameInfoByPlatformIDResponse +// @Failure 500 {object} GetGameInfoByPlatformIDResponse +// @Router /game/platform/{platform_type}/{platform_id} [get] +func GetGameInfoByPlatformIDHandler(c *gin.Context) { + var req GetGameInfoByPlatformIDRequest + if err := c.ShouldBindUri(&req); err != nil { + c.JSON(http.StatusBadRequest, GetGameInfoByPlatformIDResponse{ + Status: "error", + Message: err.Error(), + }) + return + } + gameInfo, err := db.GetGameInfoByPlatformID(req.PlatformType, req.PlatformID) + if err != nil { + if err == mongo.ErrNoDocuments { + c.JSON(http.StatusOK, GetGameInfoByPlatformIDResponse{ + Status: "ok", + Message: "No results found", + }) + } else { + c.JSON(http.StatusInternalServerError, GetGameInfoByPlatformIDResponse{ + Status: "error", + Message: err.Error(), + }) + } + } else { + c.JSON(http.StatusOK, GetGameInfoByPlatformIDResponse{ + Status: "ok", + GameInfo: gameInfo, + }) + } +} diff --git a/server/handler/get_ranking.go b/server/handler/get_ranking.go new file mode 100644 index 0000000..2ed4752 --- /dev/null +++ b/server/handler/get_ranking.go @@ -0,0 +1,65 @@ +package handler + +import ( + "net/http" + "pcgamedb/crawler" + "pcgamedb/model" + + "github.com/gin-gonic/gin" +) + +type GetRankingResponse struct { + Status string `json:"status"` + Message string `json:"message,omitempty"` + Games []*model.GameInfo `json:"games"` +} + +// GetRanking retrieves game rankings. 
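+// Illustrative usage (added comment): GET /ranking/week-top answers {"status":"ok","games":[...]};
+// the accepted type values are top, week-top, best-of-the-year and most-played, anything else gets a 400 "Invalid ranking type".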
+// @Summary Retrieve rankings +// @Description Retrieves rankings based on a specified type +// @Tags ranking +// @Accept json +// @Produce json +// @Param type path string true "Ranking Type(top, week-top, best-of-the-year, most-played)" +// @Success 200 {object} GetRankingResponse +// @Failure 400 {object} GetRankingResponse +// @Failure 500 {object} GetRankingResponse +// @Router /ranking/{type} [get] +func GetRankingHandler(c *gin.Context) { + rankingType, exist := c.Params.Get("type") + if !exist { + c.JSON(http.StatusBadRequest, GetRankingResponse{ + Status: "error", + Message: "Missing ranking type", + }) + } + var f func() ([]*model.GameInfo, error) + switch rankingType { + case "top": + f = crawler.GetSteam250Top250Cache + case "week-top": + f = crawler.GetSteam250WeekTop50Cache + case "best-of-the-year": + f = crawler.GetSteam250BestOfTheYearCache + case "most-played": + f = crawler.GetSteam250MostPlayedCache + default: + c.JSON(http.StatusBadRequest, GetRankingResponse{ + Status: "error", + Message: "Invalid ranking type", + }) + return + } + rank, err := f() + if err != nil { + c.JSON(http.StatusInternalServerError, GetRankingResponse{ + Status: "error", + Message: err.Error(), + }) + return + } + c.JSON(http.StatusOK, GetRankingResponse{ + Status: "ok", + Games: rank, + }) +} diff --git a/server/handler/get_unorganized_game_downloads.go b/server/handler/get_unorganized_game_downloads.go new file mode 100644 index 0000000..7e1925f --- /dev/null +++ b/server/handler/get_unorganized_game_downloads.go @@ -0,0 +1,65 @@ +package handler + +import ( + "net/http" + "pcgamedb/db" + "pcgamedb/model" + + "github.com/gin-gonic/gin" +) + +type GetUnorganizedGameDownloadsRequest struct { + Num int `json:"num" form:"num"` +} + +type GetUnorganizedGameDownloadsResponse struct { + Status string `json:"status"` + Message string `json:"message,omitempty"` + Size int `json:"size,omitempty"` + GameDownloads []*model.GameDownload `json:"game_downloads,omitempty"` +} + +// GetUnorganizedGameDownloads retrieves a list of unorganized game downloads. 
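+// Illustrative usage (added comment; the query value is arbitrary): GET /game/raw/unorganized?num=20 limits the result to 20 entries;
+// an omitted or non-positive num is forwarded to the database layer as -1, the same value the health check uses to fetch everything.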
+// @Summary List unorganized game downloads +// @Description Retrieves game downloads that have not been organized +// @Tags game +// @Accept json +// @Produce json +// @Param num query int false "Number of game downloads to retrieve" +// @Success 200 {object} GetUnorganizedGameDownloadsResponse +// @Failure 400 {object} GetUnorganizedGameDownloadsResponse +// @Failure 500 {object} GetUnorganizedGameDownloadsResponse +// @Router /game/raw/unorganized [get] +func GetUnorganizedGameDownloadsHandler(c *gin.Context) { + var req GetUnorganizedGameDownloadsRequest + if err := c.ShouldBind(&req); err != nil { + c.JSON(http.StatusBadRequest, GetUnorganizedGameDownloadsResponse{ + Status: "error", + Message: err.Error(), + }) + return + } + if req.Num == 0 || req.Num < 0 { + req.Num = -1 + } + gameDownloads, err := db.GetUnorganizedGameDownloads(req.Num) + if err != nil { + c.JSON(http.StatusInternalServerError, GetUnorganizedGameDownloadsResponse{ + Status: "error", + Message: err.Error(), + }) + return + } + if len(gameDownloads) == 0 { + c.JSON(http.StatusOK, GetUnorganizedGameDownloadsResponse{ + Status: "ok", + Message: "No unorganized game downloads found", + }) + return + } + c.JSON(http.StatusOK, GetUnorganizedGameDownloadsResponse{ + Status: "ok", + GameDownloads: gameDownloads, + Size: len(gameDownloads), + }) +} diff --git a/server/handler/healthcheck.go b/server/handler/healthcheck.go new file mode 100644 index 0000000..90d99e2 --- /dev/null +++ b/server/handler/healthcheck.go @@ -0,0 +1,61 @@ +package handler + +import ( + "fmt" + "net/http" + "pcgamedb/config" + "pcgamedb/db" + "runtime" + "time" + + "github.com/gin-gonic/gin" +) + +type HealthCheckResponse struct { + Status string `json:"status"` + Message string `json:"message,omitempty"` + Date string `json:"date"` + Uptime string `json:"uptime"` + Alloc string `json:"alloc"` + AutoCrawl bool `json:"auto_crawl"` + GameDownload int64 `json:"game_download,omitempty"` + GameInfo int64 `json:"game_info,omitempty"` + Unorganized int64 `json:"unorganized,omitempty"` + RedisAvaliable bool `json:"redis_avaliable"` + OnlineFixAvaliable bool `json:"online_fix_avaliable"` + MegaAvaliable bool `json:"mega_avaliable"` +} + +// HealthCheckHandler performs a health check of the service. +// @Summary Health Check +// @Description Performs a server health check and returns detailed server status including the current time, uptime, and configuration settings such as AutoCrawl. 
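+// Response sketch (added comment): a plain GET /healthcheck returns JSON fields such as status, date, uptime, alloc, auto_crawl,
+// game_download, game_info, unorganized and the redis/online-fix/mega availability flags.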
+// @Tags health +// @Accept json +// @Produce json +// @Success 200 {object} HealthCheckResponse +// @Failure 500 {string} HealthCheckResponse +// @Router /healthcheck [get] +func HealthCheckHandler(c *gin.Context) { + var m runtime.MemStats + runtime.ReadMemStats(&m) + downloadCount, _ := db.GetGameDownloadCount() + infoCount, _ := db.GetGameInfoCount() + unorganized, err := db.GetUnorganizedGameDownloads(-1) + unorganizedCount := int64(0) + if err == nil { + unorganizedCount = int64(len(unorganized)) + } + c.JSON(http.StatusOK, HealthCheckResponse{ + Status: "ok", + Date: time.Now().Format("2006-01-02 15:04:05"), + Uptime: time.Since(config.Runtime.ServerStartTime).String(), + AutoCrawl: config.Config.Server.AutoCrawl, + Alloc: fmt.Sprintf("%.2f MB", float64(m.Alloc)/1024.0/1024.0), + GameDownload: downloadCount, + GameInfo: infoCount, + Unorganized: unorganizedCount, + RedisAvaliable: config.Config.RedisAvaliable, + OnlineFixAvaliable: config.Config.OnlineFixAvaliable, + MegaAvaliable: config.Config.MegaAvaliable, + }) +} diff --git a/server/handler/organize_game_info.go b/server/handler/organize_game_info.go new file mode 100644 index 0000000..8128353 --- /dev/null +++ b/server/handler/organize_game_info.go @@ -0,0 +1,67 @@ +package handler + +import ( + "net/http" + "pcgamedb/crawler" + "pcgamedb/model" + + "github.com/gin-gonic/gin" + "go.mongodb.org/mongo-driver/bson/primitive" +) + +type OrganizeGameDownloadRequest struct { + Platform string `form:"platform" json:"platform" binding:"required"` + GameID string `form:"game_id" json:"game_id" binding:"required"` + PlatformID int `form:"platform_id" json:"platform_id" binding:"required"` +} + +type OrganizeGameDownloadResponse struct { + Status string `json:"status"` + Message string `json:"message,omitempty"` + GameInfo *model.GameInfo `json:"game_info,omitempty"` +} + +// OrganizeGameDownload organizes a specific game download. 
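+// Illustrative usage (added comment; all values are hypothetical): POST /game/raw/organize with header "Authorization: Bearer <secret_key>"
+// and body {"platform":"igdb","game_id":"66f1a2b3c4d5e6f7a8b9c0d1","platform_id":1942} organizes that download against the given platform entry
+// and returns the resulting game_info.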
+// @Summary Organize a game download +// @Description Organizes a game download based on platform and game ID +// @Tags game +// @Accept json +// @Produce json +// @Param Authorization header string true "Authorization: Bearer " +// @Param body body OrganizeGameDownloadRequest true "Organize Game Download Request" +// @Success 200 {object} OrganizeGameDownloadResponse +// @Failure 400 {object} OrganizeGameDownloadResponse +// @Failure 401 {object} OrganizeGameDownloadResponse +// @Failure 500 {object} OrganizeGameDownloadResponse +// @Security BearerAuth +// @Router /game/raw/organize [post] +func OrganizeGameDownloadHandler(c *gin.Context) { + var req OrganizeGameDownloadRequest + if err := c.ShouldBind(&req); err != nil { + c.JSON(http.StatusBadRequest, OrganizeGameDownloadResponse{ + Status: "error", + Message: err.Error(), + }) + return + } + objID, err := primitive.ObjectIDFromHex(req.GameID) + if err != nil { + c.JSON(http.StatusBadRequest, OrganizeGameDownloadResponse{ + Status: "error", + Message: err.Error(), + }) + return + } + info, err := crawler.OrganizeGameDownloadManually(objID, req.Platform, req.PlatformID) + if err != nil { + c.JSON(http.StatusInternalServerError, OrganizeGameDownloadResponse{ + Status: "error", + Message: err.Error(), + }) + return + } + c.JSON(http.StatusOK, OrganizeGameDownloadResponse{ + Status: "ok", + GameInfo: info, + }) +} diff --git a/server/handler/search_games.go b/server/handler/search_games.go new file mode 100644 index 0000000..1a641fa --- /dev/null +++ b/server/handler/search_games.go @@ -0,0 +1,75 @@ +package handler + +import ( + "net/http" + "pcgamedb/db" + "pcgamedb/model" + + "github.com/gin-gonic/gin" +) + +type SearchGamesRequest struct { + Keyword string `form:"keyword" json:"keyword" binding:"required,min=4,max=64"` + Page int `form:"page" json:"page"` + PageSize int `form:"page_size" json:"page_size"` +} + +type SearchGamesResponse struct { + Status string `json:"status"` + Message string `json:"message,omitempty"` + TotalPage int `json:"total_page,omitempty"` + GameInfos []*model.GameInfo `json:"game_infos,omitempty"` +} + +// SearchGames searches for games based on a keyword. 
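+// Illustrative usage (added comment): GET /game/search?keyword=portal&page=1&page_size=10; keyword must be 4 to 64 characters
+// and page_size is clamped to at most 10; the response carries total_page plus the matching game_infos.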
+// @Summary Search games +// @Description Searches for games based on the provided keyword +// @Tags game +// @Accept json +// @Produce json +// @Param keyword query string true "Search keyword" +// @Param page query int false "Page number" +// @Param page_size query int false "Number of items per page" +// @Success 200 {object} SearchGamesResponse +// @Failure 400 {object} SearchGamesResponse +// @Failure 500 {object} SearchGamesResponse +// @Router /game/search [get] +func SearchGamesHandler(c *gin.Context) { + var req SearchGamesRequest + if err := c.ShouldBind(&req); err != nil { + c.JSON(http.StatusBadRequest, SearchGamesResponse{ + Status: "error", + Message: err.Error(), + }) + return + } + if req.Page == 0 || req.Page < 0 { + req.Page = 1 + } + if req.PageSize == 0 || req.PageSize < 0 { + req.PageSize = 10 + } + if req.PageSize > 10 { + req.PageSize = 10 + } + items, totalPage, err := db.SearchGameInfosCache(req.Keyword, req.Page, req.PageSize) + if err != nil { + c.JSON(http.StatusInternalServerError, SearchGamesResponse{ + Status: "error", + Message: err.Error(), + }) + return + } + if len(items) == 0 { + c.JSON(http.StatusOK, SearchGamesResponse{ + Status: "ok", + Message: "No results found", + }) + return + } + c.JSON(http.StatusOK, SearchGamesResponse{ + Status: "ok", + TotalPage: totalPage, + GameInfos: items, + }) +} diff --git a/server/handler/update_game_info.go b/server/handler/update_game_info.go new file mode 100644 index 0000000..87884bd --- /dev/null +++ b/server/handler/update_game_info.go @@ -0,0 +1,100 @@ +package handler + +import ( + "net/http" + "pcgamedb/crawler" + "pcgamedb/db" + "pcgamedb/model" + "strings" + + "github.com/gin-gonic/gin" + "go.mongodb.org/mongo-driver/bson/primitive" +) + +type UpdateGameInfoRequest struct { + GameID string `json:"game_id" binding:"required"` + Platform string `json:"platform" binding:"required"` + PlatformID int `json:"platform_id" binding:"required"` +} + +type UpdateGameInfoResponse struct { + Status string `json:"status"` + Message string `json:"message"` + GameInfo *model.GameInfo `json:"game_info,omitempty"` +} + +// UpdateGameInfoHandler updates game information. 
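+// Illustrative usage (added comment; body values are hypothetical): route.go mounts this handler behind auth at PUT /game/update;
+// a JSON body like {"game_id":"66f1a2b3c4d5e6f7a8b9c0d1","platform":"steam","platform_id":620} regenerates the stored GameInfo from that platform entry,
+// and platform must be one of steam, igdb or gog.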
+// @Summary Update game info +// @Description Updates details of a game +// @Tags game +// @Accept json +// @Produce json +// @Param Authorization header string true "Authorization: Bearer " +// @Param body body handler.UpdateGameInfoRequest true "Update Game Info Request" +// @Success 200 {object} handler.UpdateGameInfoResponse +// @Failure 400 {object} handler.UpdateGameInfoResponse +// @Failure 401 {object} handler.UpdateGameInfoResponse +// @Failure 500 {object} handler.UpdateGameInfoResponse +// @Router /game/update [post] +func UpdateGameInfoHandler(c *gin.Context) { + var req UpdateGameInfoRequest + if err := c.ShouldBindJSON(&req); err != nil { + c.JSON(http.StatusBadRequest, UpdateGameInfoResponse{ + Status: "error", + Message: err.Error(), + }) + return + } + req.Platform = strings.ToLower(req.Platform) + platformMap := map[string]bool{ + "steam": true, + "igdb": true, + "gog": true, + } + if _, ok := platformMap[req.Platform]; !ok { + c.JSON(http.StatusBadRequest, UpdateGameInfoResponse{ + Status: "error", + Message: "Invalid platform", + }) + return + } + objID, err := primitive.ObjectIDFromHex(req.GameID) + if err != nil { + c.JSON(http.StatusBadRequest, UpdateGameInfoResponse{ + Status: "error", + Message: err.Error(), + }) + return + } + info, err := db.GetGameInfoByID(objID) + if err != nil { + c.JSON(http.StatusInternalServerError, UpdateGameInfoResponse{ + Status: "error", + Message: err.Error(), + }) + return + } + newInfo, err := crawler.GenerateGameInfo(req.Platform, req.PlatformID) + if err != nil { + c.JSON(http.StatusInternalServerError, UpdateGameInfoResponse{ + Status: "error", + Message: err.Error(), + }) + return + } + newInfo.ID = objID + newInfo.GameIDs = info.GameIDs + err = db.SaveGameInfo(newInfo) + if err != nil { + c.JSON(http.StatusInternalServerError, UpdateGameInfoResponse{ + Status: "error", + Message: err.Error(), + }) + return + } + c.JSON(http.StatusOK, UpdateGameInfoResponse{ + Status: "ok", + Message: "Game info updated successfully", + GameInfo: newInfo, + }) +} diff --git a/server/middleware/auth.go b/server/middleware/auth.go new file mode 100644 index 0000000..d25a939 --- /dev/null +++ b/server/middleware/auth.go @@ -0,0 +1,42 @@ +package middleware + +import ( + "net/http" + "pcgamedb/config" + "strings" + + "github.com/gin-gonic/gin" +) + +func Auth() gin.HandlerFunc { + apiKey := config.Config.Server.SecretKey + if apiKey == "" { + return func(c *gin.Context) { + c.JSON(http.StatusInternalServerError, gin.H{ + "status": "error", + "message": "API key is not configured properly.", + }) + c.Abort() + } + } + return func(c *gin.Context) { + auth := c.GetHeader("Authorization") + if auth == "" { + c.JSON(http.StatusUnauthorized, gin.H{ + "status": "error", + "message": "Unauthorized. No API key provided.", + }) + c.Abort() + return + } + if strings.TrimPrefix(auth, "Bearer ") != apiKey { + c.JSON(http.StatusUnauthorized, gin.H{ + "status": "error", + "message": "Unauthorized. 
Invalid API key.", + }) + c.Abort() + return + } + c.Next() + } +} diff --git a/server/middleware/log.go b/server/middleware/log.go new file mode 100644 index 0000000..b1192ba --- /dev/null +++ b/server/middleware/log.go @@ -0,0 +1,40 @@ +package middleware + +import ( + "pcgamedb/log" + "strconv" + "time" + + "github.com/gin-gonic/gin" + "go.uber.org/zap" +) + +func Logger() gin.HandlerFunc { + return func(c *gin.Context) { + startTime := time.Now() + + c.Next() + + endTime := time.Now() + latencyTime := endTime.Sub(startTime).Milliseconds() + reqMethod := c.Request.Method + reqURI := c.Request.RequestURI + statusCode := c.Writer.Status() + clientIP := c.ClientIP() + + log.Logger.Info( + "request", + zap.Int("code", statusCode), + zap.String("method", reqMethod), + zap.String("uri", reqURI), + zap.String("ip", clientIP), + zap.String("latency", strconv.Itoa(int(latencyTime))+"ms"), + ) + + if len(c.Errors) > 0 { + for _, e := range c.Errors.Errors() { + log.Logger.Error(e) + } + } + } +} diff --git a/server/middleware/recover.go b/server/middleware/recover.go new file mode 100644 index 0000000..9296a2f --- /dev/null +++ b/server/middleware/recover.go @@ -0,0 +1,21 @@ +package middleware + +import ( + "net/http" + "pcgamedb/log" + + "github.com/gin-gonic/gin" + "go.uber.org/zap" +) + +func Recovery() gin.HandlerFunc { + return func(c *gin.Context) { + defer func() { + if rec := recover(); rec != nil { + log.Logger.Error("Recovery", zap.Any("error", rec), zap.Stack("stacktrace")) + c.JSON(http.StatusInternalServerError, gin.H{"status": "error"}) + } + }() + c.Next() + } +} diff --git a/server/route.go b/server/route.go new file mode 100644 index 0000000..6effbdb --- /dev/null +++ b/server/route.go @@ -0,0 +1,43 @@ +package server + +import ( + "pcgamedb/server/handler" + "pcgamedb/server/middleware" + + "github.com/gin-contrib/cors" + "github.com/gin-gonic/gin" + + docs "pcgamedb/docs" + + swaggerfiles "github.com/swaggo/files" + ginSwagger "github.com/swaggo/gin-swagger" +) + +func initRoute(app *gin.Engine) { + app.Use(cors.New(cors.Config{ + AllowAllOrigins: true, + })) + + GameInfoGroup := app.Group("/game") + GameDownloadGroup := GameInfoGroup.Group("/raw") + + GameDownloadGroup.GET("/unorganized", handler.GetUnorganizedGameDownloadsHandler) + GameDownloadGroup.POST("/organize", middleware.Auth(), handler.OrganizeGameDownloadHandler) + GameDownloadGroup.GET("/id/:id", handler.GetGameDownloadByIDHanlder) + GameDownloadGroup.GET("/name/:name", handler.GetGameDownloadByRawNameHandler) + GameDownloadGroup.GET("/author/:author", handler.GetGameDownloadsByAuthorHandler) + + GameInfoGroup.GET("/search", handler.SearchGamesHandler) + GameInfoGroup.GET("/name/:name", handler.GetGameInfosByNameHandler) + GameInfoGroup.GET("/platform/:platform_type/:platform_id", handler.GetGameInfoByPlatformIDHandler) + GameInfoGroup.GET("/id/:id", handler.GetGameInfoByIDHandler) + GameInfoGroup.PUT("/update", middleware.Auth(), handler.UpdateGameInfoHandler) + + app.GET("/ranking/:type", handler.GetRankingHandler) + app.GET("/healthcheck", handler.HealthCheckHandler) + app.GET("/author", handler.GetAllAuthorsHandler) + app.POST("/clean", middleware.Auth(), handler.CleanGameHandler) + + docs.SwaggerInfo.BasePath = "/api" + app.GET("/swagger/*any", ginSwagger.WrapHandler(swaggerfiles.Handler)) +} diff --git a/server/server.go b/server/server.go new file mode 100644 index 0000000..41c6e1e --- /dev/null +++ b/server/server.go @@ -0,0 +1,46 @@ +package server + +import ( + "io" + "pcgamedb/cache" + "pcgamedb/config" 
+	"pcgamedb/db"
+	"pcgamedb/log"
+	"pcgamedb/server/middleware"
+	"pcgamedb/task"
+	"time"
+
+	"github.com/gin-gonic/gin"
+	"github.com/robfig/cron/v3"
+	"go.uber.org/zap"
+)
+
+func init() {
+	config.Runtime.ServerStartTime = time.Now()
+}
+
+func Run() {
+	db.CheckConnect()
+	cache.CheckConnect()
+	gin.SetMode(gin.ReleaseMode)
+	gin.DefaultWriter = io.Discard
+	app := gin.New()
+	app.Use(middleware.Logger())
+	app.Use(middleware.Recovery())
+	initRoute(app)
+	log.Logger.Info("Server running", zap.String("port", config.Config.Server.Port))
+	if config.Config.Server.AutoCrawl {
+		go func() {
+			c := cron.New()
+			_, err := c.AddFunc("0 */3 * * *", func() { task.Crawl(log.TaskLogger) })
+			if err != nil {
+				log.Logger.Error("Error adding cron job", zap.Error(err))
+			}
+			c.Start()
+		}()
+	}
+	err := app.Run(":" + config.Config.Server.Port)
+	if err != nil {
+		log.Logger.Panic("Failed to run server", zap.Error(err))
+	}
+}
diff --git a/task/clean.go b/task/clean.go
new file mode 100644
index 0000000..a7f193c
--- /dev/null
+++ b/task/clean.go
@@ -0,0 +1,35 @@
+package task
+
+import (
+	"pcgamedb/db"
+
+	"go.uber.org/zap"
+)
+
+func Clean(logger *zap.Logger) {
+	ids, err := db.DeduplicateGames()
+	if err != nil {
+		logger.Error("Failed to deduplicate games", zap.Error(err))
+	}
+	for _, id := range ids {
+		logger.Info("Deduplicated game", zap.Any("game_id", id))
+	}
+	idmap, err := db.CleanOrphanGamesInGameInfos()
+	if err != nil {
+		logger.Error("Failed to clean orphan games", zap.Error(err))
+	}
+	for in, removed := range idmap {
+		logger.Info("Cleaned orphan game in game info", zap.Any("in", in), zap.Any("removed", removed))
+	}
+	ids, err = db.CleanGameInfoWithEmptyGameIDs()
+	if err != nil {
+		logger.Error("Failed to clean game info with empty game ids", zap.Error(err))
+	}
+	for _, id := range ids {
+		logger.Info("Cleaned game info with empty game ids", zap.Any("game_id", id))
+	}
+	err = db.MergeSameNameGameInfos()
+	if err != nil {
+		logger.Error("Failed to merge same name game infos", zap.Error(err))
+	}
+}
diff --git a/task/crawl.go b/task/crawl.go
new file mode 100644
index 0000000..d0493e5
--- /dev/null
+++ b/task/crawl.go
@@ -0,0 +1,38 @@
+package task
+
+import (
+	"pcgamedb/crawler"
+	"pcgamedb/model"
+
+	"go.uber.org/zap"
+)
+
+func Crawl(logger *zap.Logger) {
+	var games []*model.GameDownload
+	var crawlerMap = crawler.BuildCrawlerMap(logger)
+	for _, item := range crawlerMap {
+		if c, ok := item.(crawler.PagedCrawler); ok {
+			g, err := c.CrawlMulti([]int{1, 2, 3})
+			if err != nil {
+				logger.Error("Failed to crawl games", zap.Error(err))
+			}
+			games = append(games, g...)
+		} else if c, ok := item.(crawler.SimpleCrawler); ok {
+			g, err := c.CrawlAll()
+			if err != nil {
+				logger.Error("Failed to crawl games", zap.Error(err))
+			}
+			games = append(games, g...)
+ } + } + logger.Info("Crawled finished", zap.Int("count", len(games))) + for _, game := range games { + logger.Info( + "Crawled game", + zap.String("name", game.RawName), + zap.String("author", game.Author), + zap.String("url", game.Url), + ) + } + Clean(logger) +} diff --git a/utils/fetch.go b/utils/fetch.go new file mode 100644 index 0000000..0391cb8 --- /dev/null +++ b/utils/fetch.go @@ -0,0 +1,178 @@ +package utils + +import ( + "bytes" + "context" + "encoding/json" + "errors" + "io" + "net" + "net/http" + "net/url" + "strings" + "time" + + "golang.org/x/net/html/charset" +) + +const userAgent string = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36" + +type FetchConfig struct { + Method string + Url string + Data interface{} + RetryTimes int + Headers map[string]string + Cookies map[string]string +} + +type FetchResponse struct { + StatusCode int + Data []byte + Header http.Header + Cookie []*http.Cookie +} + +func Fetch(cfg FetchConfig) (*FetchResponse, error) { + var req *http.Request + var resp *http.Response + var backoff time.Duration = 1 + var reqBody io.Reader = nil + var err error + + if cfg.RetryTimes == 0 { + cfg.RetryTimes = 3 + } + if cfg.Method == "" { + cfg.Method = "GET" + } + + if cfg.Data != nil && (cfg.Method == "POST" || cfg.Method == "PUT") { + if cfg.Headers == nil { + cfg.Headers = map[string]string{} + } + if _, exist := cfg.Headers["Content-Type"]; !exist { + cfg.Headers["Content-Type"] = "application/json" + } + v := cfg.Headers["Content-Type"] + if v == "application/x-www-form-urlencoded" { + switch data := cfg.Data.(type) { + case map[string]string: + params := url.Values{} + for k, v := range data { + params.Set(k, v) + } + reqBody = strings.NewReader(params.Encode()) + case string: + reqBody = strings.NewReader(data) + case url.Values: + reqBody = strings.NewReader(data.Encode()) + default: + return nil, errors.New("unsupported data type") + } + } else if v == "application/json" { + var jsonData []byte + jsonData, err = json.Marshal(cfg.Data) + if err != nil { + return nil, err + } + reqBody = bytes.NewReader(jsonData) + } else { + reqBody = strings.NewReader(cfg.Data.(string)) + } + } + + for retryTime := 0; retryTime <= cfg.RetryTimes; retryTime++ { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + req, err = http.NewRequestWithContext(ctx, cfg.Method, cfg.Url, reqBody) + if err != nil { + return nil, err + } + if cfg.Method == "POST" || cfg.Method == "PUT" { + req.Header.Set("Content-Type", "application/json") + } + if v, exist := cfg.Headers["User-Agent"]; exist { + if v != "" { + req.Header.Set("User-Agent", v) + } + } else { + req.Header.Set("User-Agent", userAgent) + } + for k, v := range cfg.Headers { + req.Header.Set(k, v) + } + for k, v := range cfg.Cookies { + req.AddCookie(&http.Cookie{Name: k, Value: v}) + } + resp, err = http.DefaultClient.Do(req) + if err != nil { + if isRetryableError(err) { + err = errors.New("request error: " + err.Error()) + time.Sleep(backoff * time.Second) + backoff *= 2 + continue + } + } + + if resp == nil { + return nil, errors.New("response is nil") + } + + if isRetryableStatusCode(resp.StatusCode) { + err = errors.New("response status code: " + resp.Status) + time.Sleep(backoff * time.Second) + backoff *= 2 + continue + } + + contentType := resp.Header.Get("Content-Type") + var reader io.Reader + if strings.Contains(contentType, "charset=") { + reader, err = charset.NewReader(resp.Body, contentType) 
+ } else { + reader = resp.Body + } + if err != nil { + return nil, err + } + dataBytes, err := io.ReadAll(reader) + if err != nil { + return nil, err + } + + res := &FetchResponse{ + StatusCode: resp.StatusCode, + Header: resp.Header, + Cookie: resp.Cookies(), + Data: dataBytes, + } + + return res, nil + } + return nil, err +} + +func isRetryableStatusCode(statusCode int) bool { + switch statusCode { + case http.StatusInternalServerError, + http.StatusBadGateway, + http.StatusServiceUnavailable, + http.StatusGatewayTimeout, + http.StatusTooManyRequests: + return true + default: + return false + } +} + +func isRetryableError(err error) bool { + if err != nil { + var netErr net.Error + if errors.As(err, &netErr) && netErr.Timeout() { + return true + } + } + return false +} diff --git a/utils/is_russian.go b/utils/is_russian.go new file mode 100644 index 0000000..6577486 --- /dev/null +++ b/utils/is_russian.go @@ -0,0 +1,12 @@ +package utils + +import "unicode" + +func ContainsRussian(s string) bool { + for _, r := range s { + if unicode.Is(unicode.Cyrillic, r) { + return true + } + } + return false +} diff --git a/utils/keeplinks.go b/utils/keeplinks.go new file mode 100644 index 0000000..1339a40 --- /dev/null +++ b/utils/keeplinks.go @@ -0,0 +1,27 @@ +package utils + +import ( + "bytes" + "fmt" + "strings" + + "github.com/PuerkitoBio/goquery" +) + +func SolveKeepLinks(url string) (string, error) { + id := url[strings.LastIndex(url, "/")+1:] + resp, err := Fetch(FetchConfig{ + Url: url, + Cookies: map[string]string{ + fmt.Sprintf("flag[%s]", id): "1", + }, + }) + if err != nil { + return "", err + } + doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data)) + if err != nil { + return "", err + } + return doc.Find(".livelbl a").Text(), nil +} diff --git a/utils/magnet.go b/utils/magnet.go new file mode 100644 index 0000000..0f423bf --- /dev/null +++ b/utils/magnet.go @@ -0,0 +1,85 @@ +package utils + +import ( + "bytes" + "fmt" + "strconv" + "strings" + + "github.com/anacrolix/torrent/metainfo" +) + +func ConvertTorrentToMagnet(torrent []byte) (string, string, error) { + minfo, err := metainfo.Load(bytes.NewReader(torrent)) + if err != nil { + return "", "", err + } + info, err := minfo.UnmarshalInfo() + if err != nil { + return "", "", err + } + var size int64 = info.Length + if size == 0 { + for _, file := range info.Files { + size += file.Length + } + } + infoHash := minfo.HashInfoBytes() + magnet := minfo.Magnet(&infoHash, &info) + return magnet.String(), FormatSize(size), nil +} + +func FormatSize(size int64) string { + const ( + _ = iota + KB int64 = 1 << (10 * iota) + MB + GB + TB + ) + switch { + case size >= GB: + return fmt.Sprintf("%.1f GB", float64(size)/float64(GB)) + case size >= MB: + return fmt.Sprintf("%.1f MB", float64(size)/float64(MB)) + case size >= KB: + return fmt.Sprintf("%.1f KB", float64(size)/float64(KB)) + default: + return fmt.Sprintf("%d Bytes", size) + } +} + +func SubSizeStrings(sizes []string) (string, error) { + size := int64(0) + for _, sizeStr := range sizes { + sizeStr := strings.ToLower(sizeStr) + if strings.Contains(sizeStr, "gb") { + sizeStr = strings.ReplaceAll(sizeStr, "gb", "") + sizeStr = strings.TrimSpace(sizeStr) + addSize, err := strconv.ParseFloat(sizeStr, 64) + if err != nil { + return "", err + } + size += int64(addSize * 1024 * 1024 * 1024) + } + if strings.Contains(sizeStr, "mb") { + sizeStr = strings.ReplaceAll(sizeStr, "mb", "") + sizeStr = strings.TrimSpace(sizeStr) + addSize, err := strconv.ParseFloat(sizeStr, 64) + if err 
!= nil { + return "", err + } + size += int64(addSize * 1024 * 1024) + } + if strings.Contains(sizeStr, "kb") { + sizeStr = strings.ReplaceAll(sizeStr, "kb", "") + sizeStr = strings.TrimSpace(sizeStr) + addSize, err := strconv.ParseFloat(sizeStr, 64) + if err != nil { + return "", err + } + size += int64(addSize * 1024) + } + } + return FormatSize(size), nil +} diff --git a/utils/mega.go b/utils/mega.go new file mode 100644 index 0000000..20fe2d6 --- /dev/null +++ b/utils/mega.go @@ -0,0 +1,70 @@ +package utils + +import ( + "bytes" + "errors" + "os" + "os/exec" + "path/filepath" + "regexp" + "strings" +) + +func MegaDownload(url string, path string) (string, []string, error) { + stat, err := os.Stat("torrent") + if err != nil { + if os.IsNotExist(err) { + err = os.Mkdir("torrent", 0755) + if err != nil { + return "", nil, err + } + } else { + return "", nil, err + } + } + if !stat.IsDir() { + os.Remove("torrent") + err = os.Mkdir("torrent", 0755) + if err != nil { + return "", nil, err + } + } + cmd := exec.Command("mega-get", url, path) + var out bytes.Buffer + cmd.Stdout = &out + err = cmd.Run() + if err != nil { + return "", nil, err + } + pathRegex := regexp.MustCompile(`(?i)Download finished: (.*)`) + pathRegexRes := pathRegex.FindAllStringSubmatch(out.String(), -1) + if len(pathRegexRes) == 0 { + return "", nil, errors.New("Mega download failed") + } + pathRegexRes[0][1] = strings.TrimSpace(pathRegexRes[0][1]) + res, err := walkDir(pathRegexRes[0][1]) + if err != nil { + return "", nil, err + } + return pathRegexRes[0][1], res, nil +} + +func walkDir(path string) ([]string, error) { + files, err := os.ReadDir(path) + if err != nil { + return nil, err + } + res := []string{} + for _, file := range files { + if file.IsDir() { + subFiles, err := walkDir(filepath.Join(path, file.Name())) + if err != nil { + return nil, err + } + res = append(res, subFiles...) + } else { + res = append(res, filepath.Join(path, file.Name())) + } + } + return res, nil +} diff --git a/utils/ouo.go b/utils/ouo.go new file mode 100644 index 0000000..aebc995 --- /dev/null +++ b/utils/ouo.go @@ -0,0 +1,123 @@ +package utils + +import ( + "time" + + "github.com/PuerkitoBio/goquery" + http "github.com/bogdanfinn/fhttp" + tlsclient "github.com/bogdanfinn/tls-client" + "github.com/bogdanfinn/tls-client/profiles" + + "errors" + "fmt" + "io" + "net/url" + "strings" +) + +func OuoBypass(ouoURL string) (string, error) { + tempURL := strings.Replace(ouoURL, "ouo.press", "ouo.io", 1) + var res string + u, err := url.Parse(tempURL) + if err != nil { + return "", err + } + + id := tempURL[strings.LastIndex(tempURL, "/")+1:] + jar := tlsclient.NewCookieJar() + options := []tlsclient.HttpClientOption{ + tlsclient.WithTimeoutSeconds(30), + tlsclient.WithClientProfile(profiles.Chrome_110), + tlsclient.WithNotFollowRedirects(), + tlsclient.WithCookieJar(jar), + } + + client, err := tlsclient.NewHttpClient(tlsclient.NewNoopLogger(), options...) 
+ if err != nil { + return "", err + } + + getReq, err := http.NewRequest(http.MethodGet, tempURL, nil) + if err != nil { + return "", err + } + + const chrome110UserAgent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36" + const accept = "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7" + const acceptEncoding = "gzip, deflate, br, zstd" + const acceptLang = "de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7" + getReq.Header = http.Header{ + "accept": {accept}, + "accept-encoding": {acceptEncoding}, + "accept-language": {acceptLang}, + "upgrade-insecure-requests": {"1"}, + "user-agent": {chrome110UserAgent}, + http.HeaderOrderKey: { + "accept", + "accept-language", + "user-agent", + }, + } + + resp, err := client.Do(getReq) + if err != nil { + return "", err + } + defer func() { + if resp != nil && resp.Body != nil { + _ = resp.Body.Close() + } + }() + if resp.StatusCode == 403 { + return "", errors.New("ouo.io is blocking the request") + } + readBytes, _ := io.ReadAll(resp.Body) + data := url.Values{} + doc, _ := goquery.NewDocumentFromReader(strings.NewReader(string(readBytes))) + doc.Find("input").Each(func(i int, s *goquery.Selection) { + name, _ := s.Attr("name") + if strings.HasSuffix(name, "token") { + value, _ := s.Attr("value") + data.Add(name, value) + } + }) + nextURL := fmt.Sprintf("%s://%s/go/%s", u.Scheme, u.Host, id) + + recaptchaV3, err := ReCaptcha("https://www.google.com/recaptcha/api2/anchor?ar=1&k=6Lcr1ncUAAAAAH3cghg6cOTPGARa8adOf-y9zv2x&co=aHR0cHM6Ly9vdW8uaW86NDQz&hl=zh-CN&v=rKbTvxTxwcw5VqzrtN-ICwWt&size=invisible&cb=cuzyb4r7cdyg") + if err != nil { + return "", err + } + data.Set("x-token", recaptchaV3) + for i := 0; i < 2; i++ { + postReq, err := http.NewRequest(http.MethodPost, nextURL, strings.NewReader(data.Encode())) + if err != nil { + return "", err + } + postReq.Header = http.Header{ + "accept": {accept}, + "content-type": {"application/x-www-form-urlencoded"}, + "accept-encoding": {acceptEncoding}, + "accept-language": {acceptLang}, + "upgrade-insecure-requests": {"1"}, + "user-agent": {chrome110UserAgent}, + } + resp, err := client.Do(postReq) + if err != nil { + time.Sleep(time.Second * 3) + continue + } + defer func() { + if resp != nil && resp.Body != nil { + _ = resp.Body.Close() + } + }() + if resp.StatusCode == 302 { + res = resp.Header.Get("Location") + break + } else if resp.StatusCode == 403 { + return "", errors.New("ouo.io is blocking the request") + } + nextURL = fmt.Sprintf("%s://%s/xreallcygo/%s", u.Scheme, u.Host, id) + } + return res, nil +} diff --git a/utils/privatebin.go b/utils/privatebin.go new file mode 100644 index 0000000..a50cfaf --- /dev/null +++ b/utils/privatebin.go @@ -0,0 +1,132 @@ +package utils + +import ( + "bytes" + "compress/flate" + "crypto/aes" + "crypto/cipher" + "crypto/sha256" + "encoding/base64" + "encoding/hex" + "encoding/json" + "errors" + "io" + "strings" + + "github.com/btcsuite/btcutil/base58" + "golang.org/x/crypto/pbkdf2" +) + +type PrivateBinData struct { + Ct string `json:"ct"` + Adata []interface{} `json:"adata"` +} + +func padStart(s string, minLength int, padRune rune) string { + currentLength := len(s) + if currentLength >= minLength { + return s + } + padding := strings.Repeat(string(padRune), minLength-currentLength) + return padding + s +} + +func DecryptPrivateBin(url string, password string) (string, error) { + if !strings.Contains(url, "#") { + return "", 
errors.New("Missing Decrypt Key") + } + key := strings.Split(url, "#")[1] + resp, err := Fetch(FetchConfig{ + Url: url, + Headers: map[string]string{ + "Accept": "application/json, text/javascript, */*; q=0.01", + }, + }) + if err != nil { + return "", err + } + data := PrivateBinData{} + err = json.Unmarshal(resp.Data, &data) + if err != nil { + return "", err + } + type pasteJson struct { + Paste string `json:"paste"` + } + ret, err := decryptPrivateBin(key, data.Adata, data.Ct, password) + if err != nil { + return "", err + } + var j pasteJson + err = json.Unmarshal([]byte(ret), &j) + if err != nil { + return "", err + } + return j.Paste, nil +} + +func decryptPrivateBin(key string, data []interface{}, cipherMessage, password string) (string, error) { + decodedKey := base58.Decode(key) + key = padStart(string(decodedKey), 32, '\x00') + additionalData, err := json.Marshal(data) + if err != nil { + return "", err + } + spec := data[0].([]interface{}) + iterations := int(spec[2].(float64)) + iv, err := base64.StdEncoding.DecodeString(spec[0].(string)) + if err != nil { + return "", err + } + salt, err := base64.StdEncoding.DecodeString(spec[1].(string)) + if err != nil { + return "", err + } + cipherMessageBytes, err := base64.StdEncoding.DecodeString(cipherMessage) + if err != nil { + return "", err + } + + keyArray := []byte(key) + if password != "" { + if spec[7].(string) == "rawdeflate" { + hash := sha256.New() + hash.Write([]byte(password)) + password = hex.EncodeToString(hash.Sum(nil)) + } + passwordArray := []byte(password) + keyArray = append(keyArray, passwordArray...) + } + aesKeyLength := int(spec[3].(float64)) / 8 + deriveKey := pbkdf2.Key(keyArray, salt, iterations, aesKeyLength, sha256.New) + block, err := aes.NewCipher(deriveKey) + if err != nil { + return "", err + } + aesGCM, err := cipher.NewGCMWithNonceSize(block, len(iv)) + if err != nil { + return "", err + } + plaintext, err := aesGCM.Open(nil, iv, cipherMessageBytes, additionalData) + if err != nil { + return "", err + } + if len(spec) >= 8 && spec[7].(string) == "zlib" { + data, err := decompress(plaintext) + if err != nil { + return "", err + } + plaintext = data + } + return string(plaintext), err +} + +func decompress(data []byte) ([]byte, error) { + r := flate.NewReader(bytes.NewReader(data)) + defer r.Close() + decompressed, err := io.ReadAll(r) + if err != nil { + return nil, err + } + return decompressed, nil +} diff --git a/utils/reCaptcha.go b/utils/reCaptcha.go new file mode 100644 index 0000000..e97a67a --- /dev/null +++ b/utils/reCaptcha.go @@ -0,0 +1,79 @@ +package utils + +import ( + "errors" + "fmt" + "io" + "net/url" + + "regexp" + "strings" + + http "github.com/bogdanfinn/fhttp" + tlsclient "github.com/bogdanfinn/tls-client" +) + +func ReCaptcha(anchorUrl string) (string, error) { + urlBase := "https://www.google.com/recaptcha/api2/" + + matches := regexp.MustCompile(`/anchor\?(.*)`).FindStringSubmatch(anchorUrl) + if len(matches) < 2 { + return "", fmt.Errorf("no matches found in ANCHOR_URL") + } + params := matches[1] + + client, err := tlsclient.NewHttpClient(tlsclient.NewNoopLogger()) + if err != nil { + return "", err + } + req, err := http.NewRequest(http.MethodGet, anchorUrl, nil) + if err != nil { + return "", err + } + resp, err := client.Do(req) + if err != nil { + return "", err + } + defer resp.Body.Close() + + if resp.StatusCode != 200 { + return "", errors.New("recaptcha status code is not 200") + } + body, err := io.ReadAll(resp.Body) + if err != nil { + return "", err + } + 
tokenMatches := regexp.MustCompile(`"recaptcha-token" value="(.*?)"`).FindStringSubmatch(string(body)) + if len(tokenMatches) < 2 { + return "", errors.New("no token found in response") + } + token := tokenMatches[1] + paramsMap, err := url.ParseQuery(params) + if err != nil { + return "", err + } + paramsMap.Set("c", token) + paramsMap.Set("reason", "q") + reloadUrl := urlBase + "reload?k=" + paramsMap.Get("k") + postReq, err := http.NewRequest(http.MethodPost, reloadUrl, strings.NewReader(paramsMap.Encode())) + if err != nil { + return "", err + } + postReq.Header.Set("Content-Type", "application/x-www-form-urlencoded") + resp, err = client.Do(postReq) + if err != nil { + return "", err + } + defer resp.Body.Close() + body, err = io.ReadAll(resp.Body) + if err != nil { + return "", err + } + answerMatches := regexp.MustCompile(`"rresp","(.*?)"`).FindStringSubmatch(string(body)) + if len(answerMatches) < 2 { + return "", fmt.Errorf("no answer found in reCaptcha response: %s", string(body)) + } + return answerMatches[1], nil +} + +//https://www.google.com/recaptcha/api2/anchor?ar=1&k=6Lf-ZrEUAAAAAEtmR70o2Rb9JM2QUBCH4j7EzIWX&co=aHR0cHM6Ly93d3cua2VlcGxpbmtzLm9yZzo0NDM.&hl=zh-CN&v=-80zvSY9h4i8O-ocN2P5qTJk&size=normal&cb=xm7hg2ftd5e2 diff --git a/utils/similarity.go b/utils/similarity.go new file mode 100644 index 0000000..c215762 --- /dev/null +++ b/utils/similarity.go @@ -0,0 +1,72 @@ +package utils + +import "strings" + +func min(a, b, c int) int { + if a < b { + if a < c { + return a + } + return c + } + if b < c { + return b + } + return c +} + +func LevenshteinDistance(str1, str2 string) int { + str1 = strings.ToLower(str1) + str2 = strings.ToLower(str2) + s1, s2 := []rune(str1), []rune(str2) + lenS1, lenS2 := len(s1), len(s2) + if lenS1 == 0 { + return lenS2 + } + if lenS2 == 0 { + return lenS1 + } + + d := make([][]int, lenS1+1) + for i := range d { + d[i] = make([]int, lenS2+1) + } + + for i := 0; i <= lenS1; i++ { + d[i][0] = i + } + for j := 0; j <= lenS2; j++ { + d[0][j] = j + } + + for i := 1; i <= lenS1; i++ { + for j := 1; j <= lenS2; j++ { + cost := 0 + if s1[i-1] != s2[j-1] { + cost = 1 + } + d[i][j] = min(d[i-1][j]+1, d[i][j-1]+1, d[i-1][j-1]+cost) + } + } + + return d[lenS1][lenS2] +} + +func Similarity(str1, str2 string) float64 { + str1 = strings.ReplaceAll(str1, " ", "") + str2 = strings.ReplaceAll(str2, " ", "") + distance := LevenshteinDistance(str1, str2) + maxLength := len(str1) + if len(str2) > maxLength { + maxLength = len(str2) + } + + djustedMaxLength := maxLength + (len(str1) + len(str2)) + + if maxLength == 0 { + return 1.0 + } + + similarity := 1.0 - float64(distance)/float64(djustedMaxLength) + return similarity +} diff --git a/utils/unique.go b/utils/unique.go new file mode 100644 index 0000000..81553b9 --- /dev/null +++ b/utils/unique.go @@ -0,0 +1,15 @@ +package utils + +func Unique[T comparable](slice []T) []T { + seen := make(map[T]struct{}) + var result []T + + for _, v := range slice { + if _, ok := seen[v]; !ok { + seen[v] = struct{}{} + result = append(result, v) + } + } + + return result +}
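Editor's note: a minimal usage sketch, not part of the commit, showing how the utils helpers introduced above might be called. The functions and their signatures (Unique, LevenshteinDistance, Similarity, FormatSize, ContainsRussian) are taken from the diff; the main package wrapper is hypothetical, and the import path assumes the "pcgamedb" module name used by the imports throughout this commit.

package main

import (
	"fmt"

	"pcgamedb/utils"
)

func main() {
	// Unique keeps the first occurrence of each element.
	fmt.Println(utils.Unique([]string{"DODI", "FitGirl", "DODI"})) // [DODI FitGirl]

	// Case-insensitive edit distance and the derived similarity score
	// (Similarity strips spaces before comparing).
	fmt.Println(utils.LevenshteinDistance("Elden Ring", "EldenRing")) // 1
	fmt.Printf("%.2f\n", utils.Similarity("Elden Ring", "Elden Rings"))

	// Byte counts formatted for display; Cyrillic detection for filtering releases.
	fmt.Println(utils.FormatSize(3 << 30))          // 3.0 GB
	fmt.Println(utils.ContainsRussian("Ведьмак 3")) // true
}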