diff --git a/crawler/chovka.go b/crawler/chovka.go
index 35d5582..84d0aa2 100644
--- a/crawler/chovka.go
+++ b/crawler/chovka.go
@@ -25,6 +25,10 @@ func NewChovkaCrawler(logger *zap.Logger) *ChovkaCrawler {
 	}
 }
 
+func (c *ChovkaCrawler) Name() string {
+	return "ChovkaCrawler"
+}
+
 func (c *ChovkaCrawler) CrawlByUrl(url string) (*model.GameDownload, error) {
 	resp, err := utils.Fetch(utils.FetchConfig{
 		Url: url,
diff --git a/crawler/crawler.go b/crawler/crawler.go
index 9ac5953..fcbad8d 100644
--- a/crawler/crawler.go
+++ b/crawler/crawler.go
@@ -7,6 +7,7 @@ import (
 )
 
 type Crawler interface {
+	Name() string
 	Crawl(int) ([]*model.GameDownload, error)
 	CrawlAll() ([]*model.GameDownload, error)
 }
diff --git a/crawler/dodi.go b/crawler/dodi.go
index 1e64bcb..e369c83 100644
--- a/crawler/dodi.go
+++ b/crawler/dodi.go
@@ -27,6 +27,10 @@ func NewDODICrawler(logger *zap.Logger) *DODICrawler {
 	}
 }
 
+func (c *DODICrawler) Name() string {
+	return "DODICrawler"
+}
+
 func (c *DODICrawler) Crawl(page int) ([]*model.GameDownload, error) {
 	return c.crawler.Crawl(page)
 }
diff --git a/crawler/fitgirl.go b/crawler/fitgirl.go
index da5d8e7..5fc42a1 100644
--- a/crawler/fitgirl.go
+++ b/crawler/fitgirl.go
@@ -26,6 +26,10 @@ func NewFitGirlCrawler(logger *zap.Logger) *FitGirlCrawler {
 	}
 }
 
+func (c *FitGirlCrawler) Name() string {
+	return "FitGirlCrawler"
+}
+
 func (c *FitGirlCrawler) CrawlByUrl(url string) (*model.GameDownload, error) {
 	resp, err := utils.Fetch(utils.FetchConfig{
 		Url: url,
diff --git a/crawler/goggames.go b/crawler/goggames.go
index e25e6ec..a5ebff3 100644
--- a/crawler/goggames.go
+++ b/crawler/goggames.go
@@ -26,6 +26,10 @@ func NewGOGGamesCrawler(logger *zap.Logger) *GOGGamesCrawler {
 	}
 }
 
+func (c *GOGGamesCrawler) Name() string {
+	return "GOGGamesCrawler"
+}
+
 func (c *GOGGamesCrawler) CrawlByUrl(url string) (*model.GameDownload, error) {
 	resp, err := utils.Fetch(utils.FetchConfig{
 		Url: url,
diff --git a/crawler/kaoskrew.go b/crawler/kaoskrew.go
index bc7533d..c363655 100644
--- a/crawler/kaoskrew.go
+++ b/crawler/kaoskrew.go
@@ -26,6 +26,10 @@ func NewKaOsKrewCrawler(logger *zap.Logger) *KaOsKrewCrawler {
 	}
 }
 
+func (c *KaOsKrewCrawler) Name() string {
+	return "KaOsKrewCrawler"
+}
+
 func (c *KaOsKrewCrawler) Crawl(page int) ([]*model.GameDownload, error) {
 	return c.crawler.Crawl(page)
 }
diff --git a/crawler/onlinefix.go b/crawler/onlinefix.go
index 0ed08fa..f670633 100644
--- a/crawler/onlinefix.go
+++ b/crawler/onlinefix.go
@@ -32,6 +32,10 @@ func NewOnlineFixCrawler(logger *zap.Logger) *OnlineFixCrawler {
 	}
 }
 
+func (c *OnlineFixCrawler) Name() string {
+	return "OnlineFixCrawler"
+}
+
 func (c *OnlineFixCrawler) Crawl(page int) ([]*model.GameDownload, error) {
 	if !config.Config.OnlineFixAvaliable {
 		c.logger.Error("Need Online Fix account")
diff --git a/crawler/steamrip.go b/crawler/steamrip.go
index 6771813..504814d 100644
--- a/crawler/steamrip.go
+++ b/crawler/steamrip.go
@@ -25,6 +25,10 @@ func NewSteamRIPCrawler(logger *zap.Logger) *SteamRIPCrawler {
 	}
 }
 
+func (c *SteamRIPCrawler) Name() string {
+	return "SteamRIPCrawler"
+}
+
 func (c *SteamRIPCrawler) CrawlByUrl(url string) (*model.GameDownload, error) {
 	resp, err := utils.Fetch(utils.FetchConfig{
 		Url: url,
@@ -56,10 +60,19 @@ func (c *SteamRIPCrawler) CrawlByUrl(url string) (*model.GameDownload, error) {
 	if len(megadbRegexRes) != 0 {
 		item.Download = fmt.Sprintf("https:%s", megadbRegexRes[1])
 	}
-	gofileRegex := regexp.MustCompile(`(?i)(?:https?:)?(//gofile\.io/d/[^"]+)`)
-	gofileRegexRes := gofileRegex.FindStringSubmatch(string(resp.Data))
-	if item.Download == "" && len(gofileRegexRes) != 0 {
-		item.Download = fmt.Sprintf("https:%s", gofileRegexRes[1])
+	if item.Download == "" {
+		gofileRegex := regexp.MustCompile(`(?i)(?:https?:)?(//gofile\.io/d/[^"]+)`)
+		gofileRegexRes := gofileRegex.FindStringSubmatch(string(resp.Data))
+		if len(gofileRegexRes) != 0 {
+			item.Download = fmt.Sprintf("https:%s", gofileRegexRes[1])
+		}
+	}
+	if item.Download == "" {
+		filecryptRegex := regexp.MustCompile(`(?i)(?:https?:)?(//filecrypt\.co/Container/[^"]+)`)
+		filecryptRegexRes := filecryptRegex.FindStringSubmatch(string(resp.Data))
+		if len(filecryptRegexRes) != 0 {
+			item.Download = fmt.Sprintf("https:%s", filecryptRegexRes[1])
+		}
 	}
 	if item.Download == "" {
 		return nil, errors.New("Failed to find download link")
diff --git a/crawler/xatab.go b/crawler/xatab.go
index 624d25a..42fb720 100644
--- a/crawler/xatab.go
+++ b/crawler/xatab.go
@@ -26,6 +26,10 @@ func NewXatabCrawler(logger *zap.Logger) *XatabCrawler {
 	}
 }
 
+func (c *XatabCrawler) Name() string {
+	return "XatabCrawler"
+}
+
 func (c *XatabCrawler) Crawl(page int) ([]*model.GameDownload, error) {
 	requestURL := fmt.Sprintf("%s/page/%v", constant.XatabBaseURL, page)
 	resp, err := utils.Fetch(utils.FetchConfig{
diff --git a/db/fitgirl.go b/db/fitgirl.go
index 70702c5..1dd32fa 100644
--- a/db/fitgirl.go
+++ b/db/fitgirl.go
@@ -7,5 +7,5 @@ func GetFitgirlAllGameDownloads() ([]*model.GameDownload, error) {
 }
 
 func IsFitgirlCrawled(flag string) bool {
-	return IsGameCrawled(flag, "armgddn")
+	return IsGameCrawled(flag, "fitgirl")
 }
diff --git a/db/game.go b/db/game.go
index b769b8d..a94bb5f 100644
--- a/db/game.go
+++ b/db/game.go
@@ -80,7 +80,7 @@ func IsGameCrawled(flag string, author string) bool {
 	var game model.GameDownload
 	err := GameDownloadCollection.FindOne(ctx, filter).Decode(&game)
 	if err != nil {
-		if errors.Is(mongo.ErrNoDocuments, err) {
+		if errors.Is(err, mongo.ErrNoDocuments) {
 			return false
 		}
 		return false
@@ -97,7 +97,7 @@ func IsGameCrawledByURL(url string) bool {
 	var game model.GameDownload
 	err := GameDownloadCollection.FindOne(ctx, filter).Decode(&game)
 	if err != nil {
-		if errors.Is(mongo.ErrNoDocuments, err) {
+		if errors.Is(err, mongo.ErrNoDocuments) {
 			return false
 		}
 		return false
@@ -176,7 +176,7 @@ func GetGameDownloadByUrl(url string) (*model.GameDownload, error) {
 	filter := bson.M{"url": url}
 	err := GameDownloadCollection.FindOne(ctx, filter).Decode(&item)
 	if err != nil {
-		if errors.Is(mongo.ErrNoDocuments, err) {
+		if errors.Is(err, mongo.ErrNoDocuments) {
 			return &model.GameDownload{}, nil
 		}
 		return nil, err
@@ -399,7 +399,7 @@ func DeduplicateGames() ([]primitive.ObjectID, error) {
 		return nil, err
 	}
 	for _, item := range qres {
-		idsToDelete := item.IDs[1:]
+		idsToDelete := item.IDs[:len(item.IDs)-1]
 		res = append(res, idsToDelete...)
 		_, err = GameDownloadCollection.DeleteMany(ctx, bson.D{{Key: "_id", Value: bson.D{{Key: "$in", Value: idsToDelete}}}})
 		if err != nil {
diff --git a/db/steamrip.go b/db/steamrip.go
index 5e8f0bc..46076bd 100644
--- a/db/steamrip.go
+++ b/db/steamrip.go
@@ -1,5 +1,5 @@
 package db
 
 func IsSteamRIPCrawled(flag string) bool {
-	return IsGameCrawled(flag, "SteamRIP")
+	return IsGameCrawled(flag, "steamrip")
 }
diff --git a/task/crawl.go b/task/crawl.go
index 0413ab5..ca32dfb 100644
--- a/task/crawl.go
+++ b/task/crawl.go
@@ -15,16 +15,17 @@ func Crawl(logger *zap.Logger) {
 	var games []*model.GameDownload
 	var crawlerMap = crawler.BuildCrawlerMap(logger)
 	for _, item := range crawlerMap {
+		logger.Info("Crawling", zap.String("crawler", item.Name()))
 		if c, ok := item.(crawler.PagedCrawler); ok {
 			g, err := c.CrawlMulti([]int{1, 2, 3})
 			if err != nil {
-				logger.Error("Failed to crawl games", zap.Error(err))
+				logger.Error("Failed to crawl games", zap.String("crawler", c.Name()), zap.Error(err))
 			}
 			games = append(games, g...)
 		} else if c, ok := item.(crawler.SimpleCrawler); ok {
 			g, err := c.CrawlAll()
 			if err != nil {
-				logger.Error("Failed to crawl games", zap.Error(err))
+				logger.Error("Failed to crawl games", zap.String("crawler", c.Name()), zap.Error(err))
 			}
 			games = append(games, g...)
 		}
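
The thread tying the crawler changes together is the new Name() method on the base Crawler interface, which lets task.Crawl tag both its progress and error logs with the crawler that produced them. Below is a minimal, self-contained sketch of that pattern, not the repository's actual code: the trimmed interfaces, GameDownload struct, and stubPaged type are hypothetical stand-ins for the crawler and model packages, and only the zap API is real.

// Sketch: per-crawler log attribution via a Name() method on the base interface.
package main

import (
	"errors"

	"go.uber.org/zap"
)

type GameDownload struct{ Name string }

// Name() lives on the base interface, so any crawler in the map can
// identify itself in logs regardless of its concrete type.
type Crawler interface {
	Name() string
}

type PagedCrawler interface {
	Crawler
	CrawlMulti(pages []int) ([]*GameDownload, error)
}

type SimpleCrawler interface {
	Crawler
	CrawlAll() ([]*GameDownload, error)
}

// stubPaged simulates a paged crawler whose fetch always fails.
type stubPaged struct{}

func (stubPaged) Name() string { return "StubPagedCrawler" }
func (stubPaged) CrawlMulti(pages []int) ([]*GameDownload, error) {
	return nil, errors.New("fetch failed (simulated)")
}

func main() {
	logger, _ := zap.NewDevelopment()
	defer logger.Sync()

	var games []*GameDownload
	for _, item := range []Crawler{stubPaged{}} {
		// Each iteration announces which crawler is running...
		logger.Info("Crawling", zap.String("crawler", item.Name()))
		switch c := item.(type) {
		case PagedCrawler:
			g, err := c.CrawlMulti([]int{1, 2, 3})
			if err != nil {
				// ...and failures are attributed to it by name.
				logger.Error("Failed to crawl games", zap.String("crawler", c.Name()), zap.Error(err))
			}
			games = append(games, g...)
		case SimpleCrawler:
			g, err := c.CrawlAll()
			if err != nil {
				logger.Error("Failed to crawl games", zap.String("crawler", c.Name()), zap.Error(err))
			}
			games = append(games, g...)
		}
	}
	logger.Info("Crawl finished", zap.Int("games", len(games)))
}

The design point mirrors the diff: without Name() on the shared interface, the error path only sees whatever interface the type assertion produced, so a failing crawler is anonymous in the logs; adding the method to the base interface makes attribution available on every path.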