fix: fitgirl crawler
commit 5f24397d80 (parent ced893ce76)
@@ -25,6 +25,10 @@ func NewChovkaCrawler(logger *zap.Logger) *ChovkaCrawler {
 	}
 }
 
+func (c *ChovkaCrawler) Name() string {
+	return "ChovkaCrawler"
+}
+
 func (c *ChovkaCrawler) CrawlByUrl(url string) (*model.GameDownload, error) {
 	resp, err := utils.Fetch(utils.FetchConfig{
 		Url: url,
@@ -7,6 +7,7 @@ import (
 )
 
 type Crawler interface {
+	Name() string
 	Crawl(int) ([]*model.GameDownload, error)
 	CrawlAll() ([]*model.GameDownload, error)
 }
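The hunk above adds Name() to the shared Crawler interface, so every crawler type must now implement it. A compile-time assertion catches a type that misses the method at build time rather than at a runtime type assertion; a minimal sketch, assuming *DODICrawler is meant to satisfy Crawler (Name and Crawl appear in this diff; CrawlAll is assumed, and the var _ line is not part of this commit):

// In package crawler: compilation fails if *DODICrawler ever lacks
// Name, Crawl, or CrawlAll.
var _ Crawler = (*DODICrawler)(nil)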
@@ -27,6 +27,10 @@ func NewDODICrawler(logger *zap.Logger) *DODICrawler {
 	}
 }
 
+func (c *DODICrawler) Name() string {
+	return "DODICrawler"
+}
+
 func (c *DODICrawler) Crawl(page int) ([]*model.GameDownload, error) {
 	return c.crawler.Crawl(page)
 }
@@ -26,6 +26,10 @@ func NewFitGirlCrawler(logger *zap.Logger) *FitGirlCrawler {
 	}
 }
 
+func (c *FitGirlCrawler) Name() string {
+	return "FitGirlCrawler"
+}
+
 func (c *FitGirlCrawler) CrawlByUrl(url string) (*model.GameDownload, error) {
 	resp, err := utils.Fetch(utils.FetchConfig{
 		Url: url,
@@ -26,6 +26,10 @@ func NewGOGGamesCrawler(logger *zap.Logger) *GOGGamesCrawler {
 	}
 }
 
+func (c *GOGGamesCrawler) Name() string {
+	return "GOGGamesCrawler"
+}
+
 func (c *GOGGamesCrawler) CrawlByUrl(url string) (*model.GameDownload, error) {
 	resp, err := utils.Fetch(utils.FetchConfig{
 		Url: url,
@@ -26,6 +26,10 @@ func NewKaOsKrewCrawler(logger *zap.Logger) *KaOsKrewCrawler {
 	}
 }
 
+func (c *KaOsKrewCrawler) Name() string {
+	return "KaOsKrewCrawler"
+}
+
 func (c *KaOsKrewCrawler) Crawl(page int) ([]*model.GameDownload, error) {
 	return c.crawler.Crawl(page)
 }
@@ -32,6 +32,10 @@ func NewOnlineFixCrawler(logger *zap.Logger) *OnlineFixCrawler {
 	}
 }
 
+func (c *OnlineFixCrawler) Name() string {
+	return "OnlineFixCrawler"
+}
+
 func (c *OnlineFixCrawler) Crawl(page int) ([]*model.GameDownload, error) {
 	if !config.Config.OnlineFixAvaliable {
 		c.logger.Error("Need Online Fix account")
@@ -25,6 +25,10 @@ func NewSteamRIPCrawler(logger *zap.Logger) *SteamRIPCrawler {
 	}
 }
 
+func (c *SteamRIPCrawler) Name() string {
+	return "SteamRIPCrawler"
+}
+
 func (c *SteamRIPCrawler) CrawlByUrl(url string) (*model.GameDownload, error) {
 	resp, err := utils.Fetch(utils.FetchConfig{
 		Url: url,
@@ -56,11 +60,20 @@ func (c *SteamRIPCrawler) CrawlByUrl(url string) (*model.GameDownload, error) {
 	if len(megadbRegexRes) != 0 {
 		item.Download = fmt.Sprintf("https:%s", megadbRegexRes[1])
 	}
+	if item.Download == "" {
 	gofileRegex := regexp.MustCompile(`(?i)(?:https?:)?(//gofile\.io/d/[^"]+)`)
 	gofileRegexRes := gofileRegex.FindStringSubmatch(string(resp.Data))
-	if item.Download == "" && len(gofileRegexRes) != 0 {
+	if len(gofileRegexRes) != 0 {
 		item.Download = fmt.Sprintf("https:%s", gofileRegexRes[1])
 	}
+	}
+	if item.Download == "" {
+		filecryptRegex := regexp.MustCompile(`(?i)(?:https?:)?(//filecrypt\.co/Container/[^"]+)`)
+		filecryptRegexRes := filecryptRegex.FindStringSubmatch(string(resp.Data))
+		if len(filecryptRegexRes) != 0 {
+			item.Download = fmt.Sprintf("https:%s", filecryptRegexRes[1])
+		}
+	}
 	if item.Download == "" {
 		return nil, errors.New("Failed to find download link")
 	}
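The hunk above chains three mirror hosts, megadb, then gofile, then filecrypt, with each attempt guarded by item.Download == "". The same precedence can be written as one loop over patterns, which avoids the nested guards; a sketch of that alternative, not what the commit does (the megadb pattern is inferred from the other two and is an assumption):

patterns := []string{
	`(?i)(?:https?:)?(//megadb\.net/[^"]+)`, // assumed; not shown in this diff
	`(?i)(?:https?:)?(//gofile\.io/d/[^"]+)`,
	`(?i)(?:https?:)?(//filecrypt\.co/Container/[^"]+)`,
}
for _, p := range patterns {
	if m := regexp.MustCompile(p).FindStringSubmatch(string(resp.Data)); len(m) != 0 {
		item.Download = fmt.Sprintf("https:%s", m[1])
		break // first matching host wins, same precedence as the if chain
	}
}
if item.Download == "" {
	return nil, errors.New("Failed to find download link")
}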
@@ -26,6 +26,10 @@ func NewXatabCrawler(logger *zap.Logger) *XatabCrawler {
 	}
 }
 
+func (c *XatabCrawler) Name() string {
+	return "XatabCrawler"
+}
+
 func (c *XatabCrawler) Crawl(page int) ([]*model.GameDownload, error) {
 	requestURL := fmt.Sprintf("%s/page/%v", constant.XatabBaseURL, page)
 	resp, err := utils.Fetch(utils.FetchConfig{
@@ -7,5 +7,5 @@ func GetFitgirlAllGameDownloads() ([]*model.GameDownload, error) {
 }
 
 func IsFitgirlCrawled(flag string) bool {
-	return IsGameCrawled(flag, "armgddn")
+	return IsGameCrawled(flag, "fitgirl")
 }
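This is the bug behind the commit title: IsFitgirlCrawled queried the "armgddn" author tag, so lookups for FitGirl entries never matched and every game appeared uncrawled. The effect at a typical call site (the call site itself is assumed, not shown in this diff):

// Assumed usage inside the FitGirl crawler's listing loop:
if db.IsFitgirlCrawled(flag) {
	continue // with the wrong "armgddn" tag this was never true,
	         // so already-stored games were fetched and parsed again
}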
@@ -80,7 +80,7 @@ func IsGameCrawled(flag string, author string) bool {
 	var game model.GameDownload
 	err := GameDownloadCollection.FindOne(ctx, filter).Decode(&game)
 	if err != nil {
-		if errors.Is(mongo.ErrNoDocuments, err) {
+		if errors.Is(err, mongo.ErrNoDocuments) {
 			return false
 		}
 		return false
@@ -97,7 +97,7 @@ func IsGameCrawledByURL(url string) bool {
 	var game model.GameDownload
 	err := GameDownloadCollection.FindOne(ctx, filter).Decode(&game)
 	if err != nil {
-		if errors.Is(mongo.ErrNoDocuments, err) {
+		if errors.Is(err, mongo.ErrNoDocuments) {
 			return false
 		}
 		return false
@@ -176,7 +176,7 @@ func GetGameDownloadByUrl(url string) (*model.GameDownload, error) {
 	filter := bson.M{"url": url}
 	err := GameDownloadCollection.FindOne(ctx, filter).Decode(&item)
 	if err != nil {
-		if errors.Is(mongo.ErrNoDocuments, err) {
+		if errors.Is(err, mongo.ErrNoDocuments) {
 			return &model.GameDownload{}, nil
 		}
 		return nil, err
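The three hunks above fix the same reversed call. The signature is errors.Is(err, target): it walks err's Unwrap chain looking for target, so the order matters whenever the driver error is wrapped. The old order only worked because FindOne happens to return the bare sentinel; any wrapping would have broken it. A quick illustration:

wrapped := fmt.Errorf("find failed: %w", mongo.ErrNoDocuments)
errors.Is(wrapped, mongo.ErrNoDocuments) // true: Is unwraps its first argument
errors.Is(mongo.ErrNoDocuments, wrapped) // false: the sentinel unwraps to nothing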
@@ -399,7 +399,7 @@ func DeduplicateGames() ([]primitive.ObjectID, error) {
 		return nil, err
 	}
 	for _, item := range qres {
-		idsToDelete := item.IDs[1:]
+		idsToDelete := item.IDs[:len(item.IDs)-1]
 		res = append(res, idsToDelete...)
 		_, err = GameDownloadCollection.DeleteMany(ctx, bson.D{{Key: "_id", Value: bson.D{{Key: "$in", Value: idsToDelete}}}})
 		if err != nil {
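The deletion set flips from "keep the first duplicate" to "keep the last". Assuming the aggregation pushes _ids in insertion order (not visible in this diff), the survivor is now the newest document. The slice arithmetic in isolation:

ids := []int{1, 2, 3, 4}      // stand-ins for ObjectIDs, oldest first (assumed order)
fmt.Println(ids[1:])          // [2 3 4]: old behavior, deletes all but the first
fmt.Println(ids[:len(ids)-1]) // [1 2 3]: new behavior, deletes all but the last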
@@ -1,5 +1,5 @@
 package db
 
 func IsSteamRIPCrawled(flag string) bool {
-	return IsGameCrawled(flag, "SteamRIP")
+	return IsGameCrawled(flag, "steamrip")
 }
@@ -15,16 +15,17 @@ func Crawl(logger *zap.Logger) {
 	var games []*model.GameDownload
 	var crawlerMap = crawler.BuildCrawlerMap(logger)
 	for _, item := range crawlerMap {
+		logger.Info("Crawling", zap.String("crawler", item.Name()))
 		if c, ok := item.(crawler.PagedCrawler); ok {
 			g, err := c.CrawlMulti([]int{1, 2, 3})
 			if err != nil {
-				logger.Error("Failed to crawl games", zap.Error(err))
+				logger.Error("Failed to crawl games", zap.String("crawler", c.Name()), zap.Error(err))
 			}
 			games = append(games, g...)
 		} else if c, ok := item.(crawler.SimpleCrawler); ok {
 			g, err := c.CrawlAll()
 			if err != nil {
-				logger.Error("Failed to crawl games", zap.Error(err))
+				logger.Error("Failed to crawl games", zap.String("crawler", c.Name()), zap.Error(err))
 			}
 			games = append(games, g...)
 		}
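Crawl now logs each crawler's Name() before running it and tags failures with the name as well. The two-branch type-assertion dispatch can also be written as a type switch; an equivalent sketch (the PagedCrawler and SimpleCrawler definitions are taken on faith from the calls used here):

for _, item := range crawlerMap {
	logger.Info("Crawling", zap.String("crawler", item.Name()))
	var g []*model.GameDownload
	var err error
	switch c := item.(type) {
	case crawler.PagedCrawler:
		g, err = c.CrawlMulti([]int{1, 2, 3})
	case crawler.SimpleCrawler:
		g, err = c.CrawlAll()
	default:
		continue // neither interface: nothing to run
	}
	if err != nil {
		logger.Error("Failed to crawl games", zap.String("crawler", item.Name()), zap.Error(err))
	}
	games = append(games, g...) // matches the original: partial results kept on error
}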