package crawler

import (
	"bytes"
	"errors"
	"fmt"
	"regexp"
	"strconv"
	"strings"

	"pcgamedb/constant"
	"pcgamedb/db"
	"pcgamedb/model"
	"pcgamedb/utils"

	"github.com/PuerkitoBio/goquery"
	"go.uber.org/zap"
)

// FitGirlCrawler crawls game repacks from the FitGirl site.
type FitGirlCrawler struct {
	logger *zap.Logger
}

// NewFitGirlCrawler returns a FitGirlCrawler that logs through the given logger.
func NewFitGirlCrawler(logger *zap.Logger) *FitGirlCrawler {
	return &FitGirlCrawler{
		logger: logger,
	}
}
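
// A minimal usage sketch (the logger setup and URL are illustrative, not part
// of this package):
//
//	logger, _ := zap.NewProduction()
//	c := NewFitGirlCrawler(logger)
//	item, err := c.CrawlByUrl("https://fitgirl-repacks.site/example-game/")
//	if err != nil {
//		logger.Warn("crawl failed", zap.Error(err))
//	}
//	_ = item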

// Name returns the crawler's identifier.
func (c *FitGirlCrawler) Name() string {
	return "FitGirlCrawler"
}

// CrawlByUrl fetches a single repack page and extracts its title, repack
// size, and magnet link into a GameItem.
func (c *FitGirlCrawler) CrawlByUrl(URL string) (*model.GameItem, error) {
	resp, err := utils.Request().Get(URL)
	if err != nil {
		return nil, err
	}
	doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Body()))
	if err != nil {
		return nil, err
	}
	titleElem := doc.Find("h3").First().Find("strong")
	if titleElem.Length() == 0 {
		return nil, errors.New("failed to find title")
	}
	// Capture the full heading first, then strip child elements so only the
	// bare title text remains.
	rawTitle := titleElem.Text()
	titleElem.Children().Remove()
	title := strings.TrimSpace(titleElem.Text())
	sizeRegex := regexp.MustCompile(`Repack Size: <strong>(.*?)</strong>`)
	sizeRegexRes := sizeRegex.FindStringSubmatch(string(resp.Body()))
	if len(sizeRegexRes) == 0 {
		return nil, errors.New("failed to find size")
	}
	size := sizeRegexRes[1]
	magnetRegex := regexp.MustCompile(`magnet:\?[^"]*`)
	magnetRegexRes := magnetRegex.FindStringSubmatch(string(resp.Body()))
	if len(magnetRegexRes) == 0 {
		return nil, errors.New("failed to find magnet")
	}
	magnet := magnetRegexRes[0]
	// Look up the stored item for this URL so an existing record is updated
	// in place rather than duplicated.
	item, err := db.GetGameItemByUrl(URL)
	if err != nil {
		return nil, err
	}
	item.Name = title
	item.RawName = rawTitle
	item.Url = URL
	item.Size = size
	item.Author = "FitGirl"
	item.Download = magnet
	item.Platform = "windows"
	return item, nil
}
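
// For reference, the regexes in CrawlByUrl run over the raw page HTML rather
// than the parsed document. Given a hypothetical snippet such as
//
//	Repack Size: <strong>from 37.2 GB</strong>
//	<a href="magnet:?xt=urn:btih:...">magnet</a>
//
// the size pattern captures "from 37.2 GB", and the magnet pattern matches
// from "magnet:?" up to (but not including) the closing double quote of the
// href attribute.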

// Crawl fetches one list page and crawls every article on it that has not
// been seen before, as tracked by per-post update flags.
func (c *FitGirlCrawler) Crawl(page int) ([]*model.GameItem, error) {
	resp, err := utils.Request().Get(fmt.Sprintf(constant.FitGirlURL, page))
	if err != nil {
		c.logger.Error("Failed to fetch", zap.Error(err))
		return nil, err
	}
	doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Body()))
	if err != nil {
		c.logger.Error("Failed to parse HTML", zap.Error(err))
		return nil, err
	}
	var urls []string
	var updateFlags []string // post URL concatenated with its datetime
	doc.Find("article").Each(func(i int, s *goquery.Selection) {
		u, exist1 := s.Find(".entry-title>a").First().Attr("href")
		d, exist2 := s.Find("time").First().Attr("datetime")
		if exist1 && exist2 {
			urls = append(urls, u)
			updateFlags = append(updateFlags, fmt.Sprintf("%s%s", u, d))
		}
	})
	var res []*model.GameItem
	for i, u := range urls {
		if db.IsFitgirlCrawled(updateFlags[i]) {
			continue
		}
		c.logger.Info("Crawling", zap.String("URL", u))
		item, err := c.CrawlByUrl(u)
		if err != nil {
			c.logger.Warn("Failed to crawl", zap.Error(err), zap.String("URL", u))
			continue
		}
		item.UpdateFlag = updateFlags[i]
		err = db.SaveGameItem(item)
		if err != nil {
			c.logger.Warn("Failed to save", zap.Error(err))
			continue
		}
		res = append(res, item)
		if err := OrganizeGameItem(item); err != nil {
			c.logger.Warn("Failed to organize", zap.Error(err), zap.String("URL", u))
			continue
		}
	}
	return res, nil
}
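
// Note on deduplication: because the update flag is the post URL plus its
// <time datetime="..."> value, an edited repost (same URL, newer datetime)
// produces a new flag and is crawled again. A hypothetical flag:
//
//	https://fitgirl-repacks.site/example-game/2024-01-02T03:04:05+00:00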

// CrawlMulti crawls the given list pages and aggregates their results.
func (c *FitGirlCrawler) CrawlMulti(pages []int) ([]*model.GameItem, error) {
	var res []*model.GameItem
	for _, page := range pages {
		items, err := c.Crawl(page)
		if err != nil {
			return nil, err
		}
		res = append(res, items...)
	}
	return res, nil
}

// CrawlAll crawls every list page from the first through the site's last.
func (c *FitGirlCrawler) CrawlAll() ([]*model.GameItem, error) {
	var res []*model.GameItem
	totalPageNum, err := c.GetTotalPageNum()
	if err != nil {
		return nil, err
	}
	for i := 1; i <= totalPageNum; i++ {
		items, err := c.Crawl(i)
		if err != nil {
			return nil, err
		}
		res = append(res, items...)
	}
	return res, nil
}

// GetTotalPageNum reads the pagination of the first list page and returns the
// last page number, i.e. the text of the element immediately following the
// ".page-numbers.dots" ellipsis.
func (c *FitGirlCrawler) GetTotalPageNum() (int, error) {
	resp, err := utils.Request().Get(fmt.Sprintf(constant.FitGirlURL, 1))
	if err != nil {
		return 0, err
	}
	doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Body()))
	if err != nil {
		return 0, err
	}
	page, err := strconv.Atoi(doc.Find(".page-numbers.dots").First().Next().Text())
	if err != nil {
		return 0, err
	}
	return page, nil
}
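
// A sketch of a full crawl driver (error handling abbreviated; assumes
// constant.FitGirlURL is a format string for the paginated list):
//
//	logger, _ := zap.NewProduction()
//	c := NewFitGirlCrawler(logger)
//	items, err := c.CrawlAll()
//	if err != nil {
//		logger.Fatal("crawl failed", zap.Error(err))
//	}
//	logger.Info("done", zap.Int("newItems", len(items)))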