game-crawler/crawler/fitgirl.go
2024-12-29 13:17:04 +08:00

209 lines
4.8 KiB
Go

package crawler
import (
"bytes"
"encoding/base64"
"errors"
"fmt"
"regexp"
"strconv"
"strings"
"game-crawler/constant"
"game-crawler/db"
"game-crawler/model"
"game-crawler/utils"
"github.com/PuerkitoBio/goquery"
"go.uber.org/zap"
)
// FitGirlCrawler scrapes game repack listings and detail pages from the
// FitGirl site (base URL taken from constant.FitGirlURL).
// It embeds BaseLogger/BaseError for shared logging and error helpers.
type FitGirlCrawler struct {
*BaseLogger
*BaseError
// NOTE(review): this field shadows the embedded BaseLogger's logger;
// both are set to the same *zap.Logger in NewFitGirlCrawler.
logger *zap.Logger
}
// NewFitGirlCrawler builds a FitGirlCrawler that logs through the given
// zap logger (used both directly and via the embedded BaseLogger).
func NewFitGirlCrawler(logger *zap.Logger) *FitGirlCrawler {
crawler := &FitGirlCrawler{
BaseLogger: &BaseLogger{logger: logger},
BaseError:  &BaseError{},
logger:     logger,
}
return crawler
}
// Name returns the identifier string for this crawler.
func (c *FitGirlCrawler) Name() string {
const crawlerName = "FitGirlCrawler"
return crawlerName
}
// CrawlByUrl fetches a single FitGirl repack page and extracts the game
// details — clean title, raw title, repack size, and magnet link — into a
// GameItem looked up (or created, per db.GetGameItemByUrl's contract) by URL.
//
// The returned item is not persisted here; callers such as Crawl are
// responsible for saving it. Returns a wrapped error when the request,
// HTML parse, title lookup, magnet extraction, or DB lookup fails.
func (c *FitGirlCrawler) CrawlByUrl(URL string) (*model.GameItem, error) {
c.LogCrawlByUrlStart(URL)
resp, err := utils.Request().SetLogger(c.logger.Sugar()).Get(URL)
if err != nil {
return nil, c.ErrRequest(URL, err)
}
// Materialize the body as a string once; the original converted it
// separately for each regex scan, copying the full page twice.
body := string(resp.Body())
doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Body()))
if err != nil {
return nil, c.ErrParseDoc(URL, err)
}
titleElem := doc.Find("h3").First().Find("strong")
if titleElem.Length() == 0 {
return nil, c.ErrGetGameItemDetail(URL, errors.New("failed to find title element"))
}
rawTitle := titleElem.Text()
// Strip child elements so only the bare game name remains as text.
titleElem.Children().Remove()
title := strings.TrimSpace(titleElem.Text())
// The repack size is scraped from raw HTML with a regex (it has no
// stable DOM anchor); a missing size is non-fatal.
sizeRegex := regexp.MustCompile(`Repack Size: <strong>(.*?)</strong>`)
sizeRegexRes := sizeRegex.FindStringSubmatch(body)
size := "unknown"
if len(sizeRegexRes) != 0 {
size = sizeRegexRes[1]
}
// A page without a magnet link is useless to us — treat as an error.
magnetRegex := regexp.MustCompile(`magnet:\?[^"]*`)
magnetRegexRes := magnetRegex.FindStringSubmatch(body)
if len(magnetRegexRes) == 0 {
return nil, c.ErrGetGameItemDetail(URL, errors.New("failed to find magnet link"))
}
magnet := magnetRegexRes[0]
item, err := db.GetGameItemByUrl(URL)
if err != nil {
return nil, c.ErrDBQuery(err)
}
item.Name = title // already trimmed above; no second TrimSpace needed
item.RawName = rawTitle
item.Url = URL
item.Size = size
item.Author = "FitGirl"
item.Downloads = map[string]string{
"magnet": magnet,
}
item.Platform = "windows"
return item, nil
}
// Crawl scrapes one listing page of the FitGirl site, visits each article
// not yet recorded as crawled, saves the resulting items, and triggers
// organization for each. Per-item failures are logged and skipped; only
// request/parse failures for the listing page itself abort the call.
func (c *FitGirlCrawler) Crawl(page int) ([]*model.GameItem, error) {
c.LogCrawlStart(page)
pageURL := fmt.Sprintf(constant.FitGirlURL, page)
resp, err := utils.Request().SetLogger(c.logger.Sugar()).Get(pageURL)
if err != nil {
return nil, c.ErrRequest(pageURL, err)
}
doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Body()))
if err != nil {
return nil, c.ErrParseDoc(pageURL, err)
}
// Each candidate pairs a post URL with its update flag — the base64 of
// url+datetime — used to detect already-crawled, unchanged posts.
type candidate struct {
url  string
flag string
}
var candidates []candidate
doc.Find("article").Each(func(_ int, article *goquery.Selection) {
link, hasLink := article.Find(".entry-title>a").First().Attr("href")
published, hasDate := article.Find("time").First().Attr("datetime")
if !hasLink || !hasDate {
return
}
flag := base64.StdEncoding.EncodeToString([]byte(fmt.Sprintf("%s%s", link, published)))
candidates = append(candidates, candidate{url: link, flag: flag})
})
var items []*model.GameItem
for _, cand := range candidates {
if db.IsFitgirlCrawled(cand.flag) {
c.LogCrawlSkip(cand.url)
continue
}
item, err := c.CrawlByUrl(cand.url)
if err != nil {
c.LogCrawlByUrlError(cand.url, err)
continue
}
item.UpdateFlag = cand.flag
if err := db.SaveGameItem(item); err != nil {
c.LogSaveGameItemError(cand.url, err)
continue
}
items = append(items, item)
// Organization failure is a warning only; the item is already saved.
if err := OrganizeGameItem(item); err != nil {
c.LogOrganizeGameWarn(item.ID, item.Name, err)
}
}
return items, nil
}
// CrawlMulti crawls the given listing pages, skipping any page outside
// the valid range [1, total]. Per-page crawl failures are logged and
// skipped so one bad page does not abort the rest.
func (c *FitGirlCrawler) CrawlMulti(pages []int) ([]*model.GameItem, error) {
totalPages, err := c.GetTotalPageNum()
if err != nil {
return nil, c.ErrGetTotalPageNum(err)
}
var collected []*model.GameItem
for _, p := range pages {
if p < 1 || p > totalPages {
c.LogPageExceedWarn(p)
continue
}
pageItems, err := c.Crawl(p)
if err != nil {
c.LogCrawlError(p, err)
continue
}
collected = append(collected, pageItems...)
}
return collected, nil
}
// CrawlAll crawls every listing page from 1 through the site's current
// total. Failures on individual pages are logged and skipped.
func (c *FitGirlCrawler) CrawlAll() ([]*model.GameItem, error) {
totalPages, err := c.GetTotalPageNum()
if err != nil {
return nil, c.ErrGetTotalPageNum(err)
}
var collected []*model.GameItem
for page := 1; page <= totalPages; page++ {
pageItems, err := c.Crawl(page)
if err != nil {
c.LogCrawlError(page, err)
continue
}
collected = append(collected, pageItems...)
}
return collected, nil
}
// GetTotalPageNum fetches the first listing page and reads the total page
// count out of the pagination widget: the element immediately after the
// ".page-numbers.dots" placeholder holds the last page number.
func (c *FitGirlCrawler) GetTotalPageNum() (int, error) {
firstPageURL := fmt.Sprintf(constant.FitGirlURL, 1)
resp, err := utils.Request().SetLogger(c.logger.Sugar()).Get(firstPageURL)
if err != nil {
return 0, c.ErrRequest(firstPageURL, err)
}
doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Body()))
if err != nil {
return 0, c.ErrParseDoc(firstPageURL, err)
}
lastPageText := doc.Find(".page-numbers.dots").First().Next().Text()
total, err := strconv.Atoi(lastPageText)
if err != nil {
return 0, c.ErrParseInt(lastPageText, err)
}
return total, nil
}