package crawler

import (
	"bytes"
	"errors"
	"fmt"
	"regexp"
	"strconv"
	"strings"

	"pcgamedb/constant"
	"pcgamedb/db"
	"pcgamedb/model"
	"pcgamedb/utils"

	"github.com/PuerkitoBio/goquery"
	"go.uber.org/zap"
)

// Formatter normalizes a raw torrent title into a clean game name.
type Formatter func(string) string
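
// An illustrative Formatter (an assumption for this sketch, not code from
// this repository): strip a trailing " (v1.0)"-style suffix and trim
// surrounding whitespace.
//
//	var exampleFormatter Formatter = func(raw string) string {
//		return strings.TrimSpace(regexp.MustCompile(`\s*\(v[\d.]+\)\s*$`).ReplaceAllString(raw, ""))
//	}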

type s1337xCrawler struct {
	source    string
	platform  string
	formatter Formatter
	logger    *zap.Logger
}
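
// New1337xCrawler builds a crawler for one 1337x listing category. A
// minimal, illustrative wiring (the category, platform label, and
// formatter below are assumptions, not values from this repository):
//
//	logger, _ := zap.NewProduction()
//	c := New1337xCrawler("GOG-torrents", "windows", strings.TrimSpace, logger)
//	items, err := c.Crawl(1)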
func New1337xCrawler(source string, platform string, formatter Formatter, logger *zap.Logger) *s1337xCrawler {
	return &s1337xCrawler{
		source:    source,
		platform:  platform,
		formatter: formatter,
		logger:    logger,
	}
}

// Crawl fetches one listing page of c.source, collects the torrent
// detail-page links, and crawls, saves, and organizes every game that
// has not been crawled before.
func (c *s1337xCrawler) Crawl(page int) ([]*model.GameItem, error) {
	requestUrl := fmt.Sprintf("%s/%s/%d/", constant.C1337xBaseURL, c.source, page)
	resp, err := utils.Request().Get(requestUrl)
	if err != nil {
		return nil, err
	}
	doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Body()))
	if err != nil {
		return nil, err
	}
	// Each listing row holds two <a> tags under .name; the second one
	// links to the torrent detail page.
	var urls []string
	doc.Find("tbody>tr").Each(func(i int, trNode *goquery.Selection) {
		nameSelection := trNode.Find(".name").First()
		if aNode := nameSelection.Find("a").Eq(1); aNode.Length() > 0 {
			url, _ := aNode.Attr("href")
			urls = append(urls, url)
		}
	})
	var res []*model.GameItem
	for _, u := range urls {
		u = fmt.Sprintf("%s%s", constant.C1337xBaseURL, u)
		if db.IsGameCrawledByURL(u) {
			continue
		}
		c.logger.Info("Crawling", zap.String("URL", u))
		item, err := c.CrawlByUrl(u)
		if err != nil {
			c.logger.Warn("Failed to crawl", zap.Error(err), zap.String("URL", u))
			continue
		}
		if err := db.SaveGameItem(item); err != nil {
			c.logger.Warn("Failed to save", zap.Error(err), zap.String("URL", u))
			continue
		}
		res = append(res, item)
		if err := OrganizeGameItem(item); err != nil {
			c.logger.Warn("Failed to organize", zap.Error(err), zap.String("URL", u))
			continue
		}
	}
	return res, nil
}

// CrawlByUrl scrapes a single torrent detail page into a GameItem.
func (c *s1337xCrawler) CrawlByUrl(URL string) (*model.GameItem, error) {
	resp, err := utils.Request().Get(URL)
	if err != nil {
		return nil, err
	}
	item := &model.GameItem{}
	item.Url = URL
	doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Body()))
	if err != nil {
		return nil, err
	}
	// Detail metadata is rendered as <li><strong>key</strong><span>value</span></li>.
	info := make(map[string]string)
	doc.Find(".torrent-detail-page ul.list>li").Each(func(i int, li *goquery.Selection) {
		info[strings.TrimSpace(li.Find("strong").Text())] = strings.TrimSpace(li.Find("span").Text())
	})
	// Guard against pages without a magnet link rather than indexing a
	// possibly empty match slice.
	magnet := regexp.MustCompile(`magnet:\?[^"]*`).FindString(string(resp.Body()))
	if magnet == "" {
		return nil, errors.New("magnet link not found")
	}
	item.Size = info["Total size"]
	// The <title> has the form "Download <name> Torrent | 1337x".
	item.RawName = doc.Find("title").Text()
	item.RawName = strings.Replace(item.RawName, "Download ", "", 1)
	item.RawName = strings.TrimSpace(strings.Replace(item.RawName, "Torrent | 1337x", " ", 1))
	item.Name = c.formatter(item.RawName)
	item.Download = magnet
	item.Author = strings.ReplaceAll(c.source, "-torrents", "")
	item.Platform = c.platform
	return item, nil
}

// CrawlMulti crawls the given listing pages, skipping any page number
// beyond the site's last page.
func (c *s1337xCrawler) CrawlMulti(pages []int) (res []*model.GameItem, err error) {
	totalPageNum, err := c.GetTotalPageNum()
	if err != nil {
		return nil, err
	}
	for _, page := range pages {
		if page > totalPageNum {
			continue
		}
		items, err := c.Crawl(page)
		if err != nil {
			return nil, err
		}
		res = append(res, items...)
	}
	return res, nil
}

// CrawlAll walks every listing page from 1 through the last page.
func (c *s1337xCrawler) CrawlAll() (res []*model.GameItem, err error) {
	totalPageNum, err := c.GetTotalPageNum()
	if err != nil {
		return nil, err
	}
	for i := 1; i <= totalPageNum; i++ {
		items, err := c.Crawl(i)
		if err != nil {
			return nil, err
		}
		res = append(res, items...)
	}
	return res, nil
}

// GetTotalPageNum fetches the first listing page and derives the page
// count from the pagination's "last page" link, whose href has the form
// "/<source>/<N>/".
func (c *s1337xCrawler) GetTotalPageNum() (int, error) {
	requestUrl := fmt.Sprintf("%s/%s/%d/", constant.C1337xBaseURL, c.source, 1)
	resp, err := utils.Request().Get(requestUrl)
	if err != nil {
		return 0, err
	}
	doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Body()))
	if err != nil {
		return 0, err
	}
	pageStr, exist := doc.Find(".last").Find("a").Attr("href")
	if !exist {
		return 0, errors.New("total page num not found")
	}
	// Strip the source segment and slashes, leaving only the page number.
	pageStr = strings.ReplaceAll(pageStr, c.source, "")
	pageStr = strings.ReplaceAll(pageStr, "/", "")
	totalPageNum, err := strconv.Atoi(pageStr)
	if err != nil {
		return 0, err
	}
	return totalPageNum, nil
}
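
// A fuller driving loop lives with this package's callers; as a hedged
// sketch only, a periodic full crawl might look like this (the function
// name and surrounding wiring are assumptions, not code from this repo):
//
//	func runFullCrawl(c *s1337xCrawler) {
//		if items, err := c.CrawlAll(); err != nil {
//			c.logger.Error("full crawl failed", zap.Error(err))
//		} else {
//			c.logger.Info("full crawl done", zap.Int("games", len(items)))
//		}
//	}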