fix onlinefix

Nite07 2024-12-10 21:37:16 +08:00
parent 8c3b59d622
commit cdf263b611
5 changed files with 118 additions and 64 deletions


@@ -47,7 +47,7 @@ func (c *FreeGOGCrawler) getSession() (*ccs.Session, error) {
 		}
 		jsonBytes, err := json.Marshal(session)
 		if err == nil {
-			_ = cache.SetWithExpire("freegog_waf_session", jsonBytes, 24*time.Hour)
+			_ = cache.SetWithExpire("freegog_waf_session", jsonBytes, 1*time.Hour)
 		}
 	}
 	return &session, nil


@@ -11,7 +11,9 @@ import (
 	"regexp"
 	"strconv"
 	"strings"
+	"time"
 
+	"pcgamedb/cache"
 	"pcgamedb/config"
 	"pcgamedb/constant"
 	"pcgamedb/db"
@@ -23,14 +25,12 @@ import (
 )
 
 type OnlineFixCrawler struct {
 	logger *zap.Logger
-	cookies []*http.Cookie
 }
 
 func NewOnlineFixCrawler(logger *zap.Logger) *OnlineFixCrawler {
 	return &OnlineFixCrawler{
 		logger: logger,
-		cookies: []*http.Cookie{},
 	}
 }
@@ -49,7 +49,8 @@ func (c *OnlineFixCrawler) Crawl(page int) ([]*model.GameItem, error) {
 		c.logger.Error("Failed to fetch", zap.Error(err))
 		return nil, err
 	}
-	doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Body()))
+	body := utils.Windows1251ToUTF8(resp.Body())
+	doc, err := goquery.NewDocumentFromReader(bytes.NewReader(body))
 	if err != nil {
 		c.logger.Error("Failed to parse HTML", zap.Error(err))
 		return nil, err
@@ -102,13 +103,14 @@ func (c *OnlineFixCrawler) CrawlByUrl(URL string) (*model.GameItem, error) {
 	if err != nil {
 		return nil, err
 	}
+	body := utils.Windows1251ToUTF8(resp.Body())
 	titleRegex := regexp.MustCompile(`(?i)<h1.*?>(.*?)</h1>`)
-	titleRegexRes := titleRegex.FindAllStringSubmatch(string(resp.Body()), -1)
+	titleRegexRes := titleRegex.FindAllStringSubmatch(string(body), -1)
 	if len(titleRegexRes) == 0 {
 		return nil, errors.New("failed to find title")
 	}
-	downloadRegex := regexp.MustCompile(`(?i)<a[^>]*\bhref="([^"]+)"[^>]*>(Скачать Torrent|Скачать торрент)</a>`)
-	downloadRegexRes := downloadRegex.FindAllStringSubmatch(string(resp.Body()), -1)
+	downloadRegex := regexp.MustCompile(`(?i)<a[^>]+\bhref="([^"]+)"[^>]+>(Скачать Torrent|Скачать торрент)</a>`)
+	downloadRegexRes := downloadRegex.FindAllStringSubmatch(string(body), -1)
 	if len(downloadRegexRes) == 0 {
 		return nil, errors.New("failed to find download button")
 	}
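
For reference, a standalone sketch (outside this diff) of what the tightened download-link pattern captures. The markup in html is hypothetical; with [^>]+ on both sides of href, the anchor needs at least one extra character (for example other attributes) before and after the href attribute.

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Hypothetical anchor markup for illustration only.
	html := `<a class="btn" href="https://uploads.online-fix.me/example.torrent" rel="nofollow">Скачать Torrent</a>`
	downloadRegex := regexp.MustCompile(`(?i)<a[^>]+\bhref="([^"]+)"[^>]+>(Скачать Torrent|Скачать торрент)</a>`)
	if m := downloadRegex.FindStringSubmatch(html); m != nil {
		fmt.Println(m[1]) // prints the captured href
	}
}
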
@@ -122,12 +124,13 @@ func (c *OnlineFixCrawler) CrawlByUrl(URL string) (*model.GameItem, error) {
 	item.Author = "OnlineFix"
 	item.Size = "0"
 	resp, err = utils.Request().SetHeader("Referer", URL).SetCookies(cookies).Get(downloadRegexRes[0][1])
+	body = utils.Windows1251ToUTF8(resp.Body())
 	if err != nil {
 		return nil, err
 	}
 	if strings.Contains(downloadRegexRes[0][1], "uploads.online-fix.me") {
 		magnetRegex := regexp.MustCompile(`(?i)"(.*?).torrent"`)
-		magnetRegexRes := magnetRegex.FindAllStringSubmatch(string(resp.Body()), -1)
+		magnetRegexRes := magnetRegex.FindAllStringSubmatch(string(body), -1)
 		if len(magnetRegexRes) == 0 {
 			return nil, errors.New("failed to find magnet")
 		}
@@ -140,12 +143,12 @@ func (c *OnlineFixCrawler) CrawlByUrl(URL string) (*model.GameItem, error) {
 			return nil, err
 		}
 	} else if strings.Contains(downloadRegexRes[0][1], "online-fix.me/ext") {
-		if strings.Contains(string(resp.Body()), "mega.nz") {
+		if strings.Contains(string(body), "mega.nz") {
 			if !config.Config.MegaAvaliable {
 				return nil, errors.New("mega is not avaliable")
 			}
 			megaRegex := regexp.MustCompile(`(?i)location.href=\\'([^\\']*)\\'`)
-			megaRegexRes := megaRegex.FindAllStringSubmatch(string(resp.Body()), -1)
+			megaRegexRes := megaRegex.FindAllStringSubmatch(string(body), -1)
 			if len(megaRegexRes) == 0 {
 				return nil, errors.New("failed to find download link")
 			}
@@ -229,37 +232,47 @@ type csrf struct {
 }
 
 func (c *OnlineFixCrawler) getCookies() ([]*http.Cookie, error) {
-	if c.cookies == nil {
-		resp, err := utils.Request().SetHeaders(map[string]string{
-			"X-Requested-With": "XMLHttpRequest",
-			"Referer":          constant.OnlineFixURL,
-		}).Get(constant.OnlineFixCSRFURL)
-		if err != nil {
+	val, exists := cache.Get("onlinefix_cookies")
+	if exists {
+		var cookies []*http.Cookie
+		if err := json.Unmarshal([]byte(val), &cookies); err != nil {
 			return nil, err
 		}
-		var csrf csrf
-		if err = json.Unmarshal(resp.Body(), &csrf); err != nil {
-			return nil, err
-		}
-		c.cookies = resp.Cookies()
-		params := url.Values{}
-		params.Add("login_name", config.Config.OnlineFix.User)
-		params.Add("login_password", config.Config.OnlineFix.Password)
-		params.Add(csrf.Field, csrf.Value)
-		params.Add("login", "submit")
-		resp, err = utils.Request().SetHeaders(map[string]string{
-			"Origin":       constant.OnlineFixURL,
-			"Content-Type": "application/x-www-form-urlencoded",
-			"Referer":      constant.OnlineFixURL,
-		}).SetCookies(c.cookies).SetBody(params).Post(constant.OnlineFixURL)
-		if err != nil {
-			return nil, err
-		}
-		c.cookies = append(c.cookies, resp.Cookies()...)
+		return cookies, nil
 	}
-	return c.cookies, nil
+
+	resp, err := utils.Request().SetHeaders(map[string]string{
+		"X-Requested-With": "XMLHttpRequest",
+		"Referer":          constant.OnlineFixURL,
+	}).Get(constant.OnlineFixCSRFURL)
+	if err != nil {
+		return nil, err
+	}
+	var csrf csrf
+	if err = json.Unmarshal(resp.Body(), &csrf); err != nil {
+		return nil, err
+	}
+	cookies := resp.Cookies()
+	params := url.Values{}
+	params.Add("login_name", config.Config.OnlineFix.User)
+	params.Add("login_password", config.Config.OnlineFix.Password)
+	params.Add(csrf.Field, csrf.Value)
+	params.Add("login", "submit")
+	resp, err = utils.Request().SetHeaders(map[string]string{
+		"Origin":       constant.OnlineFixURL,
+		"Content-Type": "application/x-www-form-urlencoded",
+		"Referer":      constant.OnlineFixURL,
+	}).SetCookies(cookies).SetBody(params.Encode()).Post(constant.OnlineFixURL)
+	if err != nil {
+		return nil, err
+	}
+	cookies = resp.Cookies()
+	jsonBytes, _ := json.Marshal(cookies)
+	_ = cache.SetWithExpire("onlinefix_cookies", string(jsonBytes), time.Hour)
+	return cookies, nil
 }
 
 func OnlineFixFormatter(name string) string {
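
Outside this diff, a minimal sketch of the JSON round-trip the cookie cache above relies on; the cookie names and values are hypothetical.

package main

import (
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Hypothetical session cookies, standing in for resp.Cookies().
	cookies := []*http.Cookie{
		{Name: "PHPSESSID", Value: "example"},
		{Name: "dle_user_id", Value: "1"},
	}

	// Store side: marshal to JSON, as getCookies does before cache.SetWithExpire.
	jsonBytes, _ := json.Marshal(cookies)

	// Load side: unmarshal back into []*http.Cookie on a cache hit.
	var restored []*http.Cookie
	if err := json.Unmarshal(jsonBytes, &restored); err != nil {
		panic(err)
	}
	fmt.Println(restored[0].Name, restored[0].Value) // PHPSESSID example
}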


@@ -1,12 +1,10 @@
 package middleware
 
 import (
-	"strconv"
+	"pcgamedb/log"
 	"strings"
 	"time"
 
-	"pcgamedb/log"
-
 	"github.com/gin-gonic/gin"
 	"go.uber.org/zap"
 )
@@ -14,34 +12,61 @@ import (
 func Logger() gin.HandlerFunc {
 	return func(c *gin.Context) {
 		startTime := time.Now()
+		path := c.Request.URL.Path
+		raw := c.Request.URL.RawQuery
+
 		c.Next()
-		endTime := time.Now()
-		latencyTime := endTime.Sub(startTime).Milliseconds()
-		reqMethod := c.Request.Method
-		reqURI := c.Request.RequestURI
-		statusCode := c.Writer.Status()
-		clientIP := c.ClientIP()
-		if strings.HasPrefix(reqURI, "/swagger/") ||
-			strings.EqualFold(reqURI, "/favicon.ico") {
+
+		if shouldSkipLog(path) {
 			return
 		}
-		log.Logger.Info(
-			"request",
-			zap.Int("code", statusCode),
-			zap.String("method", reqMethod),
-			zap.String("uri", reqURI),
-			zap.String("ip", clientIP),
-			zap.String("latency", strconv.Itoa(int(latencyTime))+"ms"),
-		)
+
+		if raw != "" {
+			path = path + "?" + raw
+		}
+
+		fields := []zap.Field{
+			zap.Int("status", c.Writer.Status()),
+			zap.String("method", c.Request.Method),
+			zap.String("path", path),
+			zap.String("ip", getRealIP(c)),
+			zap.Duration("latency", time.Since(startTime)),
+		}
+
 		if len(c.Errors) > 0 {
-			for _, e := range c.Errors.Errors() {
-				log.Logger.Error(e)
-			}
+			fields = append(fields, zap.Strings("errors", c.Errors.Errors()))
 		}
+
+		log.Logger.Info("Request", fields...)
 	}
 }
+
+func getRealIP(c *gin.Context) string {
+	if ip := c.GetHeader("X-Real-IP"); ip != "" {
+		return ip
+	}
+	if ip := c.GetHeader("X-Forwarded-For"); ip != "" {
+		if index := strings.Index(ip, ","); index != -1 {
+			return strings.TrimSpace(ip[:index])
+		}
+		return ip
+	}
+	if ip := c.GetHeader("X-Originating-IP"); ip != "" {
+		return ip
+	}
+	return c.ClientIP()
+}
+
+func shouldSkipLog(path string) bool {
+	skipPaths := []string{
+		"/swagger/",
+		"/favicon.ico",
+		"/health",
+	}
+	for _, p := range skipPaths {
+		if strings.HasPrefix(path, p) {
+			return true
+		}
+	}
+	return false
+}
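
Outside this diff, a short sketch of how the reworked Logger middleware would typically be wired into a gin engine; the route and port are hypothetical.

package main

import (
	"github.com/gin-gonic/gin"

	"pcgamedb/middleware"
)

func main() {
	r := gin.New()
	// Register the structured request logger; /swagger/, /favicon.ico and
	// /health are skipped by shouldSkipLog.
	r.Use(middleware.Logger())
	r.GET("/ping", func(c *gin.Context) { c.String(200, "pong") }) // hypothetical route
	_ = r.Run(":8080")
}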

utils/decoder.go (new file, 16 additions)

@@ -0,0 +1,16 @@
+package utils
+
+import (
+	"bytes"
+	"io"
+
+	"golang.org/x/text/encoding/htmlindex"
+	"golang.org/x/text/transform"
+)
+
+func Windows1251ToUTF8(b []byte) []byte {
+	decoder, _ := htmlindex.Get("windows-1251")
+	reader := transform.NewReader(bytes.NewReader(b), decoder.NewDecoder().Transformer)
+	body, _ := io.ReadAll(reader)
+	return body
+}
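
Outside this diff, a minimal usage sketch of the new helper converting a Windows-1251 byte slice to UTF-8; the sample bytes are a hypothetical input (they spell "Привет" in Windows-1251).

package main

import (
	"fmt"

	"pcgamedb/utils"
)

func main() {
	// "Привет" encoded as Windows-1251 (hypothetical sample bytes).
	raw := []byte{0xcf, 0xf0, 0xe8, 0xe2, 0xe5, 0xf2}
	fmt.Println(string(utils.Windows1251ToUTF8(raw))) // Привет
}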


@@ -14,5 +14,5 @@ func init() {
 }
 
 func Request() *resty.Request {
-	return client.R().SetHeader("User-Agent", "Mozilla/5.0 (X11; Linux x86_64; rv:133.0) Gecko/20100101 Firefox/133.0")
+	return client.R().SetHeader("Accept-Charset", "utf-8").SetHeader("User-Agent", "Mozilla/5.0 (X11; Linux x86_64; rv:133.0) Gecko/20100101 Firefox/133.0")
 }
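
Outside this diff, a minimal sketch of how the shared Request() helper is typically consumed; the URL is hypothetical. Every request created this way now carries the Accept-Charset: utf-8 header in addition to the Firefox User-Agent.

package main

import (
	"fmt"

	"pcgamedb/utils"
)

func main() {
	resp, err := utils.Request().Get("https://example.com") // hypothetical URL
	if err != nil {
		panic(err)
	}
	fmt.Println(resp.StatusCode())
}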