refactor DeduplicateGameItems
update games.json
parent 7d2722daa4
commit 434dbb1dc2
@@ -10,7 +10,7 @@ pcgamedb is a powerful command-line tool designed to scrape and manage repack ga
 - KaOSKrew
 - DODI
 - FreeGOG
-- ~~GOGGames~~
+- GOGGames
 - OnlineFix
 - Xatab
 - SteamRIP
@@ -4,9 +4,9 @@ import (
     "os"
     "path/filepath"
 
+    "go.uber.org/zap"
     "pcgamedb/db"
     "pcgamedb/log"
-    "go.uber.org/zap"
 
     "github.com/spf13/cobra"
 )
@@ -1,10 +1,10 @@
 package cmd
 
 import (
-    "pcgamedb/db"
-    "pcgamedb/log"
     "github.com/spf13/cobra"
     "go.uber.org/zap"
+    "pcgamedb/db"
+    "pcgamedb/log"
 )
 
 type importCommandConfig struct {
@@ -33,16 +33,10 @@ func organizeRun(cmd *cobra.Command, args []string) {
         log.Logger.Error("Failed to get games", zap.Error(err))
     }
     for _, game := range games {
-        gameInfo, err := crawler.OrganizeGameItem(game)
-        if err == nil {
-            err = db.SaveGameInfo(gameInfo)
-            if err != nil {
-                log.Logger.Error("Failed to save game info", zap.Error(err))
-                continue
-            }
-            log.Logger.Info("Organized game", zap.String("name", game.Name))
-        } else {
-            log.Logger.Error("Failed to organize game", zap.String("name", game.Name))
-        }
+        err := crawler.OrganizeGameItem(game)
+        if err != nil {
+            log.Logger.Error("failed to organize game item")
+        }
+        log.Logger.Info("game item organized", zap.String("name", game.Name))
     }
 }
@@ -38,7 +38,7 @@ func init() {
 }
 
 func addRun(cmd *cobra.Command, args []string) {
-    c := []*ManualCommandConfig{}
+    var c []*ManualCommandConfig
     if manualCmdCfg.Config != "" {
         data, err := os.ReadFile(manualCmdCfg.Config)
         if err != nil {
@@ -1,9 +1,9 @@
 package cmd
 
 import (
+    "go.uber.org/zap"
     "pcgamedb/crawler"
     "pcgamedb/log"
-    "go.uber.org/zap"
 
     "github.com/spf13/cobra"
 )
@@ -1,3 +0,0 @@
-package constant
-
-const EpicStoreSearchQuery = "query searchStoreQuery($allowCountries: String, $category: String, $count: Int, $country: String!, $keywords: String, $locale: String, $namespace: String, $itemNs: String, $sortBy: String, $sortDir: String, $start: Int, $tag: String, $releaseDate: String, $withPrice: Boolean = false, $withPromotions: Boolean = false) {\n  Catalog {\n    searchStore(allowCountries: $allowCountries, category: $category, count: $count, country: $country, keywords: $keywords, locale: $locale, namespace: $namespace, itemNs: $itemNs, sortBy: $sortBy, sortDir: $sortDir, releaseDate: $releaseDate, start: $start, tag: $tag) {\n      elements {\n        title\n        id\n        namespace\n        description\n        effectiveDate\n        keyImages {\n          type\n          url\n        }\n        seller {\n          id\n          name\n        }\n        productSlug\n        urlSlug\n        url\n        tags {\n          id\n        }\n        items {\n          id\n          namespace\n        }\n        customAttributes {\n          key\n          value\n        }\n        categories {\n          path\n        }\n        price(country: $country) @include(if: $withPrice) {\n          totalPrice {\n            discountPrice\n            originalPrice\n            voucherDiscount\n            discount\n            currencyCode\n            currencyInfo {\n              decimals\n            }\n            fmtPrice(locale: $locale) {\n              originalPrice\n              discountPrice\n              intermediatePrice\n            }\n          }\n          lineOffers {\n            appliedRules {\n              id\n              endDate\n              discountSetting {\n                discountType\n              }\n            }\n          }\n        }\n        promotions(category: $category) @include(if: $withPromotions) {\n          promotionalOffers {\n            promotionalOffers {\n              startDate\n              endDate\n              discountSetting {\n                discountType\n                discountPercentage\n              }\n            }\n          }\n          upcomingPromotionalOffers {\n            promotionalOffers {\n              startDate\n              endDate\n              discountSetting {\n                discountType\n                discountPercentage\n              }\n            }\n          }\n        }\n      }\n      paging {\n        count\n        total\n      }\n    }\n  }\n}\n"
@@ -5,7 +5,7 @@ type language struct {
     NativeName string `json:"native_name"`
 }
 
-var IGDBLanguages map[int]language = map[int]language{
+var IGDBLanguages = map[int]language{
     1: {
         Name:       "Arabic",
         NativeName: "العربية",
@@ -4,8 +4,9 @@ const (
     C1337xBaseURL      = "https://www.1337x.to"
     FreeGOGListURL     = "https://freegogpcgames.com/a-z-games-list"
     GOGGamesBaseURL    = "https://www.gog-games.to"
-    GOGGamesURL        = "https://www.gog-games.to/search?page=%v&search=&is_new=false&is_updated=true&in_dev_filter=none&sort_by=last_update_desc"
-    GOGGamesPageURL    = "https://www.gog-games.to/games/%s"
+    GOGGamesURL        = "https://www.gog-games.to/search?page=%v&search=&in_dev_filter=none&sort_by=last_update_desc"
+    GOGGamesPageURL    = "https://www.gog-games.to/game/%s"
+    GOGGamesGameAPIURL = "https://www.gog-games.to/api/v1/games/%s"
     SteamSearchURL     = "https://store.steampowered.com/search"
     SteamAppDetailURL  = "https://store.steampowered.com/api/appdetails"
     SteamAllAppsURL    = "https://api.steampowered.com/ISteamApps/GetAppList/v2/?format=json"
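The corrected `GOGGamesPageURL` and the new `GOGGamesGameAPIURL` are `%s` format strings that take a game slug. A minimal, self-contained sketch of how such constants are typically filled in; the constants are copied here so the example compiles on its own, and the slug value is made up for illustration:

```go
package main

import "fmt"

// Copies of the constants touched in this hunk, so the sketch is self-contained.
const (
    GOGGamesPageURL    = "https://www.gog-games.to/game/%s"
    GOGGamesGameAPIURL = "https://www.gog-games.to/api/v1/games/%s"
)

func main() {
    slug := "some-game-slug" // hypothetical slug, for illustration only
    fmt.Println(fmt.Sprintf(GOGGamesPageURL, slug))    // page URL collected from search results
    fmt.Println(fmt.Sprintf(GOGGamesGameAPIURL, slug)) // JSON API URL the crawler fetches
}
```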
@@ -49,7 +49,7 @@ func (c *s1337xCrawler) Crawl(page int) ([]*model.GameItem, error) {
         return nil, err
     }
     trSelection := doc.Find("tbody>tr")
-    urls := []string{}
+    var urls []string
     trSelection.Each(func(i int, trNode *goquery.Selection) {
         nameSelection := trNode.Find(".name").First()
         if aNode := nameSelection.Find("a").Eq(1); aNode.Length() > 0 {
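Many hunks in this commit replace `urls := []string{}` with `var urls []string`. Both forms are safe to `append` to; the `var` form declares a nil slice and is the form most Go linters suggest when the slice is only ever appended to. A small self-contained illustration (not from the repository):

```go
package main

import "fmt"

func main() {
    var a []string  // nil slice: no backing array yet
    b := []string{} // empty but non-nil slice

    fmt.Println(a == nil, b == nil) // true false

    // append treats both the same way
    a = append(a, "x")
    b = append(b, "y")
    fmt.Println(len(a), len(b)) // 1 1
}
```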
@@ -75,16 +75,10 @@ func (c *s1337xCrawler) Crawl(page int) ([]*model.GameItem, error) {
             continue
         }
         res = append(res, item)
-        info, err := OrganizeGameItem(item)
-        if err != nil {
+        if err := OrganizeGameItem(item); err != nil {
             c.logger.Warn("Failed to organize", zap.Error(err), zap.String("URL", u))
             continue
         }
-        err = db.SaveGameInfo(info)
-        if err != nil {
-            c.logger.Warn("Failed to save", zap.Error(err), zap.String("URL", u))
-            continue
-        }
     }
     return res, nil
 }
@@ -52,7 +52,7 @@ func (c *ChovkaCrawler) CrawlByUrl(url string) (*model.GameItem, error) {
     item.UpdateFlag = item.RawName
     downloadURL := doc.Find(".download-torrent").AttrOr("href", "")
     if downloadURL == "" {
-        return nil, errors.New("Failed to find download URL")
+        return nil, errors.New("failed to find download URL")
     }
     resp, err = utils.Fetch(utils.FetchConfig{
         Headers: map[string]string{"Referer": url},
@@ -81,8 +81,8 @@ func (c *ChovkaCrawler) Crawl(page int) ([]*model.GameItem, error) {
     if err != nil {
         return nil, err
     }
-    urls := []string{}
-    updateFlags := []string{}
+    var urls []string
+    var updateFlags []string
     doc.Find(".entry").Each(func(i int, s *goquery.Selection) {
         u, exist := s.Find(".entry__title.h2 a").Attr("href")
         if !exist {
@@ -107,15 +107,11 @@ func (c *ChovkaCrawler) Crawl(page int) ([]*model.GameItem, error) {
             continue
         }
         res = append(res, item)
-        info, err := OrganizeGameItem(item)
-        if err != nil {
+        if err := OrganizeGameItem(item); err != nil {
             c.logger.Warn("Failed to organize", zap.Error(err), zap.String("URL", u))
             continue
         }
-        if err := db.SaveGameInfo(info); err != nil {
-            c.logger.Warn("Failed to save", zap.Error(err), zap.String("URL", u))
-            continue
-        }
     }
     return res, nil
 }
@@ -32,8 +32,8 @@ func BuildCrawlerMap(logger *zap.Logger) map[string]Crawler {
         "onlinefix": NewOnlineFixCrawler(logger),
         "steamrip":  NewSteamRIPCrawler(logger),
         "chovka":    NewChovkaCrawler(logger),
+        "goggames":  NewGOGGamesCrawler(logger),
         // "gnarly": NewGnarlyCrawler(logger),
-        // "goggames": NewGOGGamesCrawler(logger),
     }
     return ret
 }
@@ -44,7 +44,7 @@ func (c *FitGirlCrawler) CrawlByUrl(url string) (*model.GameItem, error) {
     }
     titleElem := doc.Find("h3").First().Find("strong")
     if titleElem.Length() == 0 {
-        return nil, errors.New("Failed to find title")
+        return nil, errors.New("failed to find title")
     }
     rawTitle := titleElem.Text()
     titleElem.Children().Remove()
@@ -52,13 +52,13 @@ func (c *FitGirlCrawler) CrawlByUrl(url string) (*model.GameItem, error) {
     sizeRegex := regexp.MustCompile(`Repack Size: <strong>(.*?)</strong>`)
     sizeRegexRes := sizeRegex.FindStringSubmatch(string(resp.Data))
     if len(sizeRegexRes) == 0 {
-        return nil, errors.New("Failed to find size")
+        return nil, errors.New("failed to find size")
     }
     size := sizeRegexRes[1]
     magnetRegex := regexp.MustCompile(`magnet:\?[^"]*`)
     magnetRegexRes := magnetRegex.FindStringSubmatch(string(resp.Data))
     if len(magnetRegexRes) == 0 {
-        return nil, errors.New("Failed to find magnet")
+        return nil, errors.New("failed to find magnet")
     }
     magnet := magnetRegexRes[0]
     item, err := db.GetGameItemByUrl(url)
@@ -87,8 +87,8 @@ func (c *FitGirlCrawler) Crawl(page int) ([]*model.GameItem, error) {
         c.logger.Error("Failed to parse HTML", zap.Error(err))
         return nil, err
     }
-    urls := []string{}
-    updateFlags := []string{} //link+date
+    var urls []string
+    var updateFlags []string //link+date
     doc.Find("article").Each(func(i int, s *goquery.Selection) {
         u, exist1 := s.Find(".entry-title>a").First().Attr("href")
         d, exist2 := s.Find("time").First().Attr("datetime")
@@ -115,16 +115,10 @@ func (c *FitGirlCrawler) Crawl(page int) ([]*model.GameItem, error) {
             continue
         }
         res = append(res, item)
-        info, err := OrganizeGameItem(item)
-        if err != nil {
+        if err := OrganizeGameItem(item); err != nil {
             c.logger.Warn("Failed to organize", zap.Error(err), zap.String("URL", u))
             continue
         }
-        err = db.SaveGameInfo(info)
-        if err != nil {
-            c.logger.Warn("Failed to save", zap.Error(err), zap.String("URL", u))
-            continue
-        }
     }
     return res, nil
 }
@@ -52,14 +52,14 @@ func (c *FreeGOGCrawler) Crawl(num int) ([]*model.GameItem, error) {
         return nil, err
     }
 
-    urls := []string{}
-    updateFlags := []string{} //rawName+link
+    var urls []string
+    var updateFlags []string //rawName+link
     doc.Find(".items-outer li a").Each(func(i int, s *goquery.Selection) {
         urls = append(urls, s.AttrOr("href", ""))
         updateFlags = append(updateFlags, s.Text()+s.AttrOr("href", ""))
     })
 
-    res := []*model.GameItem{}
+    var res []*model.GameItem
     for i, u := range urls {
         if count == num {
             break
@@ -81,16 +81,10 @@ func (c *FreeGOGCrawler) Crawl(num int) ([]*model.GameItem, error) {
         }
         res = append(res, item)
         count++
-        info, err := OrganizeGameItem(item)
-        if err != nil {
+        if err := OrganizeGameItem(item); err != nil {
             c.logger.Warn("Failed to organize", zap.Error(err), zap.String("URL", u))
             continue
         }
-        err = db.SaveGameInfo(info)
-        if err != nil {
-            c.logger.Warn("Failed to save", zap.Error(err), zap.String("URL", u))
-            continue
-        }
     }
     return res, nil
 }
@@ -131,7 +125,7 @@ func (c *FreeGOGCrawler) CrawlByUrl(url string, session *utils.WAFSession) (*mod
         }
         item.Download = string(magnet)
     } else {
-        return nil, errors.New("Failed to find magnet link")
+        return nil, errors.New("failed to find magnet link")
     }
     item.Author = "FreeGOG"
     return item, nil
@@ -6,10 +6,10 @@ import (
     "strings"
     "time"
 
+    "go.uber.org/zap"
     "pcgamedb/db"
     "pcgamedb/model"
     "pcgamedb/utils"
-    "go.uber.org/zap"
 
     "go.mongodb.org/mongo-driver/bson/primitive"
     "go.mongodb.org/mongo-driver/mongo"
@@ -22,11 +22,17 @@ func GenerateGameInfo(platform string, id int) (*model.GameInfo, error) {
     case "igdb":
         return GenerateIGDBGameInfo(id)
     default:
-        return nil, errors.New("Invalid ID type")
+        return nil, errors.New("invalid ID type")
     }
 }
 
-func OrganizeGameItem(game *model.GameItem) (*model.GameInfo, error) {
+// OrganizeGameItem Organize and save GameInfo
+func OrganizeGameItem(game *model.GameItem) error {
+    hasOriganized, _ := db.HasGameItemOrganized(game.ID)
+    if hasOriganized {
+        return nil
+    }
+
     item, err := OrganizeGameItemWithIGDB(0, game)
     if err == nil {
         if item.SteamID == 0 {
@@ -35,7 +41,11 @@ func OrganizeGameItem(game *model.GameItem) (*model.GameInfo, error) {
             if err == nil {
                 item.SteamID = steamID
             }
-            return item, nil
+            err = db.SaveGameInfo(item)
+            if err != nil {
+                return err
+            }
+            return nil
         }
     }
     item, err = OrganizeGameItemWithSteam(0, game)
@@ -46,9 +56,13 @@ func OrganizeGameItem(game *model.GameItem) (*model.GameInfo, error) {
                 item.IGDBID = igdbID
             }
         }
-        return item, nil
+        err = db.SaveGameInfo(item)
+        if err != nil {
+            return err
+        }
+        return nil
     }
-    return nil, err
+    return err
 }
 
 func AddGameInfoManually(gameID primitive.ObjectID, platform string, plateformID int) (*model.GameInfo, error) {
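With this refactor, `OrganizeGameItem` skips items that are already referenced by a `GameInfo`, persists the matched info itself via `db.SaveGameInfo`, and reports only an error, so the crawler call sites in this commit drop their own save step. A hedged, self-contained sketch of the caller-side shape using stub types (this is not the real pcgamedb implementation, only an illustration of the pattern):

```go
package main

import (
    "errors"
    "fmt"
)

// Stub standing in for model.GameItem in this sketch.
type GameItem struct{ Name string }

// organizeGameItem mimics the refactored crawler.OrganizeGameItem: it matches
// metadata AND saves it internally, returning only an error.
func organizeGameItem(g *GameItem) error {
    if g.Name == "" {
        return errors.New("failed to organize game item")
    }
    // ... in the real code: match against IGDB/Steam, then save the GameInfo ...
    return nil
}

func main() {
    items := []*GameItem{{Name: "Some Game"}, {Name: ""}}
    for _, item := range items {
        // Callers no longer receive a GameInfo to save; they only check the error.
        if err := organizeGameItem(item); err != nil {
            fmt.Println("warn:", err)
            continue
        }
        fmt.Println("organized:", item.Name)
    }
}
```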
@@ -64,7 +78,7 @@ func AddGameInfoManually(gameID primitive.ObjectID, platform string, plateformID
 func OrganizeGameItemManually(gameID primitive.ObjectID, platform string, platformID int) (*model.GameInfo, error) {
     info, err := db.GetGameInfoByPlatformID(platform, platformID)
     if err != nil {
-        if err == mongo.ErrNoDocuments {
+        if errors.Is(err, mongo.ErrNoDocuments) {
             info, err = AddGameInfoManually(gameID, platform, platformID)
             if err != nil {
                 return nil, err
@@ -98,7 +112,7 @@ func FormatName(name string) string {
     name = regexp.MustCompile(`(?i)[\w’'-]+\s(Edition|Vision|Collection|Bundle|Pack|Deluxe)`).ReplaceAllString(name, " ")
     name = regexp.MustCompile(`(?i)GOTY`).ReplaceAllString(name, "")
     name = regexp.MustCompile(`(?i)nsw for pc`).ReplaceAllString(name, "")
-    name = regexp.MustCompile(`\([^\)]+\)`).ReplaceAllString(name, "")
+    name = regexp.MustCompile(`\([^)]+\)`).ReplaceAllString(name, "")
     name = regexp.MustCompile(`\s+`).ReplaceAllString(name, " ")
     name = strings.Replace(name, ": Remastered", "", -1)
     name = strings.Replace(name, ": Remaster", "", -1)
@@ -133,7 +147,7 @@ func SupplementPlatformIDToGameInfo(logger *zap.Logger) error {
             changed = true
         }
         if changed {
-            logger.Info("Supplemented platform id for game info", zap.String("name", info.Name), zap.Int("igdb", int(info.IGDBID)), zap.Int("steam", int(info.SteamID)))
+            logger.Info("Supplemented platform id for game info", zap.String("name", info.Name), zap.Int("igdb", info.IGDBID), zap.Int("steam", info.SteamID))
             _ = db.SaveGameInfo(info)
         }
     }
@@ -1,106 +0,0 @@
-package crawler
-
-import (
-    "bytes"
-    "regexp"
-    "strings"
-
-    "pcgamedb/constant"
-    "pcgamedb/db"
-    "pcgamedb/model"
-    "pcgamedb/utils"
-
-    "github.com/PuerkitoBio/goquery"
-    "go.uber.org/zap"
-)
-
-type GnarlyCrawler struct {
-    logger *zap.Logger
-}
-
-func NewGnarlyCrawler(logger *zap.Logger) *GnarlyCrawler {
-    return &GnarlyCrawler{
-        logger: logger,
-    }
-}
-
-func (c *GnarlyCrawler) Crawl(num int) ([]*model.GameItem, error) {
-    var res []*model.GameItem
-    count := 0
-    resp, err := utils.Fetch(utils.FetchConfig{
-        Url: constant.GnarlyURL,
-    })
-    if err != nil {
-        return nil, err
-    }
-    doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data))
-    if err != nil {
-        return nil, err
-    }
-    sizeRegex := regexp.MustCompile(`\[(\d+)\s(GB|MB)\]`)
-    pElementHtml := make([]string, 0)
-    doc.Find("p").Each(func(i int, s *goquery.Selection) {
-        pElementHtml = append(pElementHtml, s.Text())
-    })
-    for _, s := range pElementHtml {
-        if strings.Contains(s, "https://bin.0xfc.de/") {
-            lines := strings.Split(s, "\n")
-            for i := 0; i < len(lines); i++ {
-                if strings.Contains(lines[i], "[Gnarly Repacks]") {
-                    i++
-                    if strings.Contains(lines[i], "https://bin.0xfc.de/") {
-                        if count == num {
-                            return res, nil
-                        }
-                        if db.IsGnarlyCrawled(lines[i-1]) {
-                            continue
-                        }
-                        item, err := db.GetGameItemByUrl(lines[i])
-                        if err != nil {
-                            continue
-                        }
-                        sizeRegexRes := sizeRegex.FindStringSubmatch(lines[i])
-                        if len(sizeRegexRes) == 3 {
-                            item.Size = sizeRegexRes[1] + " " + sizeRegexRes[2]
-                        }
-                        c.logger.Info("Crawling", zap.String("Name", lines[i-1]))
-                        item.RawName = lines[i-1]
-                        item.Url = constant.GnarlyURL
-                        item.Author = "Gnarly"
-                        item.Name = GnarlyFormatter(item.RawName)
-                        download, err := utils.DecryptPrivateBin(lines[i], "gnarly")
-                        if err != nil {
-                            continue
-                        }
-                        item.Download = download
-                        item.UpdateFlag = item.RawName
-                        res = append(res, item)
-                        count++
-                        info, err := OrganizeGameItem(item)
-                        if err != nil {
-                            continue
-                        }
-                        err = db.SaveGameInfo(info)
-                        if err != nil {
-                            c.logger.Warn("Failed to save game info", zap.Error(err))
-                            continue
-                        }
-                    }
-                }
-            }
-        }
-    }
-    return res, nil
-}
-
-func (c *GnarlyCrawler) CrawlAll() ([]*model.GameItem, error) {
-    return c.Crawl(-1)
-}
-
-var parenthesesRegex = regexp.MustCompile(`\(([^)]+)\)`)
-
-func GnarlyFormatter(name string) string {
-    name = name[:strings.Index(name, " [Gnarly Repacks]")]
-    name = parenthesesRegex.ReplaceAllString(name, "")
-    return strings.TrimSpace(name)
-}
@@ -3,6 +3,7 @@ package crawler
 import (
     "encoding/json"
     "fmt"
+    "path"
     "strings"
     "time"
 
@@ -29,14 +30,20 @@ func (c *GOGGamesCrawler) Name() string {
     return "GOGGamesCrawler"
 }
 
-// URL is api url, like https://www.gog-games.to/api/v1/games/%s
 func (c *GOGGamesCrawler) CrawlByUrl(URL string) (*model.GameItem, error) {
-    token, err := utils.CCSTurnstileToken(config.Config.CFClearanceScraper.Url, URL, "0x4AAAAAAAfOlgvCKbOdW1zc")
+    if !strings.HasPrefix(URL, "https://www.gog-games.to/game/") {
+        return nil, fmt.Errorf("invalid url")
+    }
+    _, slug := path.Split(URL)
+
+    apiUrl := fmt.Sprintf(constant.GOGGamesGameAPIURL, slug)
+
+    token, err := utils.CCSTurnstileToken(config.Config.CFClearanceScraper.Url, apiUrl, "0x4AAAAAAAfOlgvCKbOdW1zc")
     if err != nil {
         return nil, err
     }
     resp, err := utils.Fetch(utils.FetchConfig{
-        Url: URL,
+        Url: apiUrl,
         Headers: map[string]string{
             "cf-turnstile-response": token,
         },
@@ -51,15 +58,34 @@ func (c *GOGGamesCrawler) CrawlByUrl(URL string) (*model.GameItem, error) {
     }
 
     name := data.Title
 
+    // find download links
+    fileHosters := []string{
+        "gofile",
+        "fileditch",
+        "qiwi",
+        "filesfm",
+        "pixeldrain",
+        "1fichier",
+    }
     links := make([]string, 0)
-    for _, link := range data.Links.Game.Gofile.Links {
-        links = append(links, link.Link)
-    }
-    if len(data.Links.Patch.Gofile.Links) > 0 {
-        for _, link := range data.Links.Patch.Gofile.Links {
+    for _, h := range fileHosters {
+        if value, exist := data.Links.Game[h]; exist {
+            for _, link := range value.Links {
                 links = append(links, link.Link)
             }
         }
+        if value, exist := data.Links.Patch[h]; exist {
+            for _, link := range value.Links {
+                links = append(links, link.Link)
+            }
+        }
+    }
+
+    if len(links) == 0 {
+        return nil, fmt.Errorf("no download link found")
+    }
+
     size := uint64(0)
     for _, file := range data.Files.Game {
         s, _ := utils.SizeToBytes(file.Size)
@@ -92,36 +118,32 @@ func (c *GOGGamesCrawler) Crawl(page int) ([]*model.GameItem, error) {
         return nil, err
     }
     urls := make([]string, 0)
-    updateFlags := []string{} //link+date
+    var updateFlags []string //link+date
     for _, item := range data.Data {
         urls = append(urls, fmt.Sprintf(constant.GOGGamesPageURL, item.Slug))
         updateFlags = append(updateFlags, fmt.Sprintf("%s%s", item.GogURL, item.LastUpdate))
     }
     res := make([]*model.GameItem, 0)
     for i, u := range urls {
-        c.logger.Info("Crawling", zap.String("URL", u))
         if db.IsGameCrawled(updateFlags[i], "GOGGames") {
             continue
         }
+        c.logger.Info("Crawling", zap.String("URL", u))
         item, err := c.CrawlByUrl(u)
         if err != nil {
             c.logger.Warn("Failed to crawl", zap.Error(err), zap.String("URL", u))
             continue
         }
+        item.UpdateFlag = updateFlags[i]
         if err := db.SaveGameItem(item); err != nil {
             c.logger.Warn("Failed to save", zap.Error(err), zap.String("URL", u))
             continue
         }
         res = append(res, item)
-        info, err := OrganizeGameItem(item)
-        if err != nil {
+        if err := OrganizeGameItem(item); err != nil {
             c.logger.Warn("Failed to organize", zap.Error(err), zap.String("URL", u))
             continue
         }
-        if err := db.SaveGameInfo(info); err != nil {
-            c.logger.Warn("Failed to save", zap.Error(err), zap.String("URL", u))
-            continue
-        }
     }
     return res, nil
 }
@@ -261,73 +283,21 @@ type gameResult struct {
                 } `json:"links"`
             } `json:"gofile"`
         } `json:"goodie"`
-        Game struct {
-            OneFichier struct {
+        Game map[string]struct {
             ID    string `json:"id"`
             Name  string `json:"name"`
             Links []struct {
                 Label string `json:"label"`
                 Link  string `json:"link"`
             } `json:"links"`
-            } `json:"1fichier"`
-            Vikingfile struct {
-                ID    string `json:"id"`
-                Name  string `json:"name"`
-                Links []struct {
-                    Label string `json:"label"`
-                    Link  string `json:"link"`
-                } `json:"links"`
-            } `json:"vikingfile"`
-            Pixeldrain struct {
-                ID    string `json:"id"`
-                Name  string `json:"name"`
-                Links []struct {
-                    Label string `json:"label"`
-                    Link  string `json:"link"`
-                } `json:"links"`
-            } `json:"pixeldrain"`
-            Gofile struct {
-                ID    string `json:"id"`
-                Name  string `json:"name"`
-                Links []struct {
-                    Label string `json:"label"`
-                    Link  string `json:"link"`
-                } `json:"links"`
-            } `json:"gofile"`
         } `json:"game"`
-        Patch struct {
-            OneFichier struct {
+        Patch map[string]struct {
             ID    string `json:"id"`
             Name  string `json:"name"`
             Links []struct {
                 Label string `json:"label"`
                 Link  string `json:"link"`
             } `json:"links"`
-            } `json:"1fichier"`
-            Vikingfile struct {
-                ID    string `json:"id"`
-                Name  string `json:"name"`
-                Links []struct {
-                    Label string `json:"label"`
-                    Link  string `json:"link"`
-                } `json:"links"`
-            } `json:"vikingfile"`
-            Pixeldrain struct {
-                ID    string `json:"id"`
-                Name  string `json:"name"`
-                Links []struct {
-                    Label string `json:"label"`
-                    Link  string `json:"link"`
-                } `json:"links"`
-            } `json:"pixeldrain"`
-            Gofile struct {
-                ID    string `json:"id"`
-                Name  string `json:"name"`
-                Links []struct {
-                    Label string `json:"label"`
-                    Link  string `json:"link"`
-                } `json:"links"`
-            } `json:"gofile"`
         } `json:"patch"`
     } `json:"links"`
     Files struct {
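Turning the `Game` and `Patch` link sections into `map[string]struct{...}` lets the crawler look up any file hoster by its JSON key instead of keeping one hard-coded nested struct per hoster. A self-contained sketch of how such a payload decodes; the JSON sample and field subset here are illustrative, not the real gog-games.to response:

```go
package main

import (
    "encoding/json"
    "fmt"
)

type hoster struct {
    Links []struct {
        Link string `json:"link"`
    } `json:"links"`
}

type links struct {
    Game  map[string]hoster `json:"game"`
    Patch map[string]hoster `json:"patch"`
}

func main() {
    raw := []byte(`{"game":{"gofile":{"links":[{"link":"https://example.com/a"}]},
        "pixeldrain":{"links":[{"link":"https://example.com/b"}]}},"patch":{}}`)

    var l links
    if err := json.Unmarshal(raw, &l); err != nil {
        panic(err)
    }

    // Iterate a fixed list of hosters and collect whichever keys are present.
    var collected []string
    for _, h := range []string{"gofile", "fileditch", "qiwi", "filesfm", "pixeldrain", "1fichier"} {
        if v, ok := l.Game[h]; ok {
            for _, lk := range v.Links {
                collected = append(collected, lk.Link)
            }
        }
    }
    fmt.Println(collected) // [https://example.com/a https://example.com/b]
}
```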
@@ -237,7 +237,7 @@ func GetIGDBCompany(id int) (string, error) {
         return "", err
     }
     if len(data) == 0 {
-        return "", errors.New("Not found")
+        return "", errors.New("not found")
     }
     if data[0].Name == "" {
         return GetIGDBCompany(id)
@@ -311,7 +311,7 @@ func GenerateIGDBGameInfo(id int) (*model.GameInfo, error) {
     return item, nil
 }
 
-// id=0, means search id by name
+// OrganizeGameItemWithIGDB Will add GameItem.ID to the newly added GameInfo.GameIDs
 func OrganizeGameItemWithIGDB(id int, game *model.GameItem) (*model.GameInfo, error) {
     var err error
     if id == 0 {
@@ -364,7 +364,7 @@ func GetIGDBIDBySteamID(id int) (int, error) {
         return 0, err
     }
     if len(data) == 0 {
-        return 0, errors.New("Not found")
+        return 0, errors.New("not found")
     }
     if data[0].Game == 0 {
         return GetIGDBIDBySteamID(id)
@@ -40,7 +40,7 @@ func (c *OnlineFixCrawler) Name() string {
 func (c *OnlineFixCrawler) Crawl(page int) ([]*model.GameItem, error) {
     if !config.Config.OnlineFixAvaliable {
         c.logger.Error("Need Online Fix account")
-        return nil, errors.New("Online Fix is not available")
+        return nil, errors.New("online Fix is not available")
     }
     if len(c.cookies) == 0 {
         err := c.login()
@@ -66,8 +66,8 @@ func (c *OnlineFixCrawler) Crawl(page int) ([]*model.GameItem, error) {
         c.logger.Error("Failed to parse HTML", zap.Error(err))
         return nil, err
     }
-    urls := []string{}
-    updateFlags := []string{} //link+date
+    var urls []string
+    var updateFlags []string //link+date
     doc.Find("article.news").Each(func(i int, s *goquery.Selection) {
         urls = append(urls, s.Find(".big-link").First().AttrOr("href", ""))
         updateFlags = append(
@@ -95,16 +95,10 @@ func (c *OnlineFixCrawler) Crawl(page int) ([]*model.GameItem, error) {
             continue
         }
         res = append(res, item)
-        info, err := OrganizeGameItem(item)
-        if err != nil {
+        if err := OrganizeGameItem(item); err != nil {
             c.logger.Warn("Failed to organize", zap.Error(err), zap.String("URL", u))
             continue
         }
-        err = db.SaveGameInfo(info)
-        if err != nil {
-            c.logger.Warn("Failed to save", zap.Error(err), zap.String("URL", u))
-            continue
-        }
     }
     return res, nil
 }
@@ -130,12 +124,12 @@ func (c *OnlineFixCrawler) CrawlByUrl(url string) (*model.GameItem, error) {
     titleRegex := regexp.MustCompile(`(?i)<h1.*?>(.*?)</h1>`)
     titleRegexRes := titleRegex.FindAllStringSubmatch(string(resp.Data), -1)
     if len(titleRegexRes) == 0 {
-        return nil, errors.New("Failed to find title")
+        return nil, errors.New("failed to find title")
     }
     downloadRegex := regexp.MustCompile(`(?i)<a[^>]*\bhref="([^"]+)"[^>]*>(Скачать Torrent|Скачать торрент)</a>`)
     downloadRegexRes := downloadRegex.FindAllStringSubmatch(string(resp.Data), -1)
     if len(downloadRegexRes) == 0 {
-        return nil, errors.New("Failed to find download button")
+        return nil, errors.New("failed to find download button")
     }
     item, err := db.GetGameItemByUrl(url)
     if err != nil {
@@ -160,7 +154,7 @@ func (c *OnlineFixCrawler) CrawlByUrl(url string) (*model.GameItem, error) {
     magnetRegex := regexp.MustCompile(`(?i)"(.*?).torrent"`)
     magnetRegexRes := magnetRegex.FindAllStringSubmatch(string(resp.Data), -1)
     if len(magnetRegexRes) == 0 {
-        return nil, errors.New("Failed to find magnet")
+        return nil, errors.New("failed to find magnet")
     }
     resp, err = utils.Fetch(utils.FetchConfig{
         Url: downloadRegexRes[0][1] + strings.Trim(magnetRegexRes[0][0], "\""),
@@ -179,12 +173,12 @@ func (c *OnlineFixCrawler) CrawlByUrl(url string) (*model.GameItem, error) {
     } else if strings.Contains(downloadRegexRes[0][1], "online-fix.me/ext") {
         if strings.Contains(string(resp.Data), "mega.nz") {
             if !config.Config.MegaAvaliable {
-                return nil, errors.New("Mega is not avaliable")
+                return nil, errors.New("mega is not avaliable")
             }
             megaRegex := regexp.MustCompile(`(?i)location.href=\\'([^\\']*)\\'`)
             megaRegexRes := megaRegex.FindAllStringSubmatch(string(resp.Data), -1)
             if len(megaRegexRes) == 0 {
-                return nil, errors.New("Failed to find download link")
+                return nil, errors.New("failed to find download link")
             }
             path, files, err := utils.MegaDownload(megaRegexRes[0][1], "torrent")
             if err != nil {
@@ -207,10 +201,10 @@ func (c *OnlineFixCrawler) CrawlByUrl(url string) (*model.GameItem, error) {
             }
             _ = os.RemoveAll(path)
         } else {
-            return nil, errors.New("Failed to find download link")
+            return nil, errors.New("failed to find download link")
         }
     } else {
-        return nil, errors.New("Failed to find download link")
+        return nil, errors.New("failed to find download link")
     }
     return item, nil
 }
|
@ -35,7 +35,7 @@ func _GetSteamID(name string) (int, error) {
|
|||||||
nameRegexRes := nameRegex.FindAllStringSubmatch(string(resp.Data), -1)
|
nameRegexRes := nameRegex.FindAllStringSubmatch(string(resp.Data), -1)
|
||||||
|
|
||||||
if len(idRegexRes) == 0 {
|
if len(idRegexRes) == 0 {
|
||||||
return 0, fmt.Errorf("Steam ID not found: %s", name)
|
return 0, fmt.Errorf("steam ID not found: %s", name)
|
||||||
}
|
}
|
||||||
|
|
||||||
maxSim := 0.0
|
maxSim := 0.0
|
||||||
@@ -59,7 +59,7 @@ func _GetSteamID(name string) (int, error) {
     if maxSimID != 0 {
         return maxSimID, nil
     }
-    return 0, fmt.Errorf("Steam ID not found: %s", name)
+    return 0, fmt.Errorf("steam ID not found: %s", name)
 }
 
 func GetSteamID(name string) (int, error) {
@@ -75,7 +75,7 @@ func GetSteamID(name string) (int, error) {
             return id, nil
         }
     }
-    return 0, errors.New("Steam ID not found")
+    return 0, errors.New("steam ID not found")
 }
 
 func GetSteamIDCache(name string) (int, error) {
@@ -121,10 +121,10 @@ func GetSteamAppDetail(id int) (*model.SteamAppDetail, error) {
         return nil, err
     }
     if _, ok := detail[strconv.Itoa(id)]; !ok {
-        return nil, fmt.Errorf("Steam App not found: %d", id)
+        return nil, fmt.Errorf("steam App not found: %d", id)
     }
     if detail[strconv.Itoa(id)] == nil {
-        return nil, fmt.Errorf("Steam App not found: %d", id)
+        return nil, fmt.Errorf("steam App not found: %d", id)
     }
     return detail[strconv.Itoa(id)], nil
 }
@@ -168,7 +168,7 @@ func GenerateSteamGameInfo(id int) (*model.GameInfo, error) {
     item.Cover = fmt.Sprintf("https://shared.cloudflare.steamstatic.com/store_item_assets/steam/apps/%v/library_600x900_2x.jpg", id)
     item.Developers = detail.Data.Developers
     item.Publishers = detail.Data.Publishers
-    screenshots := []string{}
+    var screenshots []string
     for _, screenshot := range detail.Data.Screenshots {
         screenshots = append(screenshots, screenshot.PathFull)
     }
@@ -176,6 +176,7 @@ func GenerateSteamGameInfo(id int) (*model.GameInfo, error) {
     return item, nil
 }
 
+// OrganizeGameItemWithSteam Will add GameItem.ID to the newly added GameInfo.GameIDs
 func OrganizeGameItemWithSteam(id int, game *model.GameItem) (*model.GameInfo, error) {
     var err error
     if id == 0 {
@@ -229,14 +230,14 @@ func GetSteamIDByIGDBID(IGDBID int) (int, error) {
         return 0, err
     }
     if len(data) == 0 {
-        return 0, errors.New("Not found")
+        return 0, errors.New("not found")
     }
     for _, v := range data {
         if strings.HasPrefix(v.Url, "https://store.steampowered.com/app/") {
             regex := regexp.MustCompile(`https://store.steampowered.com/app/(\d+)/?`)
             idStr := regex.FindStringSubmatch(v.Url)
             if len(idStr) < 2 {
-                return 0, errors.New("Failed parse")
+                return 0, errors.New("failed parse")
             }
             steamID, err := strconv.Atoi(idStr[1])
             if err != nil {
@@ -245,7 +246,7 @@ func GetSteamIDByIGDBID(IGDBID int) (int, error) {
             return steamID, nil
         }
     }
-    return 0, errors.New("Not found")
+    return 0, errors.New("not found")
 }
 
 func GetSteamIDByIGDBIDCache(IGDBID int) (int, error) {
@@ -76,7 +76,7 @@ func (c *SteamRIPCrawler) CrawlByUrl(url string) (*model.GameItem, error) {
         }
     }
     if item.Download == "" {
-        return nil, errors.New("Failed to find download link")
+        return nil, errors.New("failed to find download link")
     }
 
     return item, nil
@@ -95,8 +95,8 @@ func (c *SteamRIPCrawler) Crawl(num int) ([]*model.GameItem, error) {
         return nil, err
     }
     var items []*model.GameItem
-    urls := []string{}
-    updateFlags := []string{} // title
+    var urls []string
+    var updateFlags []string // title
     doc.Find(".az-list-item>a").Each(func(i int, s *goquery.Selection) {
         u, exist := s.Attr("href")
         if !exist {
@@ -125,16 +125,10 @@ func (c *SteamRIPCrawler) Crawl(num int) ([]*model.GameItem, error) {
         }
         items = append(items, item)
         count++
-        info, err := OrganizeGameItem(item)
-        if err != nil {
+        if err := OrganizeGameItem(item); err != nil {
             c.logger.Warn("Failed to organize", zap.Error(err), zap.String("URL", u))
             continue
         }
-        err = db.SaveGameInfo(info)
-        if err != nil {
-            c.logger.Warn("Failed to save", zap.Error(err), zap.String("URL", u))
-            continue
-        }
     }
     return items, nil
 }
@@ -45,8 +45,8 @@ func (c *XatabCrawler) Crawl(page int) ([]*model.GameItem, error) {
         c.logger.Error("Failed to parse HTML", zap.Error(err))
         return nil, err
     }
-    urls := []string{}
-    updateFlags := []string{} // title
+    var urls []string
+    var updateFlags []string // title
     doc.Find(".entry").Each(func(i int, s *goquery.Selection) {
         u, exist := s.Find(".entry__title.h2 a").Attr("href")
         if !exist {
@@ -72,16 +72,10 @@ func (c *XatabCrawler) Crawl(page int) ([]*model.GameItem, error) {
             continue
         }
         res = append(res, item)
-        info, err := OrganizeGameItem(item)
-        if err != nil {
+        if err := OrganizeGameItem(item); err != nil {
             c.logger.Warn("Failed to organize", zap.Error(err), zap.String("URL", u))
             continue
         }
-        err = db.SaveGameInfo(info)
-        if err != nil {
-            c.logger.Warn("Failed to save", zap.Error(err), zap.String("URL", u))
-            continue
-        }
     }
     return res, nil
 }
@@ -108,7 +102,7 @@ func (c *XatabCrawler) CrawlByUrl(url string) (*model.GameItem, error) {
     item.UpdateFlag = item.RawName
     downloadURL := doc.Find("#download>a").First().AttrOr("href", "")
    if downloadURL == "" {
-        return nil, errors.New("Failed to find download URL")
+        return nil, errors.New("failed to find download URL")
     }
     resp, err = utils.Fetch(utils.FetchConfig{
         Headers: map[string]string{"Referer": url},
|
@ -41,6 +41,15 @@ func (c *CustomCollection) UpdateOne(ctx context.Context, filter interface{}, up
|
|||||||
return c.coll.UpdateOne(ctx, filter, update, opts...)
|
return c.coll.UpdateOne(ctx, filter, update, opts...)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (c *CustomCollection) UpdateMany(ctx context.Context, filter interface{}, update interface{},
|
||||||
|
opts ...*options.UpdateOptions) (*mongo.UpdateResult, error) {
|
||||||
|
CheckConnect()
|
||||||
|
if c.coll == nil {
|
||||||
|
c.coll = mongoDB.Database(config.Config.Database.Database).Collection(c.collName)
|
||||||
|
}
|
||||||
|
return c.coll.UpdateMany(ctx, filter, update, opts...)
|
||||||
|
}
|
||||||
|
|
||||||
func (c *CustomCollection) Aggregate(ctx context.Context, pipeline interface{},
|
func (c *CustomCollection) Aggregate(ctx context.Context, pipeline interface{},
|
||||||
opts ...*options.AggregateOptions) (*mongo.Cursor, error) {
|
opts ...*options.AggregateOptions) (*mongo.Cursor, error) {
|
||||||
CheckConnect()
|
CheckConnect()
|
||||||
@@ -94,3 +103,12 @@ func (c *CustomCollection) InsertMany(ctx context.Context, documents []interface
     }
     return c.coll.InsertMany(ctx, documents, opts...)
 }
+
+func (c *CustomCollection) BulkWrite(ctx context.Context, models []mongo.WriteModel,
+    opts ...*options.BulkWriteOptions) (*mongo.BulkWriteResult, error) {
+    CheckConnect()
+    if c.coll == nil {
+        c.coll = mongoDB.Database(config.Config.Database.Database).Collection(c.collName)
+    }
+    return c.coll.BulkWrite(ctx, models, opts...)
+}
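The new `UpdateMany` and `BulkWrite` wrappers pass straight through to the official mongo-driver after the usual connection check. A minimal, self-contained sketch of the driver-level bulk-upsert pattern that `SaveGameInfos` (added further down in db/game.go) builds on; the connection string, database, collection, and document values here are placeholders, not taken from the repository:

```go
package main

import (
    "context"
    "log"
    "time"

    "go.mongodb.org/mongo-driver/bson"
    "go.mongodb.org/mongo-driver/mongo"
    "go.mongodb.org/mongo-driver/mongo/options"
)

func main() {
    ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
    defer cancel()

    client, err := mongo.Connect(ctx, options.Client().ApplyURI("mongodb://localhost:27017")) // placeholder URI
    if err != nil {
        log.Fatal(err)
    }
    coll := client.Database("example").Collection("docs") // placeholder names

    // One upsert model per document, mirroring the shape used by SaveGameInfos.
    models := []mongo.WriteModel{
        mongo.NewUpdateOneModel().
            SetFilter(bson.D{{Key: "_id", Value: "a"}}).
            SetUpdate(bson.D{{Key: "$set", Value: bson.D{{Key: "n", Value: 1}}}}).
            SetUpsert(true),
    }
    res, err := coll.BulkWrite(ctx, models, options.BulkWrite().SetOrdered(false))
    if err != nil {
        log.Fatal(err)
    }
    log.Println("upserted:", res.UpsertedCount)
}
```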
@@ -5,15 +5,15 @@ import (
     "encoding/json"
     "time"
 
-    "pcgamedb/model"
     "go.mongodb.org/mongo-driver/bson"
+    "pcgamedb/model"
 )
 
 func Export() ([]byte, []byte, error) {
     ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
     defer cancel()
-    infos := []model.GameInfo{}
-    games := []model.GameItem{}
+    var infos []model.GameInfo
+    var games []model.GameItem
     cursor, err := GameInfoCollection.Find(ctx, bson.M{})
     if err != nil {
         return nil, nil, err
db/game.go (141 changed lines)
@@ -6,7 +6,6 @@ import (
     "errors"
     "fmt"
     "regexp"
-    "slices"
     "strings"
     "time"
 
@@ -34,7 +33,9 @@ func GetGameItemsByAuthor(regex string) ([]*model.GameItem, error) {
     if err != nil {
         return nil, err
     }
-    defer cursor.Close(ctx)
+    defer func(cursor *mongo.Cursor, ctx context.Context) {
+        _ = cursor.Close(ctx)
+    }(cursor, ctx)
     if cursor.Err() != nil {
         return nil, cursor.Err()
     }
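The repeated `defer cursor.Close(ctx)` to `defer func(...){ _ = cursor.Close(ctx) }(...)` rewrite in this file exists only to discard `Close`'s error explicitly, a common way to satisfy errcheck-style linters. A generic, self-contained illustration of the same pattern (using a stub type, not the mongo cursor itself):

```go
package main

import (
    "errors"
    "fmt"
)

type resource struct{}

func (r *resource) Close() error { return errors.New("close failed") }

func use(r *resource) {
    // Explicitly discard the error from Close, as the refactored db helpers do
    // for cursor.Close(ctx); the deferred closure makes the discard visible.
    defer func(r *resource) {
        _ = r.Close()
    }(r)

    fmt.Println("using resource")
}

func main() {
    use(&resource{})
}
```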
@@ -61,7 +62,9 @@ func GetGameItemsByAuthorPagination(regex string, page int, pageSize int) ([]*mo
     if err != nil {
         return nil, 0, err
     }
-    defer cursor.Close(ctx)
+    defer func(cursor *mongo.Cursor, ctx context.Context) {
+        _ = cursor.Close(ctx)
+    }(cursor, ctx)
     if cursor.Err() != nil {
         return nil, 0, cursor.Err()
     }
@@ -148,6 +151,33 @@ func SaveGameInfo(item *model.GameInfo) error {
     return nil
 }
 
+func SaveGameInfos(items []*model.GameInfo) error {
+    ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
+    defer cancel()
+
+    operations := make([]mongo.WriteModel, len(items))
+    for i, item := range items {
+        if item.ID.IsZero() {
+            item.ID = primitive.NewObjectID()
+        }
+        if item.CreatedAt.IsZero() {
+            item.CreatedAt = time.Now()
+        }
+        item.UpdatedAt = time.Now()
+        operations[i] = mongo.NewUpdateOneModel().
+            SetFilter(bson.D{{Key: "_id", Value: item.ID}}).
+            SetUpdate(bson.D{{Key: "$set", Value: item}}).
+            SetUpsert(true)
+    }
+
+    opts := options.BulkWrite().SetOrdered(false)
+    _, err := GameInfoCollection.BulkWrite(ctx, operations, opts)
+    if err != nil {
+        return err
+    }
+    return nil
+}
+
 func GetAllGameItems() ([]*model.GameItem, error) {
     var items []*model.GameItem
     ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
@@ -156,7 +186,9 @@ func GetAllGameItems() ([]*model.GameItem, error) {
     if err != nil {
         return nil, err
     }
-    defer cursor.Close(ctx)
+    defer func(cursor *mongo.Cursor, ctx context.Context) {
+        _ = cursor.Close(ctx)
+    }(cursor, ctx)
     for cursor.Next(ctx) {
         var game model.GameItem
         if err = cursor.Decode(&game); err != nil {
@@ -205,7 +237,9 @@ func GetGameItemsByIDs(ids []primitive.ObjectID) ([]*model.GameItem, error) {
     if err != nil {
         return nil, err
     }
-    defer cursor.Close(ctx)
+    defer func(cursor *mongo.Cursor, ctx context.Context) {
+        _ = cursor.Close(ctx)
+    }(cursor, ctx)
     for cursor.Next(ctx) {
         var game model.GameItem
         if err = cursor.Decode(&game); err != nil {
@@ -244,7 +278,9 @@ func SearchGameInfos(name string, page int, pageSize int) ([]*model.GameInfo, in
     if err != nil {
         return nil, 0, err
     }
-    defer cursor.Close(ctx)
+    defer func(cursor *mongo.Cursor, ctx context.Context) {
+        _ = cursor.Close(ctx)
+    }(cursor, ctx)
     for cursor.Next(ctx) {
         var game model.GameInfo
         if err = cursor.Decode(&game); err != nil {
@ -313,6 +349,24 @@ func GetGameInfoByPlatformID(platform string, id int) (*model.GameInfo, error) {
|
|||||||
return &game, nil
|
return &game, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func HasGameItemOrganized(id primitive.ObjectID) (bool, []*model.GameInfo) {
|
||||||
|
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
|
||||||
|
defer cancel()
|
||||||
|
filter := bson.M{"games": id}
|
||||||
|
var res []*model.GameInfo
|
||||||
|
cursor, err := GameInfoCollection.Find(ctx, filter)
|
||||||
|
if err != nil {
|
||||||
|
return false, nil
|
||||||
|
}
|
||||||
|
if err = cursor.All(ctx, &res); err != nil {
|
||||||
|
return false, nil
|
||||||
|
}
|
||||||
|
if len(res) == 0 {
|
||||||
|
return false, nil
|
||||||
|
}
|
||||||
|
return true, res
|
||||||
|
}
|
||||||
|
|
||||||
func GetUnorganizedGameItems(num int) ([]*model.GameItem, error) {
func GetUnorganizedGameItems(num int) ([]*model.GameItem, error) {
    ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
    ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
    defer cancel()
    defer cancel()
@ -339,7 +393,9 @@ func GetUnorganizedGameItems(num int) ([]*model.GameItem, error) {
    if err != nil {
    if err != nil {
        return nil, err
        return nil, err
    }
    }
    defer cursor.Close(ctx)
    defer func(cursor *mongo.Cursor, ctx context.Context) {
        _ = cursor.Close(ctx)
    }(cursor, ctx)

    for cursor.Next(ctx) {
    for cursor.Next(ctx) {
        var game model.GameItem
        var game model.GameItem
@ -368,28 +424,33 @@ func GetGameInfoByID(id primitive.ObjectID) (*model.GameInfo, error) {
}
}

func DeduplicateGameItems() ([]primitive.ObjectID, error) {
func DeduplicateGameItems() ([]primitive.ObjectID, error) {
    ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
    defer cancel()

    type queryRes struct {
    type queryRes struct {
        ID    string               `bson:"_id"`
        ID    string               `bson:"_id"`
        Total int                  `bson:"total"`
        Count int                  `bson:"count"`
        IDs   []primitive.ObjectID `bson:"ids"`
        IDs   []primitive.ObjectID `bson:"ids"`
    }
    }

    var res []primitive.ObjectID
    var res []primitive.ObjectID

    ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
    defer cancel()

    var qres []queryRes
    pipeline := mongo.Pipeline{
    pipeline := mongo.Pipeline{
        bson.D{{Key: "$group", Value: bson.D{
        bson.D{{Key: "$group", Value: bson.D{
            {Key: "_id", Value: "$download"},
            {Key: "_id", Value: bson.D{
            {Key: "total", Value: bson.D{{Key: "$sum", Value: 1}}},
                {Key: "raw_name", Value: "$raw_name"},
                {Key: "download", Value: "$download"},
            }},
            {Key: "count", Value: bson.D{{Key: "$sum", Value: 1}}},
            {Key: "ids", Value: bson.D{{Key: "$push", Value: "$_id"}}},
            {Key: "ids", Value: bson.D{{Key: "$push", Value: "$_id"}}},
        }}},
        }}},
        bson.D{{Key: "$match", Value: bson.D{
        bson.D{{Key: "$match", Value: bson.D{
            {Key: "total", Value: bson.D{{Key: "$gt", Value: 1}}},
            {Key: "count", Value: bson.D{{Key: "$gt", Value: 1}}},
        }}},
        }}},
    }
    }

    var qres []queryRes

    cursor, err := GameItemCollection.Aggregate(ctx, pipeline)
    cursor, err := GameItemCollection.Aggregate(ctx, pipeline)
    if err != nil {
    if err != nil {
        return nil, err
        return nil, err
@ -397,34 +458,14 @@ func DeduplicateGameItems() ([]primitive.ObjectID, error) {
    if err = cursor.All(ctx, &qres); err != nil {
    if err = cursor.All(ctx, &qres); err != nil {
        return nil, err
        return nil, err
    }
    }

    for _, item := range qres {
    for _, item := range qres {
        idsToDelete := item.IDs[:len(item.IDs)-1]
        res = append(res, item.IDs[1:]...)
        res = append(res, idsToDelete...)
    }
        _, err = GameItemCollection.DeleteMany(ctx, bson.D{{Key: "_id", Value: bson.D{{Key: "$in", Value: idsToDelete}}}})
    err = DeleteGameItemsByIDs(res)
    if err != nil {
    if err != nil {
        return nil, err
        return nil, err
    }
    }
        cursor, err := GameInfoCollection.Find(ctx, bson.M{"games": bson.M{"$in": idsToDelete}})
        if err != nil {
            return nil, err
        }
        var infos []*model.GameInfo
        if err := cursor.All(ctx, &infos); err != nil {
            return nil, err
        }
        for _, info := range infos {
            newGames := make([]primitive.ObjectID, 0, len(info.GameIDs))
            for _, id := range info.GameIDs {
                if !slices.Contains(idsToDelete, id) {
                    newGames = append(newGames, id)
                }
            }
            info.GameIDs = newGames
            if err := SaveGameInfo(info); err != nil {
                return nil, err
            }
        }
    }
    _, _ = CleanOrphanGamesInGameInfos()
    _, _ = CleanOrphanGamesInGameInfos()
    return res, nil
    return res, nil
}
}
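After the refactor, DeduplicateGameItems only collects the ids to drop (everything but the first id in each (raw_name, download) group) and delegates the actual cleanup to DeleteGameItemsByIDs. A hedged usage sketch of the entry point; the caller and output are illustrative, the signature comes from the diff.

package example

import (
    "fmt"

    "pcgamedb/db"
)

// dedupe runs the refactored DeduplicateGameItems and reports how many
// duplicate GameItem documents were removed.
func dedupe() error {
    removed, err := db.DeduplicateGameItems()
    if err != nil {
        return err
    }
    fmt.Printf("removed %d duplicate game items\n", len(removed))
    return nil
}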
@ -690,6 +731,28 @@ func DeleteGameItemByID(id primitive.ObjectID) error {
    return nil
    return nil
}
}

func DeleteGameItemsByIDs(ids []primitive.ObjectID) error {
    if len(ids) == 0 {
        return nil
    }
    ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
    defer cancel()
    _, err := GameItemCollection.DeleteMany(ctx, bson.M{"_id": bson.M{"$in": ids}})
    if err != nil {
        return err
    }
    update := bson.D{{Key: "$pull", Value: bson.D{
        {Key: "games", Value: bson.D{
            {Key: "$in", Value: ids},
        }},
    }}}
    _, err = GameInfoCollection.UpdateMany(ctx, bson.M{}, update)
    if err != nil {
        return err
    }
    return nil
}

func GetAllAuthors() ([]string, error) {
func GetAllAuthors() ([]string, error) {
    ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
    ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
    defer cancel()
    defer cancel()
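DeleteGameItemsByIDs pairs the DeleteMany with a $pull on every GameInfo's games array, so no info document keeps a dangling reference to a removed item. A hedged usage sketch; the ids and wrapper function below are illustrative.

package example

import (
    "go.mongodb.org/mongo-driver/bson/primitive"

    "pcgamedb/db"
)

// pruneItems removes a batch of GameItem documents; the $pull update inside
// DeleteGameItemsByIDs also drops their ids from every GameInfo.games list.
func pruneItems(ids []primitive.ObjectID) error {
    if len(ids) == 0 {
        return nil // the helper also short-circuits on an empty slice
    }
    return db.DeleteGameItemsByIDs(ids)
}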
@ -9,7 +9,7 @@ import (
)
)

func ImportGameInfo(filePath string) error {
func ImportGameInfo(filePath string) error {
    gameInfo := []*model.GameInfo{}
    var gameInfo []*model.GameInfo
    data, err := os.ReadFile(filePath)
    data, err := os.ReadFile(filePath)
    if err != nil {
    if err != nil {
        return err
        return err
@ -27,7 +27,7 @@ func ImportGameInfo(filePath string) error {
}
}

func ImportGameItem(filePath string) error {
func ImportGameItem(filePath string) error {
    gameItem := []*model.GameItem{}
    var gameItem []*model.GameItem
    data, err := os.ReadFile(filePath)
    data, err := os.ReadFile(filePath)
    if err != nil {
    if err != nil {
        return err
        return err
@ -1,10 +1,9 @@
services:
services:
  pcgamedb:
  pcgamedb:
    build: .
    image: nite07/pcgamedb
    container_name: pcgamedb
    restart: unless-stopped
    restart: unless-stopped
    ports:
    ports:
      - 127.0.0.1:8080:8080
      - "8080:8080"
    environment:
    environment:
      - LOG_LEVEL=info
      - LOG_LEVEL=info
      - SERVER_PORT=8080
      - SERVER_PORT=8080
@ -18,7 +17,6 @@ services:
      - REDIS_DB=0
      - REDIS_DB=0
      # Read more about environment variables: config/config.go
      # Read more about environment variables: config/config.go
  pcgamedb-mongodb:
  pcgamedb-mongodb:
    container_name: pcgamedb-mongodb
    image: mongo:latest
    image: mongo:latest
    restart: unless-stopped
    restart: unless-stopped
    environment:
    environment:
@ -28,7 +26,6 @@ services:
      - ./mongodb:/data/db
      - ./mongodb:/data/db
  pcgamedb-redis:
  pcgamedb-redis:
    image: redis:latest
    image: redis:latest
    container_name: pcgamedb-redis
    volumes:
    volumes:
      - ./redis:/data
      - ./redis:/data
    command: redis-server --appendonly yes
    command: redis-server --appendonly yes
File diff suppressed because one or more lines are too long
@ -7,7 +7,7 @@ import (
    "github.com/gin-contrib/cors"
    "github.com/gin-contrib/cors"
    "github.com/gin-gonic/gin"
    "github.com/gin-gonic/gin"

    docs "pcgamedb/docs"
    "pcgamedb/docs"

    swaggerfiles "github.com/swaggo/files"
    swaggerfiles "github.com/swaggo/files"
    ginSwagger "github.com/swaggo/gin-swagger"
    ginSwagger "github.com/swaggo/gin-swagger"
@ -10,6 +10,7 @@ import (
    "pcgamedb/model"
    "pcgamedb/model"
    "pcgamedb/utils"
    "pcgamedb/utils"

    "go.mongodb.org/mongo-driver/bson/primitive"
    "go.uber.org/zap"
    "go.uber.org/zap"
)
)

@ -46,8 +47,17 @@ func Crawl(logger *zap.Logger) {
    Clean(logger)
    Clean(logger)

    // trigger webhooks
    // trigger webhooks
    infos := []*model.GameInfo{}
    var ids []primitive.ObjectID
    for _, game := range games {
    for _, game := range games {
        ids = append(ids, game.ID)
    }
    items, err := db.GetGameItemsByIDs(ids)
    if err != nil {
        logger.Error("Failed to get game items", zap.Error(err))
        return
    }
    var infos []*model.GameInfo
    for _, game := range items {
        info, err := db.GetGameInfoByGameItemID(game.ID)
        info, err := db.GetGameInfoByGameItemID(game.ID)
        if err != nil {
        if err != nil {
            logger.Error("Failed to get game info", zap.Error(err))
            logger.Error("Failed to get game info", zap.Error(err))
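The reworked webhook section first collects the crawled item ids and then reloads them with a single GetGameItemsByIDs call instead of touching the database per item. A standalone sketch of that pattern; the wrapper function is illustrative, the db and model signatures come from the diff.

package example

import (
    "go.mongodb.org/mongo-driver/bson/primitive"

    "pcgamedb/db"
    "pcgamedb/model"
)

// batchFetch gathers the ids of freshly crawled items and loads them again in
// one GetGameItemsByIDs query rather than one query per item.
func batchFetch(games []*model.GameItem) ([]*model.GameItem, error) {
    var ids []primitive.ObjectID
    for _, game := range games {
        ids = append(ids, game.ID)
    }
    return db.GetGameItemsByIDs(ids)
}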
@ -55,7 +55,7 @@ func CCSWAFSession(ccsUrl string, requestUrl string) (*WAFSession, error) {
        return nil, err
        return nil, err
    }
    }
    if response.Code != 200 {
    if response.Code != 200 {
        return nil, errors.New("Failed to get WAF session")
        return nil, errors.New("failed to get WAF session")
    }
    }
    return &response, nil
    return &response, nil
}
}
@ -84,7 +84,7 @@ func CCSSource(ccsUrl string, requestUrl string) (string, error) {
        return "", err
        return "", err
    }
    }
    if ccsResp.Code != 200 {
    if ccsResp.Code != 200 {
        return "", errors.New("Failed to get source")
        return "", errors.New("failed to get source")
    }
    }
    return ccsResp.Source, nil
    return ccsResp.Source, nil
}
}
@ -113,7 +113,7 @@ func CCSTurnstileToken(ccsUrl string, requestUrl string, siteKey string) (string
        return "", err
        return "", err
    }
    }
    if ccsResp.Code != 200 {
    if ccsResp.Code != 200 {
        return "", errors.New("Failed to get source")
        return "", errors.New("failed to get source")
    }
    }
    return ccsResp.Token, nil
    return ccsResp.Token, nil
}
}
@ -141,7 +141,7 @@ func CCSTurnstileMaxToken(ccsUrl string, requestUrl string) (string, error) {
        return "", err
        return "", err
    }
    }
    if ccsResp.Code != 200 {
    if ccsResp.Code != 200 {
        return "", errors.New("Failed to get source")
        return "", errors.New("failed to get source")
    }
    }
    return ccsResp.Token, nil
    return ccsResp.Token, nil
}
}
@ -15,7 +15,7 @@ func ConvertTorrentToMagnet(torrent []byte) (string, string, error) {
    if err != nil {
    if err != nil {
        return "", "", err
        return "", "", err
    }
    }
    var size uint64 = uint64(info.Length)
    var size = uint64(info.Length)
    if size == 0 {
    if size == 0 {
        for _, file := range info.Files {
        for _, file := range info.Files {
            size += uint64(file.Length)
            size += uint64(file.Length)
@ -39,7 +39,7 @@ func MegaDownload(url string, path string) (string, []string, error) {
    pathRegex := regexp.MustCompile(`(?i)Download finished: (.*)`)
    pathRegex := regexp.MustCompile(`(?i)Download finished: (.*)`)
    pathRegexRes := pathRegex.FindAllStringSubmatch(out.String(), -1)
    pathRegexRes := pathRegex.FindAllStringSubmatch(out.String(), -1)
    if len(pathRegexRes) == 0 {
    if len(pathRegexRes) == 0 {
        return "", nil, errors.New("Mega download failed")
        return "", nil, errors.New("mega download failed")
    }
    }
    pathRegexRes[0][1] = strings.TrimSpace(pathRegexRes[0][1])
    pathRegexRes[0][1] = strings.TrimSpace(pathRegexRes[0][1])
    res, err := walkDir(pathRegexRes[0][1])
    res, err := walkDir(pathRegexRes[0][1])
@ -54,7 +54,7 @@ func walkDir(path string) ([]string, error) {
    if err != nil {
    if err != nil {
        return nil, err
        return nil, err
    }
    }
    res := []string{}
    var res []string
    for _, file := range files {
    for _, file := range files {
        if file.IsDir() {
        if file.IsDir() {
            subFiles, err := walkDir(filepath.Join(path, file.Name()))
            subFiles, err := walkDir(filepath.Join(path, file.Name()))
@ -33,7 +33,7 @@ func padStart(s string, minLength int, padRune rune) string {

func DecryptPrivateBin(url string, password string) (string, error) {
func DecryptPrivateBin(url string, password string) (string, error) {
    if !strings.Contains(url, "#") {
    if !strings.Contains(url, "#") {
        return "", errors.New("Missing Decrypt Key")
        return "", errors.New("missing Decrypt Key")
    }
    }
    key := strings.Split(url, "#")[1]
    key := strings.Split(url, "#")[1]
    resp, err := Fetch(FetchConfig{
    resp, err := Fetch(FetchConfig{
@ -1,21 +1,20 @@
package utils
package utils

import "strings"
import (
    "strings"
)

func min(a, b, c int) int {
func minInt(nums ...int) int {
    if a < b {
    m := nums[0]
        if a < c {
    for _, num := range nums {
            return a
        if num < m {
            m = num
        }
        }
        return c
    }
    }
    if b < c {
    return m
        return b
    }
    return c
}
}

func LevenshteinDistance(str1, str2 string) int {
func levenshteinDistance(str1, str2 string) int {
    str1 = strings.ToLower(str1)
    str1 = strings.ToLower(str1)
    str2 = strings.ToLower(str2)
    str2 = strings.ToLower(str2)
    s1, s2 := []rune(str1), []rune(str2)
    s1, s2 := []rune(str1), []rune(str2)
@ -45,7 +44,7 @@ func LevenshteinDistance(str1, str2 string) int {
            if s1[i-1] != s2[j-1] {
            if s1[i-1] != s2[j-1] {
                cost = 1
                cost = 1
            }
            }
            d[i][j] = min(d[i-1][j]+1, d[i][j-1]+1, d[i-1][j-1]+cost)
            d[i][j] = minInt(d[i-1][j]+1, d[i][j-1]+1, d[i-1][j-1]+cost)
        }
        }
    }
    }

@ -55,7 +54,7 @@ func LevenshteinDistance(str1, str2 string) int {
func Similarity(str1, str2 string) float64 {
func Similarity(str1, str2 string) float64 {
    str1 = strings.ReplaceAll(str1, " ", "")
    str1 = strings.ReplaceAll(str1, " ", "")
    str2 = strings.ReplaceAll(str2, " ", "")
    str2 = strings.ReplaceAll(str2, " ", "")
    distance := LevenshteinDistance(str1, str2)
    distance := levenshteinDistance(str1, str2)
    maxLength := len(str1)
    maxLength := len(str1)
    if len(str2) > maxLength {
    if len(str2) > maxLength {
        maxLength = len(str2)
        maxLength = len(str2)
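After this change the distance helpers (minInt, levenshteinDistance) are unexported and Similarity stays the public entry point. A quick, hedged usage example; the sample strings and wrapper are illustrative only.

package example

import (
    "fmt"

    "pcgamedb/utils"
)

// compareNames shows the exported entry point after the rename: Similarity
// still wraps the now-unexported levenshteinDistance and minInt helpers.
func compareNames() {
    score := utils.Similarity("Dark Souls III", "DARK SOULS 3")
    fmt.Printf("similarity: %.2f\n", score)
}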
@ -17,10 +17,18 @@ func SizeToBytes(size string) (uint64, error) {
        "TB": 1024 * 1024 * 1024 * 1024,
        "TB": 1024 * 1024 * 1024 * 1024,
    }
    }

    unitsSlice := []string{
        "TB",
        "GB",
        "MB",
        "KB",
        "B",
    }

    var unit string
    var unit string
    var value float64
    var value float64

    for u := range units {
    for _, u := range unitsSlice {
        if strings.HasSuffix(size, u) {
        if strings.HasSuffix(size, u) {
            unit = u
            unit = u
            numStr := strings.TrimSuffix(size, u)
            numStr := strings.TrimSuffix(size, u)
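Iterating a Go map visits keys in random order, so the old for u := range units loop could match the bare "B" suffix before "MB" or "GB". The ordered unitsSlice checks the longest units first. A small runnable sketch of that idea; the unitOf function is a simplified stand-in, not pcgamedb's SizeToBytes.

package main

import (
    "fmt"
    "strings"
)

// unitOf checks suffixes from the longest unit to the shortest so that inputs
// like "500MB" match "MB" rather than the trailing "B".
func unitOf(size string) string {
    for _, u := range []string{"TB", "GB", "MB", "KB", "B"} {
        if strings.HasSuffix(size, u) {
            return u
        }
    }
    return ""
}

func main() {
    fmt.Println(unitOf("500MB")) // MB, not B
    fmt.Println(unitOf("1.5TB")) // TB
}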