fix CleanOrphanGamesInGameInfos

mod SearchGameInfos
fix trigger body missing data
disable goggames crawler
This commit is contained in:
Nite07 2024-11-18 20:21:08 +08:00
parent ebac45ccd2
commit f8e3265a76
11 changed files with 309 additions and 81 deletions

View File

@ -4,9 +4,8 @@ const (
C1337xBaseURL = "https://www.1337x.to" C1337xBaseURL = "https://www.1337x.to"
FreeGOGListURL = "https://freegogpcgames.com/a-z-games-list" FreeGOGListURL = "https://freegogpcgames.com/a-z-games-list"
GOGGamesBaseURL = "https://www.gog-games.to" GOGGamesBaseURL = "https://www.gog-games.to"
GOGGamesURL = "https://www.gog-games.to/search/all/%v/date/desc/any" GOGGamesURL = "https://www.gog-games.to/search?page=%v&search=&is_new=false&is_updated=true&in_dev_filter=none&sort_by=last_update_desc"
GOGSearchURL = "https://embed.gog.com/games/ajax/filtered" GOGGamesPageURL = "https://www.gog-games.to/api/v1/games/%s"
GOGDetailsURL = "https://api.gog.com/products"
SteamSearchURL = "https://store.steampowered.com/search" SteamSearchURL = "https://store.steampowered.com/search"
SteamAppDetailURL = "https://store.steampowered.com/api/appdetails" SteamAppDetailURL = "https://store.steampowered.com/api/appdetails"
SteamAllAppsURL = "https://api.steampowered.com/ISteamApps/GetAppList/v2/?format=json" SteamAllAppsURL = "https://api.steampowered.com/ISteamApps/GetAppList/v2/?format=json"

View File

@ -105,7 +105,7 @@ func (c *ARMGDDNCrawler) crawlGames(data []GameData, platform string, num int) (
} }
c.logger.Info("Crawling", zap.String("url", u)) c.logger.Info("Crawling", zap.String("url", u))
walker := c.conn.Walk(path) walker := c.conn.Walk(path)
size := int64(0) size := uint64(0)
for walker.Next() { for walker.Next() {
if walker.Stat().Type == ftp.EntryTypeFile { if walker.Stat().Type == ftp.EntryTypeFile {
fileSize, err := c.conn.FileSize(walker.Path()) fileSize, err := c.conn.FileSize(walker.Path())
@ -113,7 +113,7 @@ func (c *ARMGDDNCrawler) crawlGames(data []GameData, platform string, num int) (
c.logger.Warn("file size error", zap.Error(err)) c.logger.Warn("file size error", zap.Error(err))
break break
} }
size += fileSize size += uint64(fileSize)
} }
} }
item, err := db.GetGameItemByUrl(u) item, err := db.GetGameItemByUrl(u)
@ -123,7 +123,7 @@ func (c *ARMGDDNCrawler) crawlGames(data []GameData, platform string, num int) (
item.Url = u item.Url = u
item.Name = ARMGDDNFormatter(v.FolderName) item.Name = ARMGDDNFormatter(v.FolderName)
item.UpdateFlag = updateFlag item.UpdateFlag = updateFlag
item.Size = utils.FormatSize(size) item.Size = utils.BytesToSize(size)
item.RawName = v.FolderName item.RawName = v.FolderName
item.Author = "ARMGDDN" item.Author = "ARMGDDN"
item.Download = fmt.Sprintf("ftpes://%s:%s@%s/%s/%s", ftpUsername, ftpPassword, ftpAddress, platform, url.QueryEscape(v.FolderName)) item.Download = fmt.Sprintf("ftpes://%s:%s@%s/%s/%s", ftpUsername, ftpPassword, ftpAddress, platform, url.QueryEscape(v.FolderName))

View File

@ -32,7 +32,7 @@ func BuildCrawlerMap(logger *zap.Logger) map[string]Crawler {
"onlinefix": NewOnlineFixCrawler(logger), "onlinefix": NewOnlineFixCrawler(logger),
"steamrip": NewSteamRIPCrawler(logger), "steamrip": NewSteamRIPCrawler(logger),
// "armgddn": NewARMGDDNCrawler(logger), // "armgddn": NewARMGDDNCrawler(logger),
"goggames": NewGOGGamesCrawler(logger), // "goggames": NewGOGGamesCrawler(logger),
"chovka": NewChovkaCrawler(logger), "chovka": NewChovkaCrawler(logger),
// "gnarly": NewGnarlyCrawler(logger), // "gnarly": NewGnarlyCrawler(logger),
} }

View File

@ -1,19 +1,16 @@
package crawler package crawler
import ( import (
"bytes" "encoding/json"
"errors"
"fmt" "fmt"
"regexp"
"strconv"
"strings" "strings"
"time"
"github.com/nitezs/pcgamedb/constant" "github.com/nitezs/pcgamedb/constant"
"github.com/nitezs/pcgamedb/db" "github.com/nitezs/pcgamedb/db"
"github.com/nitezs/pcgamedb/model" "github.com/nitezs/pcgamedb/model"
"github.com/nitezs/pcgamedb/utils" "github.com/nitezs/pcgamedb/utils"
"github.com/PuerkitoBio/goquery"
"go.uber.org/zap" "go.uber.org/zap"
) )
@ -38,33 +35,37 @@ func (c *GOGGamesCrawler) CrawlByUrl(url string) (*model.GameItem, error) {
if err != nil { if err != nil {
return nil, err return nil, err
} }
doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data)) data := gameResult{}
err = json.Unmarshal(resp.Data, &data)
if err != nil { if err != nil {
return nil, err return nil, err
} }
name := strings.TrimSpace(doc.Find("#game-details>.container>h1").First().Text())
magnetRegex := regexp.MustCompile(`magnet:\?[^"]*`) name := data.Title
magnetRegexRes := magnetRegex.FindString(string(resp.Data)) links := make([]string, 0)
if magnetRegexRes == "" { for _, link := range data.Links.Game.Gofile.Links {
return nil, errors.New("magnet not found") links = append(links, link.Link)
} }
sizeStrs := make([]string, 0) if len(data.Links.Patch.Gofile.Links) > 0 {
doc.Find(".container>.items-group").First().Find(".filesize").Each(func(i int, s *goquery.Selection) { for _, link := range data.Links.Patch.Gofile.Links {
sizeStrs = append(sizeStrs, s.Text()) links = append(links, link.Link)
})
size, err := utils.SubSizeStrings(sizeStrs)
if err != nil {
return nil, err
} }
}
size := uint64(0)
for _, file := range data.Files.Game {
s, _ := utils.SizeToBytes(file.Size)
size += s
}
item, err := db.GetGameItemByUrl(url) item, err := db.GetGameItemByUrl(url)
if err != nil { if err != nil {
return nil, err return nil, err
} }
item.Name = name item.Name = name
item.RawName = name item.RawName = name
item.Download = magnetRegexRes item.Download = strings.Join(links, ",")
item.Url = url item.Url = url
item.Size = size item.Size = utils.BytesToSize(size)
item.Author = "GOGGames" item.Author = "GOGGames"
return item, nil return item, nil
} }
@ -76,21 +77,23 @@ func (c *GOGGamesCrawler) Crawl(page int) ([]*model.GameItem, error) {
if err != nil { if err != nil {
return nil, err return nil, err
} }
doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data)) data := searchResult{}
err = json.Unmarshal(resp.Data, &data)
if err != nil { if err != nil {
return nil, err return nil, err
} }
urls := make([]string, 0) urls := make([]string, 0)
doc.Find(".game-blocks>a").Each(func(i int, s *goquery.Selection) { updateFlags := []string{} //link+date
u, exist := s.Attr("href") for _, item := range data.Data {
if !exist { urls = append(urls, fmt.Sprintf(constant.GOGGamesPageURL, item.Slug))
return updateFlags = append(updateFlags, fmt.Sprintf("%s%s", item.GogURL, item.LastUpdate))
} }
urls = append(urls, fmt.Sprintf("%s%s", constant.GOGGamesBaseURL, u))
})
res := make([]*model.GameItem, 0) res := make([]*model.GameItem, 0)
for _, u := range urls { for i, u := range urls {
c.logger.Info("Crawling", zap.String("URL", u)) c.logger.Info("Crawling", zap.String("URL", u))
if db.IsGameCrawled(updateFlags[i], "GOGGames") {
continue
}
item, err := c.CrawlByUrl(u) item, err := c.CrawlByUrl(u)
if err != nil { if err != nil {
c.logger.Warn("Failed to crawl", zap.Error(err), zap.String("URL", u)) c.logger.Warn("Failed to crawl", zap.Error(err), zap.String("URL", u))
@ -149,10 +152,191 @@ func (c *GOGGamesCrawler) GetTotalPageNum() (int, error) {
if err != nil { if err != nil {
return 0, err return 0, err
} }
doc, err := goquery.NewDocumentFromReader(bytes.NewReader(resp.Data)) data := searchResult{}
err = json.Unmarshal(resp.Data, &data)
if err != nil { if err != nil {
return 0, err return 0, err
} }
btns := doc.Find(".pagination>.btn") return data.Meta.LastPage, nil
return strconv.Atoi(strings.TrimSpace(btns.Eq(btns.Length() - 2).Text())) }
type searchResult struct {
Data []struct {
ID string `json:"id"`
Slug string `json:"slug"`
Title string `json:"title"`
Image string `json:"image"`
Background string `json:"background"`
GogURL string `json:"gog_url"`
IsIndev bool `json:"is_indev"`
IsNew bool `json:"is_new"`
IsUpdated bool `json:"is_updated"`
IsQueued bool `json:"is_queued"`
IsUploading bool `json:"is_uploading"`
VotedOn bool `json:"voted_on"`
LastUpdate time.Time `json:"last_update"`
Md5Filename string `json:"md5_filename"`
Infohash string `json:"infohash"`
IsVotable bool `json:"is_votable"`
} `json:"data"`
Links struct {
First string `json:"first"`
Last string `json:"last"`
Prev any `json:"prev"`
Next string `json:"next"`
} `json:"links"`
Meta struct {
CurrentPage int `json:"current_page"`
From int `json:"from"`
LastPage int `json:"last_page"`
Links []struct {
URL any `json:"url"`
Label string `json:"label"`
Active bool `json:"active"`
} `json:"links"`
Path string `json:"path"`
PerPage int `json:"per_page"`
To int `json:"to"`
Total int `json:"total"`
} `json:"meta"`
}
type gameResult struct {
ID string `json:"id"`
Slug string `json:"slug"`
Title string `json:"title"`
Image string `json:"image"`
Background string `json:"background"`
GogURL string `json:"gog_url"`
IsIndev bool `json:"is_indev"`
IsNew bool `json:"is_new"`
IsUpdated bool `json:"is_updated"`
IsQueued bool `json:"is_queued"`
IsUploading bool `json:"is_uploading"`
VotedOn bool `json:"voted_on"`
LastUpdate time.Time `json:"last_update"`
Md5Filename string `json:"md5_filename"`
Infohash string `json:"infohash"`
Links struct {
Goodie struct {
OneFichier struct {
ID string `json:"id"`
Name string `json:"name"`
Links []struct {
Label string `json:"label"`
Link string `json:"link"`
} `json:"links"`
} `json:"1fichier"`
Vikingfile struct {
ID string `json:"id"`
Name string `json:"name"`
Links []struct {
Label string `json:"label"`
Link string `json:"link"`
} `json:"links"`
} `json:"vikingfile"`
Pixeldrain struct {
ID string `json:"id"`
Name string `json:"name"`
Links []struct {
Label string `json:"label"`
Link string `json:"link"`
} `json:"links"`
} `json:"pixeldrain"`
Gofile struct {
ID string `json:"id"`
Name string `json:"name"`
Links []struct {
Label string `json:"label"`
Link string `json:"link"`
} `json:"links"`
} `json:"gofile"`
} `json:"goodie"`
Game struct {
OneFichier struct {
ID string `json:"id"`
Name string `json:"name"`
Links []struct {
Label string `json:"label"`
Link string `json:"link"`
} `json:"links"`
} `json:"1fichier"`
Vikingfile struct {
ID string `json:"id"`
Name string `json:"name"`
Links []struct {
Label string `json:"label"`
Link string `json:"link"`
} `json:"links"`
} `json:"vikingfile"`
Pixeldrain struct {
ID string `json:"id"`
Name string `json:"name"`
Links []struct {
Label string `json:"label"`
Link string `json:"link"`
} `json:"links"`
} `json:"pixeldrain"`
Gofile struct {
ID string `json:"id"`
Name string `json:"name"`
Links []struct {
Label string `json:"label"`
Link string `json:"link"`
} `json:"links"`
} `json:"gofile"`
} `json:"game"`
Patch struct {
OneFichier struct {
ID string `json:"id"`
Name string `json:"name"`
Links []struct {
Label string `json:"label"`
Link string `json:"link"`
} `json:"links"`
} `json:"1fichier"`
Vikingfile struct {
ID string `json:"id"`
Name string `json:"name"`
Links []struct {
Label string `json:"label"`
Link string `json:"link"`
} `json:"links"`
} `json:"vikingfile"`
Pixeldrain struct {
ID string `json:"id"`
Name string `json:"name"`
Links []struct {
Label string `json:"label"`
Link string `json:"link"`
} `json:"links"`
} `json:"pixeldrain"`
Gofile struct {
ID string `json:"id"`
Name string `json:"name"`
Links []struct {
Label string `json:"label"`
Link string `json:"link"`
} `json:"links"`
} `json:"gofile"`
} `json:"patch"`
} `json:"links"`
Files struct {
Game []struct {
Name string `json:"name"`
Type string `json:"type"`
Size string `json:"size"`
} `json:"game"`
Goodie []struct {
Name string `json:"name"`
Type string `json:"type"`
Size string `json:"size"`
} `json:"goodie"`
Patch []struct {
Name string `json:"name"`
Type string `json:"type"`
Size string `json:"size"`
} `json:"patch"`
} `json:"files"`
IsVotable bool `json:"is_votable"`
} }

View File

@ -115,12 +115,12 @@ func (c *SteamRIPCrawler) Crawl(num int) ([]*model.GameItem, error) {
c.logger.Info("Crawling", zap.String("URL", u)) c.logger.Info("Crawling", zap.String("URL", u))
item, err := c.CrawlByUrl(u) item, err := c.CrawlByUrl(u)
if err != nil { if err != nil {
c.logger.Error("Failed to crawl", zap.Error(err), zap.String("URL", u)) c.logger.Warn("Failed to crawl", zap.Error(err), zap.String("URL", u))
continue continue
} }
item.UpdateFlag = updateFlags[i] item.UpdateFlag = updateFlags[i]
if err := db.SaveGameItem(item); err != nil { if err := db.SaveGameItem(item); err != nil {
c.logger.Error("Failed to save item", zap.Error(err)) c.logger.Warn("Failed to save item", zap.Error(err))
continue continue
} }
items = append(items, item) items = append(items, item)

View File

@ -16,7 +16,7 @@ import (
) )
const ( const (
gameDownloadCollectionName = "games" gameItemCollectionName = "games"
gameInfoCollectionName = "game_infos" gameInfoCollectionName = "game_infos"
) )
@ -24,7 +24,7 @@ var (
mongoDB *mongo.Client mongoDB *mongo.Client
mutx = &sync.RWMutex{} mutx = &sync.RWMutex{}
GameItemCollection = &CustomCollection{ GameItemCollection = &CustomCollection{
collName: gameDownloadCollectionName, collName: gameItemCollectionName,
} }
GameInfoCollection = &CustomCollection{ GameInfoCollection = &CustomCollection{
collName: gameInfoCollectionName, collName: gameInfoCollectionName,
@ -58,7 +58,7 @@ func connect() {
log.Logger.Info("Connected to MongoDB") log.Logger.Info("Connected to MongoDB")
mongoDB = client mongoDB = client
gameDownloadCollection := mongoDB.Database(config.Config.Database.Database).Collection(gameDownloadCollectionName) gameDownloadCollection := mongoDB.Database(config.Config.Database.Database).Collection(gameItemCollectionName)
gameInfoCollection := mongoDB.Database(config.Config.Database.Database).Collection(gameInfoCollectionName) gameInfoCollection := mongoDB.Database(config.Config.Database.Database).Collection(gameInfoCollectionName)
nameIndex := mongo.IndexModel{ nameIndex := mongo.IndexModel{

View File

@ -21,7 +21,7 @@ import (
) )
var ( var (
removeDelimiter = regexp.MustCompile(`[:\-\+]`) removeNoneAlphaNumeric = regexp.MustCompile(`^[A-Za-z0-9]`)
removeRepeatingSpacesRegex = regexp.MustCompile(`\s+`) removeRepeatingSpacesRegex = regexp.MustCompile(`\s+`)
) )
@ -221,7 +221,7 @@ func GetGameItemsByIDs(ids []primitive.ObjectID) ([]*model.GameItem, error) {
func SearchGameInfos(name string, page int, pageSize int) ([]*model.GameInfo, int, error) { func SearchGameInfos(name string, page int, pageSize int) ([]*model.GameInfo, int, error) {
var items []*model.GameInfo var items []*model.GameInfo
name = removeDelimiter.ReplaceAllString(name, " ") name = removeNoneAlphaNumeric.ReplaceAllString(name, " ")
name = removeRepeatingSpacesRegex.ReplaceAllString(name, " ") name = removeRepeatingSpacesRegex.ReplaceAllString(name, " ")
name = strings.TrimSpace(name) name = strings.TrimSpace(name)
name = strings.Replace(name, " ", ".*", -1) name = strings.Replace(name, " ", ".*", -1)
@ -435,7 +435,7 @@ func CleanOrphanGamesInGameInfos() (map[primitive.ObjectID]primitive.ObjectID, e
pipeline := mongo.Pipeline{ pipeline := mongo.Pipeline{
bson.D{{Key: "$unwind", Value: "$games"}}, bson.D{{Key: "$unwind", Value: "$games"}},
bson.D{{Key: "$lookup", Value: bson.D{ bson.D{{Key: "$lookup", Value: bson.D{
{Key: "from", Value: GameItemCollection}, {Key: "from", Value: gameItemCollectionName},
{Key: "localField", Value: "games"}, {Key: "localField", Value: "games"},
{Key: "foreignField", Value: "_id"}, {Key: "foreignField", Value: "_id"},
{Key: "as", Value: "gameDownloads"}, {Key: "as", Value: "gameDownloads"},

View File

@ -10,7 +10,7 @@ import (
) )
type SearchGamesRequest struct { type SearchGamesRequest struct {
Keyword string `form:"keyword" json:"keyword" binding:"required,min=4,max=64"` Keyword string `form:"keyword" json:"keyword" binding:"required,min=1,max=64"`
Page int `form:"page" json:"page"` Page int `form:"page" json:"page"`
PageSize int `form:"page_size" json:"page_size"` PageSize int `form:"page_size" json:"page_size"`
} }

View File

@ -24,12 +24,14 @@ func Crawl(logger *zap.Logger) {
logger.Warn("Failed to crawl games", zap.String("crawler", c.Name()), zap.Error(err)) logger.Warn("Failed to crawl games", zap.String("crawler", c.Name()), zap.Error(err))
} }
games = append(games, g...) games = append(games, g...)
logger.Info("Crawled games", zap.String("crawler", c.Name()), zap.Int("count", len(g)))
} else if c, ok := item.(crawler.SimpleCrawler); ok { } else if c, ok := item.(crawler.SimpleCrawler); ok {
g, err := c.CrawlAll() g, err := c.CrawlAll()
if err != nil { if err != nil {
logger.Warn("Failed to crawl games", zap.String("crawler", c.Name()), zap.Error(err)) logger.Warn("Failed to crawl games", zap.String("crawler", c.Name()), zap.Error(err))
} }
games = append(games, g...) games = append(games, g...)
logger.Info("Crawled games", zap.String("crawler", c.Name()), zap.Int("count", len(g)))
} }
} }
logger.Info("Crawled finished", zap.Int("count", len(games))) logger.Info("Crawled finished", zap.Int("count", len(games)))
@ -44,7 +46,7 @@ func Crawl(logger *zap.Logger) {
Clean(logger) Clean(logger)
// trigger webhooks // trigger webhooks
infos := make([]*model.GameInfo, len(games)) infos := []*model.GameInfo{}
for _, game := range games { for _, game := range games {
info, err := db.GetGameInfoByGameItemID(game.ID) info, err := db.GetGameInfoByGameItemID(game.ID)
if err != nil { if err != nil {

View File

@ -2,7 +2,6 @@ package utils
import ( import (
"bytes" "bytes"
"fmt"
"strconv" "strconv"
"strings" "strings"
@ -18,39 +17,21 @@ func ConvertTorrentToMagnet(torrent []byte) (string, string, error) {
if err != nil { if err != nil {
return "", "", err return "", "", err
} }
var size int64 = info.Length var size uint64 = uint64(info.Length)
if size == 0 { if size == 0 {
for _, file := range info.Files { for _, file := range info.Files {
size += file.Length size += uint64(file.Length)
} }
} }
infoHash := minfo.HashInfoBytes() magnet, err := minfo.MagnetV2()
magnet := minfo.Magnet(&infoHash, &info) if err != nil {
return magnet.String(), FormatSize(size), nil return "", "", err
}
func FormatSize(size int64) string {
const (
_ = iota
KB int64 = 1 << (10 * iota)
MB
GB
TB
)
switch {
case size >= GB:
return fmt.Sprintf("%.1f GB", float64(size)/float64(GB))
case size >= MB:
return fmt.Sprintf("%.1f MB", float64(size)/float64(MB))
case size >= KB:
return fmt.Sprintf("%.1f KB", float64(size)/float64(KB))
default:
return fmt.Sprintf("%d Bytes", size)
} }
return magnet.String(), BytesToSize(size), nil
} }
func SubSizeStrings(sizes []string) (string, error) { func SubSizeStrings(sizes []string) (string, error) {
size := int64(0) size := uint64(0)
for _, sizeStr := range sizes { for _, sizeStr := range sizes {
sizeStr := strings.ToLower(sizeStr) sizeStr := strings.ToLower(sizeStr)
if strings.Contains(sizeStr, "gb") { if strings.Contains(sizeStr, "gb") {
@ -60,7 +41,7 @@ func SubSizeStrings(sizes []string) (string, error) {
if err != nil { if err != nil {
return "", err return "", err
} }
size += int64(addSize * 1024 * 1024 * 1024) size += uint64(addSize * 1024 * 1024 * 1024)
} }
if strings.Contains(sizeStr, "mb") { if strings.Contains(sizeStr, "mb") {
sizeStr = strings.ReplaceAll(sizeStr, "mb", "") sizeStr = strings.ReplaceAll(sizeStr, "mb", "")
@ -69,7 +50,7 @@ func SubSizeStrings(sizes []string) (string, error) {
if err != nil { if err != nil {
return "", err return "", err
} }
size += int64(addSize * 1024 * 1024) size += uint64(addSize * 1024 * 1024)
} }
if strings.Contains(sizeStr, "kb") { if strings.Contains(sizeStr, "kb") {
sizeStr = strings.ReplaceAll(sizeStr, "kb", "") sizeStr = strings.ReplaceAll(sizeStr, "kb", "")
@ -78,8 +59,8 @@ func SubSizeStrings(sizes []string) (string, error) {
if err != nil { if err != nil {
return "", err return "", err
} }
size += int64(addSize * 1024) size += uint64(addSize * 1024)
} }
} }
return FormatSize(size), nil return BytesToSize(size), nil
} }

62
utils/size.go Normal file
View File

@ -0,0 +1,62 @@
package utils
import (
"fmt"
"strconv"
"strings"
)
func SizeToBytes(size string) (uint64, error) {
size = strings.TrimSpace(strings.ToUpper(size))
units := map[string]uint64{
"B": 1,
"KB": 1024,
"MB": 1024 * 1024,
"GB": 1024 * 1024 * 1024,
"TB": 1024 * 1024 * 1024 * 1024,
}
var unit string
var value float64
for u := range units {
if strings.HasSuffix(size, u) {
unit = u
numStr := strings.TrimSuffix(size, u)
val, err := strconv.ParseFloat(strings.TrimSpace(numStr), 64)
if err != nil {
return 0, err
}
value = val
break
}
}
if unit == "" {
return 0, fmt.Errorf("invalid unit in size: %s", size)
}
bytes := uint64(value * float64(units[unit]))
return bytes, nil
}
func BytesToSize(size uint64) string {
const (
_ = iota
KB uint64 = 1 << (10 * iota)
MB
GB
TB
)
switch {
case size >= GB:
return fmt.Sprintf("%.1f GB", float64(size)/float64(GB))
case size >= MB:
return fmt.Sprintf("%.1f MB", float64(size)/float64(MB))
case size >= KB:
return fmt.Sprintf("%.1f KB", float64(size)/float64(KB))
default:
return fmt.Sprintf("%d Bytes", size)
}
}