// Package config loads the application configuration from built-in defaults,
// an optional config.json file, and environment variables.
package config

import (
	"bytes"
	"encoding/json"
	"os"
	"os/exec"
	"reflect"
	"strconv"
	"strings"
	"time"
)

// config holds the full application configuration. Defaults are set in init,
// then overridden by config.json (if present) and finally by environment
// variables.
type config struct {
	LogLevel           string             `env:"LOG_LEVEL" json:"log_level"`
	Server             server             `json:"server"`
	Database           database           `json:"database"`
	Redis              redis              `json:"redis"`
	OnlineFix          onlinefix          `json:"online_fix"`
	Twitch             twitch             `json:"twitch"`
	Webhooks           webhooks           `json:"webhooks"`
	CFClearanceScraper cfClearanceScraper `json:"cf_clearance_scraper"`
	MegaAvaliable      bool               // set by TestMega: whether the mega-get CLI is usable
}

type cfClearanceScraper struct {
	Url string `env:"CF_CLEARANCE_SCRAPER_URL" json:"url"`
}

type webhooks struct {
	CrawlTask []string `env:"WEBHOOKS_CRAWL_TASK" json:"crawl_task"`
}

type server struct {
	Port          string `env:"SERVER_PORT" json:"port"`
	SecretKey     string `env:"SERVER_SECRET_KEY" json:"secret_key"`
	AutoCrawl     bool   `env:"SERVER_AUTO_CRAWL" json:"auto_crawl"`
	AutoCrawlCron string `env:"SERVER_AUTO_CRAWL_CRON" json:"auto_crawl_cron"`
}

type database struct {
	Host     string `env:"DATABASE_HOST" json:"host"`
	Port     int    `env:"DATABASE_PORT" json:"port"`
	User     string `env:"DATABASE_USER" json:"user"`
	Password string `env:"DATABASE_PASSWORD" json:"password"`
	Database string `env:"DATABASE_NAME" json:"database"`
}

type twitch struct {
	ClientID     string `env:"TWITCH_CLIENT_ID" json:"client_id"`
	ClientSecret string `env:"TWITCH_CLIENT_SECRET" json:"client_secret"`
}

type redis struct {
	Host     string `env:"REDIS_HOST" json:"host"`
	Port     int    `env:"REDIS_PORT" json:"port"`
	Password string `env:"REDIS_PASSWORD" json:"password"`
	DBIndex  int    `env:"REDIS_DB" json:"db_index"`
}

type onlinefix struct {
	User     string `env:"ONLINEFIX_USER" json:"user"`
	Password string `env:"ONLINEFIX_PASSWORD" json:"password"`
}

// runtimeConfig holds state that is determined at runtime rather than loaded
// from configuration sources.
type runtimeConfig struct {
	ServerStartTime time.Time
}
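
// An illustrative config.json matching the structs above; every value below is
// a placeholder for documentation purposes, not a recommended or default setting:
//
//	{
//	  "log_level": "info",
//	  "server": {"port": "8080", "secret_key": "change-me", "auto_crawl": true, "auto_crawl_cron": "0 */3 * * *"},
//	  "database": {"host": "localhost", "port": 27017, "user": "root", "password": "password", "database": "games"},
//	  "redis": {"host": "localhost", "port": 6379, "password": "", "db_index": 0},
//	  "online_fix": {"user": "user", "password": "password"},
//	  "twitch": {"client_id": "id", "client_secret": "secret"},
//	  "webhooks": {"crawl_task": ["https://example.com/hook"]},
//	  "cf_clearance_scraper": {"url": "http://localhost:3000"}
//	}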

// Config is the global application configuration, populated in init.
var Config config

// Runtime holds runtime-only values such as the server start time.
var Runtime runtimeConfig

// init builds the configuration in three passes: package defaults, an optional
// config.json in the working directory, and finally environment variables.
// Required settings are validated afterwards; missing ones cause a panic.
func init() {
	Config = config{
		LogLevel: "info",
		Database: database{
			Port:     27017,
			User:     "root",
			Password: "password",
		},
		MegaAvaliable: TestMega(),
		Server: server{
			// Default crawl schedule: minute 0 of every third hour.
			AutoCrawlCron: "0 */3 * * *",
		},
	}

	// Overlay values from config.json if the file exists.
	if _, err := os.Stat("config.json"); err == nil {
		configData, err := os.ReadFile("config.json")
		if err != nil {
			panic(err)
		}
		err = json.Unmarshal(configData, &Config)
		if err != nil {
			panic(err)
		}
	}

	// Environment variables take precedence over both defaults and config.json.
	loadEnvVariables(&Config)

	if Config.OnlineFix.User == "" || Config.OnlineFix.Password == "" {
		panic("Need OnlineFix User and Password")
	}
	if Config.Redis.Host == "" {
		panic("Need Redis Host")
	}
	if Config.Database.Database == "" || Config.Database.Host == "" {
		panic("Need Database Name and Host")
	}
	if Config.CFClearanceScraper.Url == "" {
		panic("Need CF Clearance Scraper URL")
	}
	// Normalize the scraper URL so it always ends with the expected endpoint path.
	if !strings.HasSuffix(Config.CFClearanceScraper.Url, "/cf-clearance-scraper") {
		Config.CFClearanceScraper.Url += "/cf-clearance-scraper"
	}
}
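
// The required settings can also be supplied entirely through environment
// variables instead of config.json. An illustrative invocation, with
// placeholder values and a hypothetical binary name:
//
//	ONLINEFIX_USER=user ONLINEFIX_PASSWORD=secret \
//	DATABASE_HOST=localhost DATABASE_NAME=games \
//	REDIS_HOST=localhost \
//	CF_CLEARANCE_SCRAPER_URL=http://localhost:3000 \
//	./server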

// loadEnvVariables walks cfg with reflection and overwrites any field whose
// `env` tag names a non-empty environment variable. Untagged struct fields are
// traversed recursively; string, int, bool, and []string fields are supported,
// with []string values parsed as comma-separated lists.
func loadEnvVariables(cfg interface{}) {
	v := reflect.ValueOf(cfg).Elem()
	t := v.Type()
	for i := 0; i < v.NumField(); i++ {
		field := t.Field(i)
		envTag := field.Tag.Get("env")
		if envTag == "" || envTag == "-" {
			// No env tag: recurse into nested structs, otherwise leave the field alone.
			if field.Type.Kind() == reflect.Struct {
				loadEnvVariables(v.Field(i).Addr().Interface())
			}
			continue
		}
		envValue := os.Getenv(envTag)
		if envValue == "" {
			continue
		}
		switch field.Type.Kind() {
		case reflect.String:
			v.Field(i).SetString(envValue)
		case reflect.Int:
			if value, err := strconv.Atoi(envValue); err == nil {
				v.Field(i).SetInt(int64(value))
			}
		case reflect.Bool:
			if value, err := strconv.ParseBool(envValue); err == nil {
				v.Field(i).SetBool(value)
			}
		case reflect.Slice:
			if field.Type.Elem().Kind() == reflect.String {
				envValueSlice := strings.Split(envValue, ",")
				v.Field(i).Set(reflect.ValueOf(envValueSlice))
			}
		}
	}
}
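
// For example (illustrative values), with the environment containing
//
//	WEBHOOKS_CRAWL_TASK=https://a.example/hook,https://b.example/hook
//
// loadEnvVariables(&Config) sets Config.Webhooks.CrawlTask to
// []string{"https://a.example/hook", "https://b.example/hook"} via the
// reflect.Slice case above.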

// TestMega reports whether the mega-get command from the MEGAcmd CLI can be
// executed; the result is used to set Config.MegaAvaliable.
func TestMega() bool {
	cmd := exec.Command("mega-get", "--help")
	var out bytes.Buffer
	cmd.Stdout = &out
	err := cmd.Run()
	return err == nil
}