added config values to configure enabled meta-search engines

commit 5e6fc73038, parent dc4a3a4bec

7 changed files with 184 additions and 88 deletions
config.go (59 lines changed)
@@ -22,6 +22,13 @@ type CacheConfig struct {
	Path string
}

type MetaSearchConfig struct {
	Text []string
	Image []string
	Files []string
	Video []string
}

type Config struct {
	Port int // Added
	AuthCode string // Added
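The new MetaSearchConfig type carries one list of enabled engine (or instance) names per result category. As a minimal sketch of how such per-category lists might be consulted at query time (the dispatch code is not part of this diff, so the helper below is purely illustrative):

	// enabledForCategory returns the configured engine list for one result
	// category. Illustrative only; the actual dispatch code is not shown in
	// this commit.
	func enabledForCategory(cfg Config, category string) []string {
		switch category {
		case "text":
			return cfg.MetaSearch.Text
		case "image":
			return cfg.MetaSearch.Image
		case "files":
			return cfg.MetaSearch.Files
		case "video":
			return cfg.MetaSearch.Video
		default:
			return nil
		}
	}

A caller would then, for example, range over enabledForCategory(cfg, "video") to decide which Piped instances to query.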
@@ -47,6 +54,9 @@ type Config struct {
	CrawlingInterval time.Duration // Refres crawled results in...
	MaxPagesPerDomain int // Max pages to crawl per domain
	IndexBatchSize int
	LibreXInstances []string

	MetaSearch MetaSearchConfig

	DriveCache CacheConfig
	RamCache CacheConfig
@@ -75,6 +85,33 @@ var defaultConfig = Config{
	MaxPagesPerDomain: 10,
	IndexBatchSize: 50,
	LogLevel: 1,
	LibreXInstances: []string{"librex.antopie.org"},
	MetaSearch: MetaSearchConfig{
		// For Text search (skip SearXNG and LibreX by default, as that would be mega stupid)
		Text: []string{"Google", "Brave", "DuckDuckGo"},

		// For Image search
		Image: []string{"Qwant", "Bing", "DeviantArt"},

		// For Files search
		Files: []string{"TorrentGalaxy", "ThePirateBay"},

		// For Video (piped instances)
		Video: []string{
			"api.piped.yt",
			"pipedapi.moomoo.me",
			"pipedapi.darkness.services",
			"pipedapi.kavin.rocks",
			"piped-api.hostux.net",
			"pipedapi.syncpundit.io",
			"piped-api.cfe.re",
			"pipedapi.in.projectsegfau.lt",
			"piapi.ggtyler.dev",
			"piped-api.codespace.cz",
			"pipedapi.coldforge.xyz",
			"pipedapi.osphost.fi",
		},
	},
	DriveCache: CacheConfig{
		Duration: 48 * time.Hour, // Added
		Path: "./cache", // Added
@@ -271,6 +308,14 @@ func saveConfig(config Config) {
	proxiesSec.Key("CrawlerProxyStrict").SetValue(strconv.FormatBool(config.CrawlerProxyStrict))
	proxiesSec.Key("CrawlerProxies").SetValue(strings.Join(config.CrawlerProxies, ","))

	// MetaSearch section
	metaSec := cfg.Section("MetaSearches")
	metaSec.Key("LibreXInstances").SetValue(strings.Join(config.LibreXInstances, ","))
	metaSec.Key("Text").SetValue(strings.Join(config.MetaSearch.Text, ","))
	metaSec.Key("Image").SetValue(strings.Join(config.MetaSearch.Image, ","))
	metaSec.Key("Files").SetValue(strings.Join(config.MetaSearch.Files, ","))
	metaSec.Key("Video").SetValue(strings.Join(config.MetaSearch.Video, ","))

	// Indexer section
	indexerSec := cfg.Section("Indexer")
	indexerSec.Key("ConcurrentStandardCrawlers").SetValue(strconv.Itoa(config.ConcurrentStandardCrawlers))
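Given the keys written above (slices joined with commas under a MetaSearches section), the saved file should look roughly like the following; the values shown are the defaults, the Video list is abbreviated, and exact spacing depends on the INI writer:

	[MetaSearches]
	LibreXInstances = librex.antopie.org
	Text = Google,Brave,DuckDuckGo
	Image = Qwant,Bing,DeviantArt
	Files = TorrentGalaxy,ThePirateBay
	Video = api.piped.yt,pipedapi.moomoo.me,pipedapi.darkness.services,...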
@@ -328,6 +373,13 @@ func loadConfig() Config {
	crawlerProxyStrict := getConfigValueBool(cfg.Section("Proxies").Key("CrawlerProxyStrict"), defaultConfig.CrawlerProxyStrict)
	crawlerProxies := strings.Split(getConfigValueString(cfg.Section("Proxies").Key("CrawlerProxies"), ""), ",")

	// MetaSearch
	searchXInstances := strings.Split(getConfigValueString(cfg.Section("MetaSearches").Key("LibreXInstances"), strings.Join(defaultConfig.LibreXInstances, ",")), ",")
	textList := strings.Split(getConfigValueString(cfg.Section("MetaSearch").Key("Text"), strings.Join(defaultConfig.MetaSearch.Text, ",")), ",")
	imageList := strings.Split(getConfigValueString(cfg.Section("MetaSearch").Key("Image"), strings.Join(defaultConfig.MetaSearch.Image, ",")), ",")
	filesList := strings.Split(getConfigValueString(cfg.Section("MetaSearch").Key("Files"), strings.Join(defaultConfig.MetaSearch.Files, ",")), ",")
	videoList := strings.Split(getConfigValueString(cfg.Section("MetaSearch").Key("Video"), strings.Join(defaultConfig.MetaSearch.Video, ",")), ",")

	// Indexing
	concurrentStandardCrawlers := getConfigValue(cfg.Section("Indexer").Key("ConcurrentStandardCrawlers"), defaultConfig.ConcurrentStandardCrawlers, strconv.Atoi)
	concurrentChromeCrawlers := getConfigValue(cfg.Section("Indexer").Key("ConcurrentChromeCrawlers"), defaultConfig.ConcurrentChromeCrawlers, strconv.Atoi)
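The values read back here are split on commas only, so any whitespace around a separator survives strings.Split. A small normalisation helper along these lines (hypothetical, not part of the commit; relies on the strings package already imported above) could be applied to each list before use:

	// splitList splits a comma-separated config value, trims whitespace,
	// drops empty entries, and falls back to the given defaults when the
	// raw value is empty. Hypothetical helper, not part of this commit.
	func splitList(raw string, defaults []string) []string {
		if strings.TrimSpace(raw) == "" {
			return defaults
		}
		var out []string
		for _, part := range strings.Split(raw, ",") {
			if p := strings.TrimSpace(part); p != "" {
				out = append(out, p)
			}
		}
		return out
	}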
@@ -371,6 +423,13 @@ func loadConfig() Config {
		CrawlingInterval: crawlingInterval,
		MaxPagesPerDomain: maxPagesPerDomain,
		IndexBatchSize: indexBatchSize,
		LibreXInstances: searchXInstances,
		MetaSearch: MetaSearchConfig{
			Text: textList,
			Image: imageList,
			Files: filesList,
			Video: videoList,
		},
		DriveCache: CacheConfig{
			Duration: driveDuration,
			MaxUsageBytes: driveMaxUsage,