Marked self-crawling as experimental, cleaned up unused features
Some checks failed
Run Integration Tests / test (push) Failing after 1m15s

This commit is contained in:
partisan 2025-06-08 22:12:15 +02:00
parent ca87df5df1
commit 49cb7bb94a
27 changed files with 1731 additions and 832 deletions

47
init.go
View file

@@ -1,3 +1,6 @@
//go:build !experimental
// +build !experimental
package main
import (
@@ -51,22 +54,7 @@ func main() {
config.Domain = *domainFlag
}
loadNodeConfig()
if config.AuthCode == "" {
config.AuthCode = generateStrongRandomString(64)
printInfo("Generated connection code: %s\n", config.AuthCode)
saveConfig(config)
}
// Generate Host ID
hostID, nodeErr := generateHostID()
if nodeErr != nil {
printErr("Failed to generate host ID: %v", nodeErr)
}
config.PeerID = hostID
if config.CrawlerProxyEnabled || config.MetaProxyEnabled {
if config.MetaProxyEnabled {
InitProxies()
}
@@ -87,14 +75,6 @@ func main() {
InitializeLanguage("en") // Initialize language before generating OpenSearch
generateOpenSearchXML(config)
// Start the node client only if NodesEnabled is true
if config.NodesEnabled {
go startNodeClient()
printInfo("Node client started.")
} else {
printInfo("Node client is disabled.")
}
// Check if the cache directory exists when caching is enabled
if config.DriveCacheEnabled {
cacheDir := config.DriveCache.Path
@@ -129,24 +109,5 @@ func main() {
printInfo("RAM cache is disabled.")
}
// Init indexer
if config.IndexerEnabled {
if err := downloadAndSetupDomainsCSV(); err != nil {
printErr("Failed to set up domains.csv: %v", err)
return
}
err := InitIndex()
if err != nil {
printErr("Failed to initialize index: %v", err)
}
webCrawlerInit()
printInfo("Indexer is enabled.")
} else {
printInfo("Indexer is disabled.")
}
runServer()
}