added website crawling and indexing crawled results
parent 5b90a372a1
commit 047cccd19f

10 changed files with 819 additions and 97 deletions
config.go (25 changed lines)

@@ -30,6 +30,7 @@ type Config struct {
     Domain string // Added
     NodesEnabled bool // Added
     CrawlerEnabled bool // Added
+    IndexerEnabled bool // Added
     WebsiteEnabled bool // Added
     RamCacheEnabled bool
     DriveCacheEnabled bool // Added

@@ -46,6 +47,7 @@ var defaultConfig = Config{
     AuthCode: generateStrongRandomString(64),
     NodesEnabled: false,
     CrawlerEnabled: true,
+    IndexerEnabled: false,
     WebsiteEnabled: true,
     RamCacheEnabled: true,
     DriveCacheEnabled: false,

@@ -105,6 +107,15 @@ func createConfig() error {
         config.Domain = defaultConfig.Domain
     }
 
+    // printMessage("Use Indexer? (YES/no): ")
+    // indexerChoice, _ := reader.ReadString('\n')
+    // indexerChoice = strings.TrimSpace(strings.ToLower(indexerChoice))
+    // if indexerChoice == "no" {
+    // config.IndexerEnabled = false
+    // } else {
+    // config.IndexerEnabled = true
+    // }
+
     // Cache settings
     printMessage("Would you like to configure Cache settings (yes/NO): ")
     configureCache, _ := reader.ReadString('\n')

@@ -181,7 +192,7 @@ func createConfig() error {
     } else {
         config.DriveCache.MaxUsageBytes = parseMaxUsageDrive(driveMaxUsage, drivePath)
         if config.DriveCache.MaxUsageBytes == 0 {
-            printWarn("Invalid DriveCache max usage, using default (1 TiB).")
+            printWarn("Invalid DriveCache max usage, using default.")
             config.DriveCache.MaxUsageBytes = defaultConfig.DriveCache.MaxUsageBytes
         }
     }

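parseMaxUsageDrive is defined elsewhere in the repository and its exact behaviour is not shown in this diff; the fallback above only relies on it returning 0 for input it cannot parse. A rough, self-contained sketch of that kind of size parsing follows. It is a hypothetical helper, not the project's implementation, and it ignores the drivePath argument the real function takes (presumably used for percentage-of-disk inputs).

package main

import (
    "fmt"
    "strconv"
    "strings"
)

// parseSizeToBytes is a hypothetical stand-in for parseMaxUsageDrive:
// it turns strings such as "1 TiB" or "512 MiB" into a byte count and
// returns 0 for anything it cannot parse, which is the sentinel the
// caller above checks before falling back to the default.
func parseSizeToBytes(s string) uint64 {
    fields := strings.Fields(strings.TrimSpace(s))
    if len(fields) != 2 {
        return 0
    }
    n, err := strconv.ParseFloat(fields[0], 64)
    if err != nil || n <= 0 {
        return 0
    }
    units := map[string]uint64{
        "KiB": 1 << 10,
        "MiB": 1 << 20,
        "GiB": 1 << 30,
        "TiB": 1 << 40,
    }
    mult, ok := units[fields[1]]
    if !ok {
        return 0
    }
    return uint64(n * float64(mult))
}

func main() {
    fmt.Println(parseSizeToBytes("1 TiB"))  // 1099511627776
    fmt.Println(parseSizeToBytes("banana")) // 0
}
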
@@ -201,13 +212,6 @@ func createConfig() error {
         printMessage("Generated connection code: %s\n", config.AuthCode)
     }
-
-    // Set other default values
-    config.NodesEnabled = defaultConfig.NodesEnabled
-    config.CrawlerEnabled = defaultConfig.CrawlerEnabled
-    config.WebsiteEnabled = defaultConfig.WebsiteEnabled
-    config.LogLevel = defaultConfig.LogLevel
-
     // Save configuration to file
     saveConfig(config)
     printInfo("Configuration saved successfully.")
     return nil

@@ -232,6 +236,7 @@ func saveConfig(config Config) {
     featuresSec := cfg.Section("Features")
     featuresSec.Key("Nodes").SetValue(strconv.FormatBool(config.NodesEnabled))
     featuresSec.Key("Crawler").SetValue(strconv.FormatBool(config.CrawlerEnabled))
+    featuresSec.Key("Indexer").SetValue(strconv.FormatBool(config.IndexerEnabled))
     featuresSec.Key("Website").SetValue(strconv.FormatBool(config.WebsiteEnabled))
     featuresSec.Key("RamCache").SetValue(strconv.FormatBool(config.RamCacheEnabled))
     featuresSec.Key("DriveCache").SetValue(strconv.FormatBool(config.DriveCacheEnabled))

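saveConfig writes the INI file through the go-ini (gopkg.in/ini.v1) API that the Key(...).SetValue(...) calls above come from. A minimal, self-contained sketch of just the [Features] block, using literal values in place of the Config fields and a placeholder filename rather than the project's real config path:

package main

import (
    "fmt"
    "strconv"

    "gopkg.in/ini.v1"
)

func main() {
    // Mirrors the SetValue calls in saveConfig for the [Features] section.
    cfg := ini.Empty()
    sec := cfg.Section("Features")
    sec.Key("Nodes").SetValue(strconv.FormatBool(false))
    sec.Key("Crawler").SetValue(strconv.FormatBool(true))
    sec.Key("Indexer").SetValue(strconv.FormatBool(false))
    sec.Key("Website").SetValue(strconv.FormatBool(true))
    sec.Key("RamCache").SetValue(strconv.FormatBool(true))
    sec.Key("DriveCache").SetValue(strconv.FormatBool(false))

    // Writes something along the lines of:
    //   [Features]
    //   Nodes      = false
    //   Crawler    = true
    //   Indexer    = false
    //   Website    = true
    //   RamCache   = true
    //   DriveCache = false
    if err := cfg.SaveTo("features-example.ini"); err != nil {
        fmt.Println("save failed:", err)
    }
}

Storing the booleans with strconv.FormatBool keeps the file values as the literal strings true and false, which Key(...).Bool() parses back on load.
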
@@ -273,6 +278,7 @@ func loadConfig() Config {
     // Features
     nodesEnabled, _ := cfg.Section("Features").Key("Nodes").Bool()
     crawlerEnabled, _ := cfg.Section("Features").Key("Crawler").Bool()
+    indexerEnabled, _ := cfg.Section("Features").Key("Indexer").Bool()
     websiteEnabled, _ := cfg.Section("Features").Key("Website").Bool()
     ramCacheEnabled, _ := cfg.Section("Features").Key("RamCache").Bool()
     driveCacheEnabled, _ := cfg.Section("Features").Key("DriveCache").Bool()

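Key(...).Bool() returns (bool, error), and the error is discarded here, so a key that is missing or unparsable simply loads as false. If falling back to the documented defaults is preferred, go-ini's MustBool accepts a default value; a sketch of that variant (the config.ini path is a placeholder, and the defaults mirror defaultConfig above):

package main

import (
    "fmt"

    "gopkg.in/ini.v1"
)

func main() {
    cfg, err := ini.Load("config.ini") // placeholder path
    if err != nil {
        fmt.Println("load failed:", err)
        return
    }

    // MustBool returns the parsed value when the key is present and valid,
    // otherwise the supplied default.
    features := cfg.Section("Features")
    fmt.Println("Nodes:     ", features.Key("Nodes").MustBool(false))
    fmt.Println("Crawler:   ", features.Key("Crawler").MustBool(true))
    fmt.Println("Indexer:   ", features.Key("Indexer").MustBool(false))
    fmt.Println("Website:   ", features.Key("Website").MustBool(true))
    fmt.Println("RamCache:  ", features.Key("RamCache").MustBool(true))
    fmt.Println("DriveCache:", features.Key("DriveCache").MustBool(false))
}
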
@@ -294,10 +300,11 @@ func loadConfig() Config {
         Port: port,
         Domain: domain,
         LogLevel: logLevel,
-        AuthCode: authCode, // Assign AuthCode here
+        AuthCode: authCode,
         Peers: peers,
         NodesEnabled: nodesEnabled,
         CrawlerEnabled: crawlerEnabled,
+        IndexerEnabled: indexerEnabled,
         WebsiteEnabled: websiteEnabled,
         RamCacheEnabled: ramCacheEnabled,
         DriveCacheEnabled: driveCacheEnabled,

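With the flags carried on Config, callers can gate the optional subsystems after loadConfig returns. A hypothetical wiring sketch follows; the start callbacks are illustrative parameters, not functions added by this commit, and the fragment compiles against the Config type defined in this file.

// runEnabledServices is a sketch only: it shows how the loaded feature
// flags could decide which subsystems start. The start* callbacks are
// hypothetical and passed in so the fragment stays self-contained.
func runEnabledServices(cfg Config, startCrawler, startIndexer, startWebsite func(Config)) {
    if cfg.CrawlerEnabled {
        go startCrawler(cfg) // crawl configured sites in the background
    }
    if cfg.IndexerEnabled {
        go startIndexer(cfg) // index crawled results for searching
    }
    if cfg.WebsiteEnabled {
        startWebsite(cfg) // serve the web frontend
    }
}
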