updated 'config.ini'

partisan 2024-11-26 07:46:03 +01:00
parent 28f71271d7
commit be4f86580e
13 changed files with 635 additions and 208 deletions

1
.gitignore vendored

@ -2,5 +2,6 @@ config.json
opensearch.xml
config.ini
image_cache/
cache/
*.min.js
*.min.css


@ -13,6 +13,7 @@ import (
"net/http"
"os"
"path/filepath"
"sort"
"strings"
"sync"
"time"
@ -23,9 +24,9 @@ import (
)
var (
cachingImages = make(map[string]*sync.Mutex)
cachingImagesMu sync.Mutex
cachingSemaphore = make(chan struct{}, 100) // Limit to concurrent downloads
cachingImages = make(map[string]*sync.Mutex)
cachingImagesMu sync.Mutex
// cachingSemaphore = make(chan struct{}, 100) // Limit to concurrent downloads
invalidImageIDs = make(map[string]struct{})
invalidImageIDsMu sync.Mutex
@ -35,8 +36,6 @@ var (
)
func cacheImage(imageURL, imageID string, isThumbnail bool) (string, bool, error) {
cacheDir := "image_cache"
if imageURL == "" {
recordInvalidImageID(imageID)
return "", false, fmt.Errorf("empty image URL for image ID %s", imageID)
@ -50,7 +49,7 @@ func cacheImage(imageURL, imageID string, isThumbnail bool) (string, bool, error
filename = fmt.Sprintf("%s_full.webp", imageID)
}
cachedImagePath := filepath.Join(cacheDir, filename)
cachedImagePath := filepath.Join(config.DriveCache.Path, filename)
tempImagePath := cachedImagePath + ".tmp"
// Check if the image is already cached
@ -74,8 +73,9 @@ func cacheImage(imageURL, imageID string, isThumbnail bool) (string, bool, error
return cachedImagePath, true, nil
}
cachingSemaphore <- struct{}{} // Acquire a token
defer func() { <-cachingSemaphore }() // Release the token
// // Limit max concurrent downloads
// cachingSemaphore <- struct{}{} // Acquire a token
// defer func() { <-cachingSemaphore }() // Release the token
// Create a custom http.Client that skips SSL certificate verification
client := &http.Client{
@ -109,11 +109,6 @@ func cacheImage(imageURL, imageID string, isThumbnail bool) (string, bool, error
// Handle SVG files directly
if contentType == "image/svg+xml" {
// Ensure the cache directory exists
if _, err := os.Stat(cacheDir); os.IsNotExist(err) {
os.Mkdir(cacheDir, os.ModePerm)
}
// Save the SVG file as-is to the temp path
err = os.WriteFile(tempImagePath, data, 0644)
if err != nil {
@ -161,10 +156,11 @@ func cacheImage(imageURL, imageID string, isThumbnail bool) (string, bool, error
return "", false, fmt.Errorf("failed to decode image: %v", err)
}
// Ensure the cache directory exists
if _, err := os.Stat(cacheDir); os.IsNotExist(err) {
os.Mkdir(cacheDir, os.ModePerm)
}
// This is not working
// // Ensure the cache directory exists
// if _, err := os.Stat(config.DriveCache.Path); os.IsNotExist(err) {
// os.Mkdir(config.DriveCache.Path, os.ModePerm)
// }
// Open the temp file for writing
outFile, err := os.Create(tempImagePath)
@ -220,14 +216,19 @@ func handleImageServe(w http.ResponseWriter, r *http.Request) {
imageID = parts[0]
imageType = parts[1]
cacheDir := "image_cache"
filename := fmt.Sprintf("%s_%s.webp", imageID, imageType)
cachedImagePath := filepath.Join(cacheDir, filename)
cachedImagePath := filepath.Join(config.DriveCache.Path, filename)
if hasExtension && imageType == "thumb" {
// Requesting cached thumbnail image
// Requesting cached image (thumbnail or full)
if _, err := os.Stat(cachedImagePath); err == nil {
// Cached image exists, serve it
// Update the modification time to now
err := os.Chtimes(cachedImagePath, time.Now(), time.Now())
if err != nil {
printWarn("Failed to update modification time for %s: %v", cachedImagePath, err)
}
// Determine content type based on file extension
contentType := "image/webp"
w.Header().Set("Content-Type", contentType)
w.Header().Set("Cache-Control", "public, max-age=31536000")
@ -235,12 +236,12 @@ func handleImageServe(w http.ResponseWriter, r *http.Request) {
return
} else {
// Cached image not found
if config.HardCacheEnabled {
if config.DriveCacheEnabled {
// Thumbnail should be cached, but not found
serveMissingImage(w, r)
return
}
// Else, proceed to proxy (if HardCacheEnabled is false)
// Else, proceed to proxy if caching is disabled
}
}
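The os.Chtimes call above is what lets the drive-cache eviction later in this file treat ModTime as a last-access timestamp: serving a cached file bumps its mtime, so cleanupCache's oldest-first sort approximates LRU without depending on atime (often unavailable on noatime mounts). A minimal sketch of the pattern, with a hypothetical path:
// Touch on read so ModTime doubles as "last served at"; a failed
// touch only weakens eviction ordering, so serving continues anyway.
if err := os.Chtimes("./cache/abc123_thumb.webp", time.Now(), time.Now()); err != nil {
printWarn("Failed to update modification time: %v", err)
}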
@ -260,7 +261,7 @@ func handleImageServe(w http.ResponseWriter, r *http.Request) {
}
// For thumbnails, if DriveCacheEnabled is true, and image not cached, serve missing image
if imageType == "thumb" && config.HardCacheEnabled {
if imageType == "thumb" && config.DriveCacheEnabled {
// Thumbnail should be cached, but not found
serveMissingImage(w, r)
return
@ -311,14 +312,13 @@ func handleImageStatus(w http.ResponseWriter, r *http.Request) {
}
// Check for cached full or thumbnail images
cacheDir := "image_cache"
extensions := []string{"webp", "svg"} // Extensions without leading dots
imageReady := false
// Check thumbnail first
for _, ext := range extensions {
thumbFilename := fmt.Sprintf("%s_thumb.%s", id, ext)
thumbPath := filepath.Join(cacheDir, thumbFilename)
thumbPath := filepath.Join(config.DriveCache.Path, thumbFilename)
if _, err := os.Stat(thumbPath); err == nil {
statusMap[id] = fmt.Sprintf("/image/%s_thumb.%s", id, ext)
@ -331,7 +331,7 @@ func handleImageStatus(w http.ResponseWriter, r *http.Request) {
if !imageReady {
for _, ext := range extensions {
fullFilename := fmt.Sprintf("%s_full.%s", id, ext)
fullPath := filepath.Join(cacheDir, fullFilename)
fullPath := filepath.Join(config.DriveCache.Path, fullFilename)
if _, err := os.Stat(fullPath); err == nil {
statusMap[id] = fmt.Sprintf("/image/%s_full.%s", id, ext)
@ -343,7 +343,7 @@ func handleImageStatus(w http.ResponseWriter, r *http.Request) {
// If neither is ready
if !imageReady {
if !config.HardCacheEnabled {
if !config.DriveCacheEnabled {
// Drive cache is disabled; use the proxy URL
statusMap[id] = fmt.Sprintf("/image/%s_thumb", id)
} else {
@ -424,6 +424,77 @@ func removeImageResultFromCache(query string, page int, safe bool, lang string,
}
}
func cleanExpiredCachedImages() {
if config.DriveCache.Duration <= 0 && config.DriveCache.MaxUsageBytes <= 0 {
return // No cleanup needed if both duration and max usage are disabled
}
ticker := time.NewTicker(1 * time.Hour)
defer ticker.Stop()
for range ticker.C {
cleanupCache()
}
}
func cleanupCache() {
files, err := os.ReadDir(config.DriveCache.Path)
if err != nil {
printErr("Failed to read DriveCache directory: %v", err)
return
}
var totalSize uint64
fileInfos := make([]os.FileInfo, 0, len(files))
for _, file := range files {
info, err := file.Info()
if err != nil {
continue
}
filePath := filepath.Join(config.DriveCache.Path, file.Name())
// Check for expired files based on modification time
if config.DriveCache.Duration > 0 && time.Since(info.ModTime()) > config.DriveCache.Duration {
if err := os.Remove(filePath); err == nil {
printDebug("Removed expired cache file: %s", filePath)
} else {
printErr("Failed to remove expired cache file: %s", filePath)
}
continue // Skip adding this file to the list
}
// Accumulate total size and store file info for potential deletion
totalSize += uint64(info.Size())
fileInfos = append(fileInfos, info)
}
// If total size exceeds MaxUsageBytes, delete least recently used files
if config.DriveCache.MaxUsageBytes > 0 && totalSize > config.DriveCache.MaxUsageBytes {
// Sort files by last access time (oldest first)
sort.Slice(fileInfos, func(i, j int) bool {
return fileInfos[i].ModTime().Before(fileInfos[j].ModTime())
})
for _, info := range fileInfos {
if totalSize <= config.DriveCache.MaxUsageBytes {
break
}
filePath := filepath.Join(config.DriveCache.Path, info.Name())
fileSize := uint64(info.Size())
if err := os.Remove(filePath); err == nil {
totalSize -= fileSize
printDebug("Removed cache file to reduce size: %s", filePath)
} else {
printErr("Failed to remove cache file: %s", filePath)
}
}
}
}
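To make the size-based pass concrete, a hypothetical run of cleanupCache (all numbers invented):
// MaxUsageBytes = 1 GiB; cache holds a.webp (600 MiB, oldest mtime),
// b.webp (300 MiB) and c.webp (200 MiB, newest), so totalSize = 1100 MiB.
// Expired files were already deleted in the first loop, so only the
// size budget applies: a.webp goes first (oldest), totalSize drops to
// 500 MiB <= 1024 MiB, the loop stops, and b.webp and c.webp survive.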
func getContentType(ext string) string {
switch strings.ToLower(ext) {
case "svg":


@ -8,11 +8,6 @@ import (
"github.com/shirou/gopsutil/mem"
)
var (
resultsCache = NewResultsCache(6 * time.Hour) // Cache with 6-hour expiration
maxMemoryUsage = 90.0 // Maximum memory usage in %
)
// SearchResult is a generic interface for all types of search results.
type SearchResult interface{}
@ -89,11 +84,14 @@ type ResultsCache struct {
expiration time.Duration
}
var resultsCache *ResultsCache
// NewResultsCache creates a new ResultsCache with a specified expiration duration.
func NewResultsCache(expiration time.Duration) *ResultsCache {
func NewResultsCache() *ResultsCache {
printDebug("Initializing results cache with expiration: %s and max usage: %d bytes", config.RamCache.Duration, config.RamCache.MaxUsageBytes)
return &ResultsCache{
results: make(map[string]CachedItem),
expiration: expiration,
expiration: config.RamCache.Duration,
}
}
@ -108,8 +106,9 @@ func (rc *ResultsCache) Get(key CacheKey) ([]SearchResult, bool) {
}
// Check if the item has expired
if time.Since(item.StoredTime) > rc.expiration {
if time.Since(item.StoredTime) > config.RamCache.Duration {
delete(rc.results, rc.keyToString(key))
printDebug("Cache expired for key: %s", rc.keyToString(key))
return nil, false
}
@ -135,12 +134,24 @@ func (rc *ResultsCache) keyToString(key CacheKey) string {
return fmt.Sprintf("%s|%d|%t|%s|%s", key.Query, key.Page, key.Safe, key.Lang, key.Type)
}
// checkAndCleanCache removes items if memory usage exceeds the limit.
func (rc *ResultsCache) checkAndCleanCache() {
if rc.memoryUsage() > maxMemoryUsage {
for rc.currentMemoryUsage() > config.RamCache.MaxUsageBytes {
rc.cleanOldestItems()
}
}
// currentMemoryUsage calculates the current memory usage in bytes.
func (rc *ResultsCache) currentMemoryUsage() uint64 {
v, err := mem.VirtualMemory()
if err != nil {
printErr("Failed to get memory info: %v", err)
return 0
}
return v.Used // Used memory in bytes
}
// memoryUsage calculates the current memory usage as a percentage.
func (rc *ResultsCache) memoryUsage() float64 {
v, err := mem.VirtualMemory()
if err != nil {
@ -155,7 +166,7 @@ func (rc *ResultsCache) cleanOldestItems() {
rc.mu.Lock()
defer rc.mu.Unlock()
for rc.memoryUsage() > maxMemoryUsage {
for rc.currentMemoryUsage() > config.RamCache.MaxUsageBytes {
var oldestKey string
var oldestTime time.Time = time.Now()


@ -65,6 +65,7 @@ func renderTemplate(w http.ResponseWriter, tmplName string, data map[string]inte
}
}
// Random string generator used for the auth code
func generateStrongRandomString(length int) string {
bytes := make([]byte, length)
_, err := rand.Read(bytes)
@ -84,7 +85,7 @@ func isLocalAddress(domain string) bool {
return domain == "localhost" || strings.HasPrefix(domain, "127.") || strings.HasPrefix(domain, "192.168.") || strings.HasPrefix(domain, "10.")
}
// Ensures that HTTP or HTTPS is befor the adress if needed
// Ensures that HTTP or HTTPS is before the address if needed
func addProtocol(domain string) string {
if hasProtocol(domain) {
return domain

418
config.go

@ -2,20 +2,74 @@ package main
import (
"bufio"
"fmt"
"os"
"path/filepath"
"strconv"
"strings"
"syscall"
"time"
"github.com/shirou/gopsutil/mem"
"gopkg.in/ini.v1"
)
var configFilePath = "./config.ini"
type CacheConfig struct {
Duration time.Duration
MaxUsageBytes uint64 // Store as bytes for uniformity
Path string
}
type Config struct {
Port int // Added
AuthCode string // Added
PeerID string // Added
Peers []string
Domain string // Added
NodesEnabled bool // Added
CrawlerEnabled bool // Added
WebsiteEnabled bool // Added
RamCacheEnabled bool
DriveCacheEnabled bool // Added
LogLevel int // Added
DriveCache CacheConfig
RamCache CacheConfig
}
var defaultConfig = Config{
Port: 5000,
Domain: "localhost",
Peers: []string{},
AuthCode: generateStrongRandomString(64),
NodesEnabled: false,
CrawlerEnabled: true,
WebsiteEnabled: true,
RamCacheEnabled: true,
DriveCacheEnabled: false,
LogLevel: 1,
DriveCache: CacheConfig{
Duration: 48 * time.Hour, // Added
Path: "./cache", // Added
MaxUsageBytes: parseMaxUsageDrive("90 %", config.DriveCache.Path), // Added
},
RamCache: CacheConfig{
Duration: 6 * time.Hour, // Added
MaxUsageBytes: parseMaxUsageRam("90%"), // Added
},
}
func initConfig() error {
// Check if the configuration file exists
if _, err := os.Stat(configFilePath); os.IsNotExist(err) {
// If not, create a new configuration
return createConfig()
}
printInfo("Configuration file already exists.")
printInfo("Configuration file already exists. Loading configuration.")
// Load existing configuration
config = loadConfig()
return nil
}
@ -23,60 +77,180 @@ func initConfig() error {
func createConfig() error {
reader := bufio.NewReader(os.Stdin)
printMessage("Configuration file not found.")
printMessage("Do you want to use default values? (yes/no): ")
printMessage("Configuration file not found. Let's set it up.")
printMessage("Do you want to use default values? (yes/NO): ")
useDefaults, _ := reader.ReadString('\n')
if strings.TrimSpace(useDefaults) != "yes" {
if strings.TrimSpace(strings.ToLower(useDefaults)) != "yes" {
// Server settings
printMessage("Enter port (default 5000): ")
portStr, _ := reader.ReadString('\n')
if portStr != "\n" {
port, err := strconv.Atoi(strings.TrimSpace(portStr))
if err != nil {
config.Port = 5000
} else {
portStr = strings.TrimSpace(portStr)
if portStr != "" {
port, err := strconv.Atoi(portStr)
if err == nil {
config.Port = port
} else {
printWarn("Invalid port, using default (5000).")
config.Port = defaultConfig.Port
}
} else {
config.Port = defaultConfig.Port
}
printMessage("Enter your domain address (default localhost): ")
domain, _ := reader.ReadString('\n')
if domain != "\n" {
config.Domain = strings.TrimSpace(domain)
config.Domain = strings.TrimSpace(domain)
if config.Domain == "" {
config.Domain = defaultConfig.Domain
}
// Cache settings
printMessage("Would you like to configure Cache settings (yes/NO): ")
configureCache, _ := reader.ReadString('\n')
if strings.TrimSpace(strings.ToLower(configureCache)) == "yes" {
// RamCache settings
printMessage("Enter duration to store results in Ram (default 6h): ")
ramDurationStr, _ := reader.ReadString('\n')
ramDurationStr = strings.TrimSpace(ramDurationStr)
if ramDurationStr == "" {
config.RamCache.Duration = defaultConfig.RamCache.Duration
config.RamCacheEnabled = true
} else if ramDurationStr == "0h" {
config.RamCacheEnabled = false
} else {
ramDuration, err := time.ParseDuration(ramDurationStr)
if err != nil {
printWarn("Invalid duration, using default (6h).")
config.RamCache.Duration = defaultConfig.RamCache.Duration
config.RamCacheEnabled = true
} else {
config.RamCache.Duration = ramDuration
config.RamCacheEnabled = true
}
}
printMessage("Enter RamCache max usage, e.g., 2 GiB or 80%% (default 90%%): ")
ramMaxUsage, _ := reader.ReadString('\n')
ramMaxUsage = strings.TrimSpace(ramMaxUsage)
if ramMaxUsage == "" {
config.RamCache.MaxUsageBytes = defaultConfig.RamCache.MaxUsageBytes
} else if ramMaxUsage == "0" || parseMaxUsageRam(ramMaxUsage) == 0 {
config.RamCacheEnabled = false
} else {
config.RamCache.MaxUsageBytes = parseMaxUsageRam(ramMaxUsage)
if config.RamCache.MaxUsageBytes == 0 {
printWarn("Invalid RamCache max usage, using default (90%%).")
config.RamCache.MaxUsageBytes = defaultConfig.RamCache.MaxUsageBytes
}
}
// DriveCache settings
printMessage("Enter duration to store results in DriveCache (default 0h): ")
driveDurationStr, _ := reader.ReadString('\n')
driveDurationStr = strings.TrimSpace(driveDurationStr)
if driveDurationStr == "" {
config.DriveCache.Duration = defaultConfig.DriveCache.Duration
config.DriveCacheEnabled = config.DriveCache.Duration > 0
} else if driveDurationStr == "0h" {
config.DriveCacheEnabled = false
} else {
driveDuration, err := time.ParseDuration(driveDurationStr)
if err != nil {
printWarn("Invalid duration, using default (48h).")
config.DriveCache.Duration = defaultConfig.DriveCache.Duration
config.DriveCacheEnabled = config.DriveCache.Duration > 0
} else {
config.DriveCache.Duration = driveDuration
config.DriveCacheEnabled = config.DriveCache.Duration > 0
}
}
printMessage("Enter DriveCache path (default ./cache): ")
drivePath, _ := reader.ReadString('\n')
drivePath = strings.TrimSpace(drivePath)
if drivePath == "" {
config.DriveCache.Path = defaultConfig.DriveCache.Path
} else {
config.DriveCache.Path = drivePath
}
printMessage("Enter DriveCache max usage, e.g., 2 GiB or 90%% (default 90%%): ")
driveMaxUsage, _ := reader.ReadString('\n')
driveMaxUsage = strings.TrimSpace(driveMaxUsage)
if driveMaxUsage == "" {
config.DriveCache.MaxUsageBytes = defaultConfig.DriveCache.MaxUsageBytes
} else if driveMaxUsage == "0" || parseMaxUsageDrive(driveMaxUsage, drivePath) == 0 {
config.DriveCacheEnabled = false
} else {
config.DriveCache.MaxUsageBytes = parseMaxUsageDrive(driveMaxUsage, drivePath)
if config.DriveCache.MaxUsageBytes == 0 {
printWarn("Invalid DriveCache max usage, using default (1 TiB).")
config.DriveCache.MaxUsageBytes = defaultConfig.DriveCache.MaxUsageBytes
}
}
} else {
printInfo("Cache settings skipped. Using default values.")
config.RamCache = defaultConfig.RamCache
config.DriveCache = defaultConfig.DriveCache
}
} else {
// Use default configuration
config = defaultConfig
}
// Generate AuthCode if missing
if config.AuthCode == "" {
config.AuthCode = generateStrongRandomString(64)
printMessage("Generated connection code: %s\n", config.AuthCode)
}
config.NodesEnabled = len(config.Peers) > 0
config.CrawlerEnabled = true
config.WebsiteEnabled = true
config.LogLevel = 1
// Set other default values
config.NodesEnabled = defaultConfig.NodesEnabled
config.CrawlerEnabled = defaultConfig.CrawlerEnabled
config.WebsiteEnabled = defaultConfig.WebsiteEnabled
config.LogLevel = defaultConfig.LogLevel
// Save configuration to file
saveConfig(config)
printInfo("Configuration saved successfully.")
return nil
}
func saveConfig(config Config) {
cfg := ini.Empty()
sec := cfg.Section("")
sec.Key("Port").SetValue(strconv.Itoa(config.Port))
sec.Key("AuthCode").SetValue(config.AuthCode)
sec.Key("PeerID").SetValue(config.PeerID)
peers := strings.Join(config.Peers, ",")
sec.Key("Peers").SetValue(peers)
// Server section
sec := cfg.Section("Server")
sec.Key("Port").SetValue(strconv.Itoa(config.Port))
sec.Key("Domain").SetValue(config.Domain)
sec.Key("NodesEnabled").SetValue(strconv.FormatBool(config.NodesEnabled))
sec.Key("CrawlerEnabled").SetValue(strconv.FormatBool(config.CrawlerEnabled))
sec.Key("WebsiteEnabled").SetValue(strconv.FormatBool(config.WebsiteEnabled))
sec.Key("LogLevel").SetValue(strconv.Itoa(config.LogLevel))
sec.Key("HardCacheDuration").SetValue(config.HardCacheDuration.String())
// Peers section
peersSec := cfg.Section("Peers")
peersSec.Key("AuthCode").SetValue(config.AuthCode)
peersSec.Key("PeerID").SetValue(config.PeerID)
peersSec.Key("Peers").SetValue(strings.Join(config.Peers, ","))
// Features section
featuresSec := cfg.Section("Features")
featuresSec.Key("Nodes").SetValue(strconv.FormatBool(config.NodesEnabled))
featuresSec.Key("Crawler").SetValue(strconv.FormatBool(config.CrawlerEnabled))
featuresSec.Key("Website").SetValue(strconv.FormatBool(config.WebsiteEnabled))
featuresSec.Key("RamCache").SetValue(strconv.FormatBool(config.RamCacheEnabled))
featuresSec.Key("DriveCache").SetValue(strconv.FormatBool(config.DriveCacheEnabled))
// DriveCache section
driveSec := cfg.Section("DriveCache")
driveSec.Key("Duration").SetValue(config.DriveCache.Duration.String())
driveSec.Key("MaxUsage").SetValue(formatMaxUsage(config.DriveCache.MaxUsageBytes))
driveSec.Key("Path").SetValue(config.DriveCache.Path)
// driveSec.Key("MaxConcurrentDownloads.Thumbnail").SetValue(strconv.Itoa(config.DriveCache.MaxConcurrentThumbnailDownloads))
// RamCache section
ramSec := cfg.Section("RamCache")
ramSec.Key("Duration").SetValue(config.RamCache.Duration.String())
ramSec.Key("MaxUsage").SetValue(formatMaxUsage(config.RamCache.MaxUsageBytes))
err := cfg.SaveTo(configFilePath)
if err != nil {
@ -90,73 +264,153 @@ func loadConfig() Config {
printErr("Error opening config file: %v", err)
}
port, err := cfg.Section("").Key("Port").Int()
if err != nil || port == 0 {
port = 5000 // Default to 5000 if not set or error
}
// Server
port, _ := cfg.Section("Server").Key("Port").Int()
domain := cfg.Section("Server").Key("Domain").String()
logLevel, _ := cfg.Section("Server").Key("LogLevel").Int()
peersStr := cfg.Section("").Key("Peers").String()
var peers []string
if peersStr != "" {
peers = strings.Split(peersStr, ",")
for i, peer := range peers {
peers[i] = addProtocol(peer)
}
}
// Peers
authCode := cfg.Section("Peers").Key("AuthCode").String()
peersStr := cfg.Section("Peers").Key("Peers").String()
var peers []string
if peersStr != "" {
peers = strings.Split(peersStr, ",")
}
domain := cfg.Section("").Key("Domain").String()
if domain == "" {
domain = "localhost" // Default to localhost if not set
}
// Features
nodesEnabled, _ := cfg.Section("Features").Key("Nodes").Bool()
crawlerEnabled, _ := cfg.Section("Features").Key("Crawler").Bool()
websiteEnabled, _ := cfg.Section("Features").Key("Website").Bool()
ramCacheEnabled, _ := cfg.Section("Features").Key("RamCache").Bool()
driveCacheEnabled, _ := cfg.Section("Features").Key("DriveCache").Bool()
nodesEnabled, err := cfg.Section("").Key("NodesEnabled").Bool()
if err != nil { // If NodesEnabled is not found in config
nodesEnabled = len(peers) > 0 // Enable nodes if peers are configured
}
// DriveCache
driveDuration, _ := time.ParseDuration(cfg.Section("DriveCache").Key("Duration").String())
drivePath := cfg.Section("DriveCache").Key("Path").String()
driveMaxUsage := parseMaxUsageDrive(cfg.Section("DriveCache").Key("MaxUsage").String(), drivePath)
// maxConcurrentDownloads, _ := cfg.Section("DriveCache").Key("MaxConcurrentDownloads.Thumbnail").Int()
// if maxConcurrentDownloads == 0 {
// maxConcurrentDownloads = defaultConfig.DriveCache.MaxConcurrentThumbnailDownloads
// }
crawlerEnabled, err := cfg.Section("").Key("CrawlerEnabled").Bool()
if err != nil { // Default to true if not found
crawlerEnabled = true
}
// RamCache
ramDuration, _ := time.ParseDuration(cfg.Section("RamCache").Key("Duration").String())
ramMaxUsage := parseMaxUsageRam(cfg.Section("RamCache").Key("MaxUsage").String())
websiteEnabled, err := cfg.Section("").Key("WebsiteEnabled").Bool()
if err != nil { // Default to true if not found
websiteEnabled = true
}
logLevel, err := cfg.Section("").Key("LogLevel").Int()
if err != nil || logLevel < 0 || logLevel > 4 { // Default to 1 if not found or out of range
logLevel = 1
}
// Read HardCacheDuration
hardCacheStr := cfg.Section("").Key("HardCacheDuration").String()
var hardCacheDuration time.Duration
if hardCacheStr != "" {
duration, err := time.ParseDuration(hardCacheStr)
if err != nil {
printWarn("Invalid HardCacheDuration format, defaulting to 0: %v", err)
hardCacheDuration = 0
} else {
hardCacheDuration = duration
}
} else {
hardCacheDuration = 0 // Default to 0 if not set
}
config = Config{
return Config{
Port: port,
AuthCode: cfg.Section("").Key("AuthCode").String(),
PeerID: cfg.Section("").Key("PeerID").String(),
Peers: peers,
Domain: domain,
LogLevel: logLevel,
AuthCode: authCode, // Assign AuthCode here
Peers: peers,
NodesEnabled: nodesEnabled,
CrawlerEnabled: crawlerEnabled,
WebsiteEnabled: websiteEnabled,
LogLevel: logLevel,
HardCacheDuration: hardCacheDuration,
HardCacheEnabled: hardCacheDuration != 0,
RamCacheEnabled: ramCacheEnabled,
DriveCacheEnabled: driveCacheEnabled,
DriveCache: CacheConfig{
Duration: driveDuration,
MaxUsageBytes: driveMaxUsage,
Path: drivePath,
// MaxConcurrentThumbnailDownloads: maxConcurrentDownloads,
},
RamCache: CacheConfig{
Duration: ramDuration,
MaxUsageBytes: ramMaxUsage,
},
}
}
// Helper to parse MaxUsage string into bytes
func parseMaxUsageRam(value string) uint64 {
const GiB = 1024 * 1024 * 1024
value = strings.TrimSpace(value)
valueNoSpaces := strings.ReplaceAll(value, " ", "")
if strings.HasSuffix(valueNoSpaces, "%") {
percentStr := strings.TrimSuffix(valueNoSpaces, "%")
percent, err := strconv.ParseFloat(percentStr, 64)
if err != nil {
return 0
}
totalMem := getTotalMemory()
return uint64(float64(totalMem) * (percent / 100))
} else if strings.HasSuffix(valueNoSpaces, "GiB") {
sizeStr := strings.TrimSuffix(valueNoSpaces, "GiB")
size, err := strconv.ParseFloat(sizeStr, 64)
if err != nil {
return 0
}
return uint64(size * GiB)
}
return config
return 0
}
// Helper to parse MaxUsage string into bytes based on drive space
func parseMaxUsageDrive(value string, cachePath string) uint64 {
const GiB = 1024 * 1024 * 1024
value = strings.TrimSpace(value)
valueNoSpaces := strings.ReplaceAll(value, " ", "")
totalDiskSpace := getTotalDiskSpace(cachePath)
if totalDiskSpace == 0 {
printErr("Failed to retrieve disk space for path: %s", cachePath)
return 0
}
if strings.HasSuffix(valueNoSpaces, "%") {
percentStr := strings.TrimSuffix(valueNoSpaces, "%")
percent, err := strconv.ParseFloat(percentStr, 64)
if err != nil {
return 0
}
return uint64(float64(totalDiskSpace) * (percent / 100))
} else if strings.HasSuffix(valueNoSpaces, "GiB") {
sizeStr := strings.TrimSuffix(valueNoSpaces, "GiB")
size, err := strconv.ParseFloat(sizeStr, 64)
if err != nil {
return 0
}
return uint64(size * GiB)
}
return 0
}
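For reference, a few hypothetical inputs and what the two parsers return, assuming a machine with 16 GiB of RAM and a 1 TiB filesystem under the cache path (both figures invented):
// parseMaxUsageRam("90%")              → 15461882265 (0.90 × 16 GiB)
// parseMaxUsageRam("2 GiB")            → 2147483648 (spaces are stripped first)
// parseMaxUsageRam("2048")             → 0; no % or GiB suffix, treated as invalid
// parseMaxUsageDrive("50%", "./cache") → 549755813888 (0.50 × 1 TiB)
// parseMaxUsageDrive("10GiB", "/bad")  → 0 when Statfs fails for the path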
// Get total disk space of the system where cachePath resides
func getTotalDiskSpace(cachePath string) uint64 {
var stat syscall.Statfs_t
// Get filesystem stats for the cache path
absPath, err := filepath.Abs(cachePath)
if err != nil {
printErr("Failed to resolve absolute path for: %s", cachePath)
return 0
}
err = syscall.Statfs(absPath, &stat)
if err != nil {
printErr("Failed to retrieve filesystem stats for: %s", absPath)
return 0
}
// Total disk space in bytes
return stat.Blocks * uint64(stat.Bsize)
}
// Helper to format bytes back to human-readable string
func formatMaxUsage(bytes uint64) string {
const GiB = 1024 * 1024 * 1024
if bytes >= GiB {
return fmt.Sprintf("%.2fGiB", float64(bytes)/GiB)
}
return fmt.Sprintf("%dbytes", bytes)
}
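One round-trip caveat follows from the helpers above: formatMaxUsage writes sub-GiB values with a bytes suffix, but neither parser accepts that suffix, so such a value would load back as 0 (i.e., read as cache disabled). GiB-sized values survive the trip:
// formatMaxUsage(2 * 1024 * 1024 * 1024) → "2.00GiB" → parses back to 2147483648
// formatMaxUsage(500 * 1024 * 1024)      → "524288000 bytes" → parses back to 0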
// Get total memory of the system
func getTotalMemory() uint64 {
v, err := mem.VirtualMemory()
if err != nil {
printErr("Failed to retrieve system memory: %v", err)
return 0
}
return v.Total
}
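Putting the new sections together, a freshly generated config.ini would look roughly like this (the AuthCode is a placeholder, and both MaxUsage figures are machine-dependent because the defaults are percentages of local RAM and disk):

[Server]
Port = 5000
Domain = localhost
LogLevel = 1

[Peers]
AuthCode = <generated 64-char random string>
PeerID =
Peers =

[Features]
Nodes = false
Crawler = true
Website = true
RamCache = true
DriveCache = false

[DriveCache]
Duration = 48h0m0s
MaxUsage = 921.60GiB
Path = ./cache

[RamCache]
Duration = 6h0m0s
MaxUsage = 14.40GiB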


@ -76,10 +76,10 @@ func getFileResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string,
go func() {
results, exists := resultsCache.Get(cacheKey)
if exists {
printInfo("Cache hit")
printDebug("Cache hit")
cacheChan <- results
} else {
printInfo("Cache miss")
printDebug("Cache miss")
cacheChan <- nil
}
}()
@ -87,19 +87,28 @@ func getFileResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string,
select {
case results := <-cacheChan:
if results == nil {
combinedResults = fetchFileResults(query, safe, lang, page)
if len(combinedResults) > 0 {
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
// Fetch only on a cache miss, and only if the Crawler is enabled
if config.CrawlerEnabled {
combinedResults = fetchFileResults(query, safe, lang, page)
if len(combinedResults) > 0 {
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
}
} else {
printDebug("Crawler disabled; skipping fetching.")
}
} else {
_, torrentResults, _ := convertToSpecificResults(results)
combinedResults = torrentResults
}
case <-time.After(2 * time.Second):
printInfo("Cache check timeout")
combinedResults = fetchFileResults(query, safe, lang, page)
if len(combinedResults) > 0 {
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
printDebug("Cache check timeout")
if config.CrawlerEnabled {
combinedResults = fetchFileResults(query, safe, lang, page)
if len(combinedResults) > 0 {
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
}
} else {
printDebug("Crawler disabled; skipping fetching.")
}
}
@ -107,6 +116,12 @@ func getFileResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string,
}
func fetchFileResults(query, safe, lang string, page int) []TorrentResult {
// If Crawler is disabled, skip fetching from torrent sites
if !config.CrawlerEnabled {
printInfo("Crawler is disabled; skipping torrent site fetching.")
return []TorrentResult{}
}
sites := []TorrentSite{torrentGalaxy, nyaa, thePirateBay, rutor}
results := []TorrentResult{}
@ -116,10 +131,11 @@ func fetchFileResults(query, safe, lang string, page int) []TorrentResult {
}
res, err := site.Search(query, "all")
if err != nil {
printWarn("Error searching with %s: %v", site.Name(), err)
continue
}
for _, r := range res {
r.Magnet = removeMagnetLink(r.Magnet) // Remove "magnet:", prehaps usless now?
r.Magnet = removeMagnetLink(r.Magnet) // Remove "magnet:", perhaps useless now?
results = append(results, r)
}
}


@ -11,6 +11,11 @@ import (
)
func PerformRedditSearch(query string, safe string, page int) ([]ForumSearchResult, error) {
if !config.CrawlerEnabled {
printDebug("Crawler is disabled; skipping forum search.")
return []ForumSearchResult{}, nil
}
const (
pageSize = 25
baseURL = "https://www.reddit.com"
@ -101,8 +106,18 @@ func handleForumsSearch(w http.ResponseWriter, settings UserSettings, query stri
// Start measuring the time for fetching results
startTime := time.Now()
// Perform the forum search
results, err := PerformRedditSearch(query, settings.SafeSearch, page)
var results []ForumSearchResult
var err error
// Check if CrawlerEnabled is true before performing Reddit search
if config.CrawlerEnabled {
results, err = PerformRedditSearch(query, settings.SafeSearch, page)
} else {
printDebug("Crawler is disabled; skipping Reddit search.")
results = []ForumSearchResult{}
}
// Use fallback (other nodes) if no results or an error occurred
if err != nil || len(results) == 0 {
log.Printf("No results from primary search, trying other nodes")
results = tryOtherNodesForForumSearch(query, settings.SafeSearch, settings.SearchLanguage, page)


@ -54,7 +54,7 @@ func handleImageSearch(w http.ResponseWriter, r *http.Request, settings UserSett
"Theme": settings.Theme,
"Safe": settings.SafeSearch,
"IsThemeDark": settings.IsThemeDark,
"HardCacheEnabled": config.HardCacheEnabled,
"HardCacheEnabled": config.DriveCacheEnabled,
"JsDisabled": jsDisabled,
}
@ -75,10 +75,10 @@ func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
go func() {
results, exists := resultsCache.Get(cacheKey)
if exists {
printInfo("Cache hit")
printDebug("Cache hit")
cacheChan <- results
} else {
printInfo("Cache miss")
printDebug("Cache miss")
cacheChan <- nil
}
}()
@ -86,21 +86,29 @@ func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
select {
case results := <-cacheChan:
if results == nil {
combinedResults = fetchImageResults(query, safe, lang, page, synchronous)
if len(combinedResults) > 0 {
combinedResults = filterValidImages(combinedResults)
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
if config.CrawlerEnabled {
combinedResults = fetchImageResults(query, safe, lang, page, synchronous)
if len(combinedResults) > 0 {
combinedResults = filterValidImages(combinedResults)
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
}
} else {
printDebug("Crawler disabled; skipping fetching from image search engines.")
}
} else {
_, _, imageResults := convertToSpecificResults(results)
combinedResults = filterValidImages(imageResults)
}
case <-time.After(2 * time.Second):
printInfo("Cache check timeout")
combinedResults = fetchImageResults(query, safe, lang, page, synchronous)
if len(combinedResults) > 0 {
combinedResults = filterValidImages(combinedResults)
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
printDebug("Cache check timeout")
if config.CrawlerEnabled {
combinedResults = fetchImageResults(query, safe, lang, page, synchronous)
if len(combinedResults) > 0 {
combinedResults = filterValidImages(combinedResults)
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
}
} else {
printDebug("Crawler disabled; skipping fetching from image search engines.")
}
}
@ -109,6 +117,13 @@ func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
func fetchImageResults(query, safe, lang string, page int, synchronous bool) []ImageSearchResult {
var results []ImageSearchResult
// Check if CrawlerEnabled is false
if !config.CrawlerEnabled {
printDebug("Crawler is disabled; skipping image search engine fetching.")
return results
}
engineCount := len(imageSearchEngines)
// Determine the engine to use based on the page number
@ -145,7 +160,7 @@ func fetchImageResults(query, safe, lang string, page int, synchronous bool) []I
imageURLMapMu.Unlock()
// Set ProxyFull and ProxyThumb
if config.HardCacheEnabled {
if config.DriveCacheEnabled {
// Cache the thumbnail image asynchronously
go func(imgResult ImageSearchResult) {
_, success, err := cacheImage(imgResult.Thumb, imgResult.ID, true)
@ -204,7 +219,7 @@ func fetchImageResults(query, safe, lang string, page int, synchronous bool) []I
imageURLMap[fmt.Sprintf("%s_thumb", hash)] = imageResult.Thumb
imageURLMapMu.Unlock()
if config.HardCacheEnabled {
if config.DriveCacheEnabled {
// Cache the thumbnail image asynchronously
go func(imgResult ImageSearchResult) {
_, success, err := cacheImage(imgResult.Thumb, imgResult.ID, true)

55
init.go

@ -3,37 +3,8 @@ package main
import (
"flag"
"os"
"time"
)
type Config struct {
Port int
AuthCode string
PeerID string
Peers []string
Domain string
NodesEnabled bool
CrawlerEnabled bool
WebsiteEnabled bool
LogLevel int
HardCacheDuration time.Duration
HardCacheEnabled bool
}
var defaultConfig = Config{
Port: 5000,
Domain: "localhost",
Peers: []string{},
AuthCode: generateStrongRandomString(64),
NodesEnabled: true,
CrawlerEnabled: true,
WebsiteEnabled: true,
LogLevel: 1,
HardCacheDuration: 0,
}
const configFilePath = "config.ini"
var config Config
func main() {
@ -60,7 +31,7 @@ func main() {
// Initialize configuration interactively or from config file
err := initConfig()
if err != nil {
printErr("Error during initialization:")
printErr("Error during initialization: %v", err)
return
}
}
@ -91,7 +62,29 @@ func main() {
InitializeLanguage("en") // Initialize language before generating OpenSearch
generateOpenSearchXML(config)
go startNodeClient()
// Start the node client only if NodesEnabled is true
if config.NodesEnabled {
go startNodeClient()
printInfo("Node client started.")
} else {
printInfo("Node client is disabled.")
}
// Start periodic cleanup of expired cache files
if config.DriveCacheEnabled {
go cleanExpiredCachedImages()
printInfo("Drive cache started.")
} else {
printInfo("Drive cache is disabled.")
}
// Initialize the in-RAM results cache
if config.RamCacheEnabled {
resultsCache = NewResultsCache()
printInfo("RAM cache started.")
} else {
printInfo("RAM cache is disabled.")
}
runServer()
}

53
main.go

@ -210,22 +210,45 @@ func parsePageParameter(pageStr string) int {
}
func runServer() {
http.Handle("/static/", http.StripPrefix("/static/", http.FileServer(http.Dir("static"))))
http.HandleFunc("/", handleSearch)
http.HandleFunc("/search", handleSearch)
http.HandleFunc("/suggestions", handleSuggestions)
// The /imgproxy handler is deprecated; it's now handled by /image/
// http.HandleFunc("/imgproxy", handleImageProxy)
http.HandleFunc("/node", handleNodeRequest)
http.HandleFunc("/settings", handleSettings)
http.HandleFunc("/save-settings", handleSaveSettings)
http.HandleFunc("/image/", handleImageServe)
http.HandleFunc("/image_status", handleImageStatus)
http.HandleFunc("/opensearch.xml", func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/opensearchdescription+xml")
http.ServeFile(w, r, "static/opensearch.xml")
})
if config.WebsiteEnabled {
// Website-related endpoints
http.Handle("/static/", http.StripPrefix("/static/", http.FileServer(http.Dir("static"))))
http.HandleFunc("/", handleSearch)
http.HandleFunc("/search", handleSearch)
http.HandleFunc("/suggestions", handleSuggestions)
http.HandleFunc("/settings", handleSettings)
http.HandleFunc("/save-settings", handleSaveSettings)
http.HandleFunc("/image/", handleImageServe)
http.HandleFunc("/image_status", handleImageStatus)
http.HandleFunc("/opensearch.xml", func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/opensearchdescription+xml")
http.ServeFile(w, r, "static/opensearch.xml")
})
printInfo("Website functionality enabled.")
} else {
// Serve all website routes with a "service disabled" handler
http.HandleFunc("/static/", handleWebsiteDisabled)
http.HandleFunc("/", handleWebsiteDisabled)
http.HandleFunc("/search", handleWebsiteDisabled)
http.HandleFunc("/settings", handleWebsiteDisabled)
http.HandleFunc("/save-settings", handleWebsiteDisabled)
http.HandleFunc("/image/", handleWebsiteDisabled)
http.HandleFunc("/image_status", handleWebsiteDisabled)
http.HandleFunc("/opensearch.xml", handleWebsiteDisabled)
printInfo("Website functionality disabled.")
}
if config.NodesEnabled {
http.HandleFunc("/node", handleNodeRequest)
}
printMessage("Server is listening on http://localhost:%d", config.Port)
log.Fatal(http.ListenAndServe(fmt.Sprintf(":%d", config.Port), nil))
}
func handleWebsiteDisabled(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "text/plain")
w.WriteHeader(http.StatusServiceUnavailable)
_, _ = w.Write([]byte("The website functionality is currently disabled."))
}
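With Website disabled in the Features section, every UI route answers with the same 503; a hypothetical exchange:
// GET /search?q=test
// → HTTP/1.1 503 Service Unavailable
// → Content-Type: text/plain
// → The website functionality is currently disabled.
// Only /node remains registered, and only when NodesEnabled is true.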


@ -205,9 +205,6 @@ func fetchSuggestionsFromURL(url string) []string {
return []string{}
}
// Print the raw HTTP response for debugging
fmt.Printf("Raw response from %s:\n%s\n", url, string(body))
// Log the Content-Type for debugging.
contentType := resp.Header.Get("Content-Type")
printDebug("Response Content-Type from %s: %s", url, contentType)

41
text.go

@ -73,9 +73,14 @@ func getTextResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string,
select {
case results := <-cacheChan:
if results == nil {
combinedResults = fetchTextResults(query, safe, lang, page)
if len(combinedResults) > 0 {
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
// Fetch only on a cache miss, and only if the Crawler is enabled
if config.CrawlerEnabled {
combinedResults = fetchTextResults(query, safe, lang, page)
if len(combinedResults) > 0 {
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
}
} else {
printInfo("Crawler disabled; skipping fetching.")
}
} else {
textResults, _, _ := convertToSpecificResults(results)
@ -83,9 +88,13 @@ func getTextResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string,
}
case <-time.After(2 * time.Second):
printInfo("Cache check timeout")
combinedResults = fetchTextResults(query, safe, lang, page)
if len(combinedResults) > 0 {
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
if config.CrawlerEnabled {
combinedResults = fetchTextResults(query, safe, lang, page)
if len(combinedResults) > 0 {
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
}
} else {
printInfo("Crawler disabled; skipping fetching.")
}
}
@ -96,9 +105,13 @@ func prefetchPage(query, safe, lang string, page int) {
cacheKey := CacheKey{Query: query, Page: page, Safe: safe == "active", Lang: lang, Type: "text"}
if _, exists := resultsCache.Get(cacheKey); !exists {
printInfo("Page %d not cached, caching now...", page)
pageResults := fetchTextResults(query, safe, lang, page)
if len(pageResults) > 0 {
resultsCache.Set(cacheKey, convertToSearchResults(pageResults))
if config.CrawlerEnabled {
pageResults := fetchTextResults(query, safe, lang, page)
if len(pageResults) > 0 {
resultsCache.Set(cacheKey, convertToSearchResults(pageResults))
}
} else {
printInfo("Crawler disabled; skipping prefetch for page %d", page)
}
} else {
printInfo("Page %d already cached", page)
@ -107,6 +120,13 @@ func prefetchPage(query, safe, lang string, page int) {
func fetchTextResults(query, safe, lang string, page int) []TextSearchResult {
var results []TextSearchResult
// If Crawler is disabled, do not fetch from search engines
if !config.CrawlerEnabled {
printDebug("Crawler is disabled; skipping search engine fetching.")
return results // Return an empty list
}
engineCount := len(textSearchEngines)
// Determine which engine to use for the current page
@ -117,7 +137,7 @@ func fetchTextResults(query, safe, lang string, page int) []TextSearchResult {
enginePage := (page-1)/engineCount + 1
// Debug print to verify engine and page number being fetched
printInfo("Fetching results for overall page %d using engine: %s (engine page %d)", page, engine.Name, enginePage)
printDebug("Fetching results for overall page %d using engine: %s (engine page %d)", page, engine.Name, enginePage)
// Fetch results from the selected engine
searchResults, _, err := engine.Func(query, safe, lang, enginePage)
@ -146,7 +166,6 @@ func fetchTextResults(query, safe, lang string, page int) []TextSearchResult {
}
}
// Final debug print to display results count and source
printInfo("Fetched %d results for overall page %d", len(results), page)
return results


@ -150,7 +150,11 @@ func makeHTMLRequest(query, safe, lang string, page int) (*VideoAPIResponse, err
func handleVideoSearch(w http.ResponseWriter, settings UserSettings, query string, page int) {
start := time.Now()
results := fetchVideoResults(query, settings.SafeSearch, settings.SearchLanguage, page)
var results []VideoResult
if config.CrawlerEnabled {
results = fetchVideoResults(query, settings.SafeSearch, settings.SearchLanguage, page)
}
if len(results) == 0 {
printWarn("No results from primary search, trying other nodes")
results = tryOtherNodesForVideoSearch(query, settings.SafeSearch, settings.SearchLanguage, page, []string{hostID})
@ -178,6 +182,13 @@ func handleVideoSearch(w http.ResponseWriter, settings UserSettings, query strin
}
func fetchVideoResults(query, safe, lang string, page int) []VideoResult {
// Check if the crawler is enabled
if !config.CrawlerEnabled {
printDebug("Crawler is disabled; skipping video search.")
return []VideoResult{}
}
// Proceed with Piped API request if CrawlerEnabled
apiResp, err := makeHTMLRequest(query, safe, lang, page)
if err != nil {
printWarn("Error fetching video results: %v", err)