updated 'config.ini'
parent 28f71271d7
commit be4f86580e
13 changed files with 635 additions and 208 deletions
.gitignore (vendored): 1 change

@@ -2,5 +2,6 @@ config.json
 opensearch.xml
 config.ini
 image_cache/
+cache/
 *.min.js
 *.min.css

cache-images.go: 123 changes

@@ -13,6 +13,7 @@ import (
     "net/http"
     "os"
     "path/filepath"
+    "sort"
     "strings"
     "sync"
     "time"
@@ -25,7 +26,7 @@ import (
 var (
     cachingImages   = make(map[string]*sync.Mutex)
     cachingImagesMu sync.Mutex
-    cachingSemaphore = make(chan struct{}, 100) // Limit to concurrent downloads
+    // cachingSemaphore = make(chan struct{}, 100) // Limit to concurrent downloads

     invalidImageIDs   = make(map[string]struct{})
     invalidImageIDsMu sync.Mutex
@@ -35,8 +36,6 @@ var (
 )

 func cacheImage(imageURL, imageID string, isThumbnail bool) (string, bool, error) {
-    cacheDir := "image_cache"
-
     if imageURL == "" {
         recordInvalidImageID(imageID)
         return "", false, fmt.Errorf("empty image URL for image ID %s", imageID)
@@ -50,7 +49,7 @@ func cacheImage(imageURL, imageID string, isThumbnail bool) (string, bool, error
         filename = fmt.Sprintf("%s_full.webp", imageID)
     }

-    cachedImagePath := filepath.Join(cacheDir, filename)
+    cachedImagePath := filepath.Join(config.DriveCache.Path, filename)
     tempImagePath := cachedImagePath + ".tmp"

     // Check if the image is already cached
@@ -74,8 +73,9 @@ func cacheImage(imageURL, imageID string, isThumbnail bool) (string, bool, error
         return cachedImagePath, true, nil
     }

-    cachingSemaphore <- struct{}{}        // Acquire a token
-    defer func() { <-cachingSemaphore }() // Release the token
+    // // Limit max concurrent downloads
+    // cachingSemaphore <- struct{}{}        // Acquire a token
+    // defer func() { <-cachingSemaphore }() // Release the token

     // Create a custom http.Client that skips SSL certificate verification
     client := &http.Client{
@@ -109,11 +109,6 @@ func cacheImage(imageURL, imageID string, isThumbnail bool) (string, bool, error

     // Handle SVG files directly
     if contentType == "image/svg+xml" {
-        // Ensure the cache directory exists
-        if _, err := os.Stat(cacheDir); os.IsNotExist(err) {
-            os.Mkdir(cacheDir, os.ModePerm)
-        }
-
         // Save the SVG file as-is to the temp path
         err = os.WriteFile(tempImagePath, data, 0644)
         if err != nil {
@@ -161,10 +156,11 @@ func cacheImage(imageURL, imageID string, isThumbnail bool) (string, bool, error
         return "", false, fmt.Errorf("failed to decode image: %v", err)
     }

-    // Ensure the cache directory exists
-    if _, err := os.Stat(cacheDir); os.IsNotExist(err) {
-        os.Mkdir(cacheDir, os.ModePerm)
-    }
+    // This is not working
+    // // Ensure the cache directory exists
+    // if _, err := os.Stat(config.DriveCache.Path); os.IsNotExist(err) {
+    //     os.Mkdir(config.DriveCache.Path, os.ModePerm)
+    // }

     // Open the temp file for writing
     outFile, err := os.Create(tempImagePath)
@@ -220,14 +216,19 @@ func handleImageServe(w http.ResponseWriter, r *http.Request) {
         imageID = parts[0]
         imageType = parts[1]

-        cacheDir := "image_cache"
         filename := fmt.Sprintf("%s_%s.webp", imageID, imageType)
-        cachedImagePath := filepath.Join(cacheDir, filename)
+        cachedImagePath := filepath.Join(config.DriveCache.Path, filename)

         if hasExtension && imageType == "thumb" {
-            // Requesting cached thumbnail image
+            // Requesting cached image (thumbnail or full)
             if _, err := os.Stat(cachedImagePath); err == nil {
-                // Cached image exists, serve it
+                // Update the modification time to now
+                err := os.Chtimes(cachedImagePath, time.Now(), time.Now())
+                if err != nil {
+                    printWarn("Failed to update modification time for %s: %v", cachedImagePath, err)
+                }
+
+                // Determine content type based on file extension
                 contentType := "image/webp"
                 w.Header().Set("Content-Type", contentType)
                 w.Header().Set("Cache-Control", "public, max-age=31536000")
@@ -235,12 +236,12 @@ func handleImageServe(w http.ResponseWriter, r *http.Request) {
                 return
             } else {
                 // Cached image not found
-                if config.HardCacheEnabled {
+                if config.DriveCacheEnabled {
                     // Thumbnail should be cached, but not found
                     serveMissingImage(w, r)
                     return
                 }
-                // Else, proceed to proxy (if HardCacheEnabled is false)
+                // Else, proceed to proxy if caching is disabled
             }
         }
     }
@@ -260,7 +261,7 @@ func handleImageServe(w http.ResponseWriter, r *http.Request) {
     }

     // For thumbnails, if HardCacheEnabled is true, and image not cached, serve missing image
-    if imageType == "thumb" && config.HardCacheEnabled {
+    if imageType == "thumb" && config.DriveCacheEnabled {
         // Thumbnail should be cached, but not found
         serveMissingImage(w, r)
         return
@@ -311,14 +312,13 @@ func handleImageStatus(w http.ResponseWriter, r *http.Request) {
     }

     // Check for cached full or thumbnail images
-    cacheDir := "image_cache"
     extensions := []string{"webp", "svg"} // Extensions without leading dots
     imageReady := false

     // Check thumbnail first
     for _, ext := range extensions {
         thumbFilename := fmt.Sprintf("%s_thumb.%s", id, ext)
-        thumbPath := filepath.Join(cacheDir, thumbFilename)
+        thumbPath := filepath.Join(config.DriveCache.Path, thumbFilename)

         if _, err := os.Stat(thumbPath); err == nil {
             statusMap[id] = fmt.Sprintf("/image/%s_thumb.%s", id, ext)
@@ -331,7 +331,7 @@ func handleImageStatus(w http.ResponseWriter, r *http.Request) {
     if !imageReady {
         for _, ext := range extensions {
             fullFilename := fmt.Sprintf("%s_full.%s", id, ext)
-            fullPath := filepath.Join(cacheDir, fullFilename)
+            fullPath := filepath.Join(config.DriveCache.Path, fullFilename)

             if _, err := os.Stat(fullPath); err == nil {
                 statusMap[id] = fmt.Sprintf("/image/%s_full.%s", id, ext)
@@ -343,7 +343,7 @@ func handleImageStatus(w http.ResponseWriter, r *http.Request) {

     // If neither is ready
     if !imageReady {
-        if !config.HardCacheEnabled {
+        if !config.DriveCacheEnabled {
             // Hard cache is disabled; use the proxy URL
             statusMap[id] = fmt.Sprintf("/image/%s_thumb", id)
         } else {
@@ -424,6 +424,77 @@ func removeImageResultFromCache(query string, page int, safe bool, lang string,
         }
     }
 }

+func cleanExpiredCachedImages() {
+    if config.DriveCache.Duration <= 0 && config.DriveCache.MaxUsageBytes <= 0 {
+        return // No cleanup needed if both duration and max usage are disabled
+    }
+
+    ticker := time.NewTicker(1 * time.Hour)
+    defer ticker.Stop()
+
+    for range ticker.C {
+        cleanupCache()
+    }
+}
+
+func cleanupCache() {
+    files, err := os.ReadDir(config.DriveCache.Path)
+    if err != nil {
+        printErr("Failed to read DriveCache directory: %v", err)
+        return
+    }
+
+    var totalSize uint64
+    fileInfos := make([]os.FileInfo, 0, len(files))
+
+    for _, file := range files {
+        info, err := file.Info()
+        if err != nil {
+            continue
+        }
+
+        filePath := filepath.Join(config.DriveCache.Path, file.Name())
+
+        // Check for expired files based on modification time
+        if config.DriveCache.Duration > 0 && time.Since(info.ModTime()) > config.DriveCache.Duration {
+            if err := os.Remove(filePath); err == nil {
+                printDebug("Removed expired cache file: %s", filePath)
+            } else {
+                printErr("Failed to remove expired cache file: %s", filePath)
+            }
+            continue // Skip adding this file to the list
+        }
+
+        // Accumulate total size and store file info for potential deletion
+        totalSize += uint64(info.Size())
+        fileInfos = append(fileInfos, info)
+    }
+
+    // If total size exceeds MaxUsageBytes, delete least recently used files
+    if config.DriveCache.MaxUsageBytes > 0 && totalSize > config.DriveCache.MaxUsageBytes {
+        // Sort files by last access time (oldest first)
+        sort.Slice(fileInfos, func(i, j int) bool {
+            return fileInfos[i].ModTime().Before(fileInfos[j].ModTime())
+        })
+
+        for _, info := range fileInfos {
+            if totalSize <= config.DriveCache.MaxUsageBytes {
+                break
+            }
+
+            filePath := filepath.Join(config.DriveCache.Path, info.Name())
+            fileSize := uint64(info.Size())
+
+            if err := os.Remove(filePath); err == nil {
+                totalSize -= fileSize
+                printDebug("Removed cache file to reduce size: %s", filePath)
+            } else {
+                printErr("Failed to remove cache file: %s", filePath)
+            }
+        }
+    }
+}
+
 func getContentType(ext string) string {
     switch strings.ToLower(ext) {
     case "svg":

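Taken together, these hunks turn the drive cache into an approximate LRU: handleImageServe bumps a cached file's ModTime on every hit, and cleanupCache sorts by ModTime and evicts oldest-first once MaxUsageBytes is exceeded. The touch is plain os.Chtimes; a minimal sketch of the idea (touchCached is a hypothetical name, the diff inlines this in handleImageServe):

    // touchCached marks a cache file as recently used so that
    // cleanupCache, which sorts by ModTime, evicts it last.
    func touchCached(path string) {
        now := time.Now()
        if err := os.Chtimes(path, now, now); err != nil {
            printWarn("Failed to update modification time for %s: %v", path, err)
        }
    }
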
cache.go: 31 changes

@@ -8,11 +8,6 @@ import (
     "github.com/shirou/gopsutil/mem"
 )

-var (
-    resultsCache   = NewResultsCache(6 * time.Hour) // Cache with 6-hour expiration
-    maxMemoryUsage = 90.0                           // Maximum memory usage in %
-)
-
 // SearchResult is a generic interface for all types of search results.
 type SearchResult interface{}

@@ -89,11 +84,14 @@ type ResultsCache struct {
     expiration time.Duration
 }

+var resultsCache *ResultsCache
+
 // NewResultsCache creates a new ResultsCache with a specified expiration duration.
-func NewResultsCache(expiration time.Duration) *ResultsCache {
+func NewResultsCache() *ResultsCache {
+    printDebug("Initializing results cache with expiration: %s and max usage: %d bytes", config.RamCache.Duration, config.RamCache.MaxUsageBytes)
     return &ResultsCache{
         results:    make(map[string]CachedItem),
-        expiration: expiration,
+        expiration: config.RamCache.Duration,
     }
 }

@@ -108,8 +106,9 @@ func (rc *ResultsCache) Get(key CacheKey) ([]SearchResult, bool) {
     }

     // Check if the item has expired
-    if time.Since(item.StoredTime) > rc.expiration {
+    if time.Since(item.StoredTime) > config.RamCache.Duration {
         delete(rc.results, rc.keyToString(key))
+        printDebug("Cache expired for key: %s", rc.keyToString(key))
         return nil, false
     }

@@ -135,12 +134,24 @@ func (rc *ResultsCache) keyToString(key CacheKey) string {
     return fmt.Sprintf("%s|%d|%t|%s|%s", key.Query, key.Page, key.Safe, key.Lang, key.Type)
 }

+// checkAndCleanCache removes items if memory usage exceeds the limit.
 func (rc *ResultsCache) checkAndCleanCache() {
-    if rc.memoryUsage() > maxMemoryUsage {
+    for rc.currentMemoryUsage() > config.RamCache.MaxUsageBytes {
         rc.cleanOldestItems()
     }
 }

+// currentMemoryUsage calculates the current memory usage in bytes.
+func (rc *ResultsCache) currentMemoryUsage() uint64 {
+    v, err := mem.VirtualMemory()
+    if err != nil {
+        printErr("Failed to get memory info: %v", err)
+        return 0
+    }
+    return v.Used // Used memory in bytes
+}
+
+// memoryUsage calculates the current memory usage as a percentage.
 func (rc *ResultsCache) memoryUsage() float64 {
     v, err := mem.VirtualMemory()
     if err != nil {
@@ -155,7 +166,7 @@ func (rc *ResultsCache) cleanOldestItems() {
     rc.mu.Lock()
     defer rc.mu.Unlock()

-    for rc.memoryUsage() > maxMemoryUsage {
+    for rc.currentMemoryUsage() > config.RamCache.MaxUsageBytes {
         var oldestKey string
         var oldestTime time.Time = time.Now()

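Both the new byte limit and the eviction loop compare against system-wide used memory reported by gopsutil, not the cache's own footprint. A self-contained sketch of the one library call the new code relies on:

    package main

    import (
        "fmt"

        "github.com/shirou/gopsutil/mem"
    )

    func main() {
        v, err := mem.VirtualMemory()
        if err != nil {
            panic(err)
        }
        // Used and Total are bytes; currentMemoryUsage reads Used,
        // getTotalMemory in config.go reads Total.
        fmt.Printf("used %d of %d bytes\n", v.Used, v.Total)
    }
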
(file header missing in source): 2 hunks

@@ -65,6 +65,7 @@ func renderTemplate(w http.ResponseWriter, tmplName string, data map[string]inte
         }
     }

+// Randoms string generator used for auth code
 func generateStrongRandomString(length int) string {
     bytes := make([]byte, length)
     _, err := rand.Read(bytes)
@@ -84,7 +85,7 @@ func isLocalAddress(domain string) bool {
     return domain == "localhost" || strings.HasPrefix(domain, "127.") || strings.HasPrefix(domain, "192.168.") || strings.HasPrefix(domain, "10.")
 }

-// Ensures that HTTP or HTTPS is befor the adress if needed
+// Ensures that HTTP or HTTPS is before the address if needed
 func addProtocol(domain string) string {
     if hasProtocol(domain) {
         return domain

config.go: 416 changes

@@ -2,20 +2,74 @@ package main

 import (
     "bufio"
+    "fmt"
     "os"
+    "path/filepath"
     "strconv"
     "strings"
+    "syscall"
     "time"

+    "github.com/shirou/gopsutil/mem"
     "gopkg.in/ini.v1"
 )

+var configFilePath = "./config.ini"
+
+type CacheConfig struct {
+    Duration      time.Duration
+    MaxUsageBytes uint64 // Store as bytes for uniformity
+    Path          string
+}
+
+type Config struct {
+    Port              int    // Added
+    AuthCode          string // Added
+    PeerID            string // Added
+    Peers             []string
+    Domain            string // Added
+    NodesEnabled      bool   // Added
+    CrawlerEnabled    bool   // Added
+    WebsiteEnabled    bool   // Added
+    RamCacheEnabled   bool
+    DriveCacheEnabled bool // Added
+    LogLevel          int  // Added
+
+    DriveCache CacheConfig
+    RamCache   CacheConfig
+}
+
+var defaultConfig = Config{
+    Port:              5000,
+    Domain:            "localhost",
+    Peers:             []string{},
+    AuthCode:          generateStrongRandomString(64),
+    NodesEnabled:      false,
+    CrawlerEnabled:    true,
+    WebsiteEnabled:    true,
+    RamCacheEnabled:   true,
+    DriveCacheEnabled: false,
+    LogLevel:          1,
+    DriveCache: CacheConfig{
+        Duration:      48 * time.Hour, // Added
+        Path:          "./cache",      // Added
+        MaxUsageBytes: parseMaxUsageDrive("90 %", config.DriveCache.Path), // Added
+    },
+    RamCache: CacheConfig{
+        Duration:      6 * time.Hour,           // Added
+        MaxUsageBytes: parseMaxUsageRam("90%"), // Added
+    },
+}
+
 func initConfig() error {
+    // Check if the configuration file exists
     if _, err := os.Stat(configFilePath); os.IsNotExist(err) {
+        // If not, create a new configuration
         return createConfig()
     }

-    printInfo("Configuration file already exists.")
+    printInfo("Configuration file already exists. Loading configuration.")
+    // Load existing configuration
     config = loadConfig()
     return nil
 }

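The prompts in createConfig below rely on time.ParseDuration for every duration answer, with "0h" treated as "disabled". For reference, a minimal standalone example of that parsing behavior:

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        d, err := time.ParseDuration("48h") // also accepts "300ms", "1.5h", "6h", ...
        fmt.Println(d, err)                 // prints "48h0m0s <nil>"

        zero, _ := time.ParseDuration("0h") // parses cleanly to 0,
        fmt.Println(zero == 0)              // which createConfig maps to "cache disabled"
    }
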
@@ -23,60 +77,180 @@ func initConfig() error {
 func createConfig() error {
     reader := bufio.NewReader(os.Stdin)

-    printMessage("Configuration file not found.")
-    printMessage("Do you want to use default values? (yes/no): ")
+    printMessage("Configuration file not found. Let's set it up.")
+    printMessage("Do you want to use default values? (yes/NO): ")
     useDefaults, _ := reader.ReadString('\n')

-    if strings.TrimSpace(useDefaults) != "yes" {
+    if strings.TrimSpace(strings.ToLower(useDefaults)) != "yes" {
+        // Server settings
         printMessage("Enter port (default 5000): ")
         portStr, _ := reader.ReadString('\n')
-        if portStr != "\n" {
-            port, err := strconv.Atoi(strings.TrimSpace(portStr))
-            if err != nil {
-                config.Port = 5000
-            } else {
+        portStr = strings.TrimSpace(portStr)
+        if portStr != "" {
+            port, err := strconv.Atoi(portStr)
+            if err == nil {
                 config.Port = port
+            } else {
+                printWarn("Invalid port, using default (5000).")
+                config.Port = defaultConfig.Port
             }
+        } else {
+            config.Port = defaultConfig.Port
         }

         printMessage("Enter your domain address (default localhost): ")
         domain, _ := reader.ReadString('\n')
-        if domain != "\n" {
-            config.Domain = strings.TrimSpace(domain)
+        config.Domain = strings.TrimSpace(domain)
+        if config.Domain == "" {
+            config.Domain = defaultConfig.Domain
         }
+
+        // Cache settings
+        printMessage("Would you like to configure Cache settings (yes/NO): ")
+        configureCache, _ := reader.ReadString('\n')
+        if strings.TrimSpace(strings.ToLower(configureCache)) == "yes" {
+            // RamCache settings
+            printMessage("Enter duration to store results in Ram (default 6h): ")
+            ramDurationStr, _ := reader.ReadString('\n')
+            ramDurationStr = strings.TrimSpace(ramDurationStr)
+            if ramDurationStr == "" {
+                config.RamCache.Duration = defaultConfig.RamCache.Duration
+                config.RamCacheEnabled = true
+            } else if ramDurationStr == "0h" {
+                config.RamCacheEnabled = false
+            } else {
+                ramDuration, err := time.ParseDuration(ramDurationStr)
+                if err != nil {
+                    printWarn("Invalid duration, using default (6h).")
+                    config.RamCache.Duration = defaultConfig.RamCache.Duration
+                    config.RamCacheEnabled = true
+                } else {
+                    config.RamCache.Duration = ramDuration
+                    config.RamCacheEnabled = true
+                }
+            }
+
+            printMessage("Enter RamCache max usage, e.g., 2 GiB or 80%% (default 90%%): ")
+            ramMaxUsage, _ := reader.ReadString('\n')
+            ramMaxUsage = strings.TrimSpace(ramMaxUsage)
+            if ramMaxUsage == "" {
+                config.RamCache.MaxUsageBytes = defaultConfig.RamCache.MaxUsageBytes
+            } else if ramMaxUsage == "0" || parseMaxUsageRam(ramMaxUsage) == 0 {
+                config.RamCacheEnabled = false
+            } else {
+                config.RamCache.MaxUsageBytes = parseMaxUsageRam(ramMaxUsage)
+                if config.RamCache.MaxUsageBytes == 0 {
+                    printWarn("Invalid RamCache max usage, using default (90%%).")
+                    config.RamCache.MaxUsageBytes = defaultConfig.RamCache.MaxUsageBytes
+                }
+            }
+
+            // DriveCache settings
+            printMessage("Enter duration to store results in DriveCache (default 0h): ")
+            driveDurationStr, _ := reader.ReadString('\n')
+            driveDurationStr = strings.TrimSpace(driveDurationStr)
+            if driveDurationStr == "" {
+                config.DriveCache.Duration = defaultConfig.DriveCache.Duration
+                config.DriveCacheEnabled = config.DriveCache.Duration > 0
+            } else if driveDurationStr == "0h" {
+                config.DriveCacheEnabled = false
+            } else {
+                driveDuration, err := time.ParseDuration(driveDurationStr)
+                if err != nil {
+                    printWarn("Invalid duration, using default (48h).")
+                    config.DriveCache.Duration = defaultConfig.DriveCache.Duration
+                    config.DriveCacheEnabled = config.DriveCache.Duration > 0
+                } else {
+                    config.DriveCache.Duration = driveDuration
+                    config.DriveCacheEnabled = config.DriveCache.Duration > 0
+                }
+            }
+
+            printMessage("Enter DriveCache path (default ./cache): ")
+            drivePath, _ := reader.ReadString('\n')
+            drivePath = strings.TrimSpace(drivePath)
+            if drivePath == "" {
+                config.DriveCache.Path = defaultConfig.DriveCache.Path
+            } else {
+                config.DriveCache.Path = drivePath
+            }
+
+            printMessage("Enter DriveCache max usage, e.g., 2 GiB or 90%% (default 90%%): ")
+            driveMaxUsage, _ := reader.ReadString('\n')
+            driveMaxUsage = strings.TrimSpace(driveMaxUsage)
+            if driveMaxUsage == "" {
+                config.DriveCache.MaxUsageBytes = defaultConfig.DriveCache.MaxUsageBytes
+            } else if driveMaxUsage == "0" || parseMaxUsageDrive(driveMaxUsage, drivePath) == 0 {
+                config.DriveCacheEnabled = false
+            } else {
+                config.DriveCache.MaxUsageBytes = parseMaxUsageDrive(driveMaxUsage, drivePath)
+                if config.DriveCache.MaxUsageBytes == 0 {
+                    printWarn("Invalid DriveCache max usage, using default (1 TiB).")
+                    config.DriveCache.MaxUsageBytes = defaultConfig.DriveCache.MaxUsageBytes
+                }
+            }
+        } else {
+            printInfo("Cache settings skipped. Using default values.")
+            config.RamCache = defaultConfig.RamCache
+            config.DriveCache = defaultConfig.DriveCache
+        }
     } else {
+        // Use default configuration
         config = defaultConfig
     }

+    // Generate AuthCode if missing
     if config.AuthCode == "" {
         config.AuthCode = generateStrongRandomString(64)
         printMessage("Generated connection code: %s\n", config.AuthCode)
     }

-    config.NodesEnabled = len(config.Peers) > 0
-    config.CrawlerEnabled = true
-    config.WebsiteEnabled = true
-    config.LogLevel = 1
+    // Set other default values
+    config.NodesEnabled = defaultConfig.NodesEnabled
+    config.CrawlerEnabled = defaultConfig.CrawlerEnabled
+    config.WebsiteEnabled = defaultConfig.WebsiteEnabled
+    config.LogLevel = defaultConfig.LogLevel

+    // Save configuration to file
     saveConfig(config)
+    printInfo("Configuration saved successfully.")
     return nil
 }

 func saveConfig(config Config) {
     cfg := ini.Empty()
-    sec := cfg.Section("")
-    sec.Key("Port").SetValue(strconv.Itoa(config.Port))
-    sec.Key("AuthCode").SetValue(config.AuthCode)
-    sec.Key("PeerID").SetValue(config.PeerID)

-    peers := strings.Join(config.Peers, ",")
-    sec.Key("Peers").SetValue(peers)
+    // Server section
+    sec := cfg.Section("Server")
+    sec.Key("Port").SetValue(strconv.Itoa(config.Port))
     sec.Key("Domain").SetValue(config.Domain)
-    sec.Key("NodesEnabled").SetValue(strconv.FormatBool(config.NodesEnabled))
-    sec.Key("CrawlerEnabled").SetValue(strconv.FormatBool(config.CrawlerEnabled))
-    sec.Key("WebsiteEnabled").SetValue(strconv.FormatBool(config.WebsiteEnabled))
     sec.Key("LogLevel").SetValue(strconv.Itoa(config.LogLevel))
-    sec.Key("HardCacheDuration").SetValue(config.HardCacheDuration.String())

+    // Peers section
+    peersSec := cfg.Section("Peers")
+    peersSec.Key("AuthCode").SetValue(config.AuthCode)
+    peersSec.Key("PeerID").SetValue(config.PeerID)
+    peersSec.Key("Peers").SetValue(strings.Join(config.Peers, ","))
+
+    // Features section
+    featuresSec := cfg.Section("Features")
+    featuresSec.Key("Nodes").SetValue(strconv.FormatBool(config.NodesEnabled))
+    featuresSec.Key("Crawler").SetValue(strconv.FormatBool(config.CrawlerEnabled))
+    featuresSec.Key("Website").SetValue(strconv.FormatBool(config.WebsiteEnabled))
+    featuresSec.Key("RamCache").SetValue(strconv.FormatBool(config.RamCacheEnabled))
+    featuresSec.Key("DriveCache").SetValue(strconv.FormatBool(config.DriveCacheEnabled))
+
+    // DriveCache section
+    driveSec := cfg.Section("DriveCache")
+    driveSec.Key("Duration").SetValue(config.DriveCache.Duration.String())
+    driveSec.Key("MaxUsage").SetValue(formatMaxUsage(config.DriveCache.MaxUsageBytes))
+    driveSec.Key("Path").SetValue(config.DriveCache.Path)
+    // driveSec.Key("MaxConcurrentDownloads.Thumbnail").SetValue(strconv.Itoa(config.DriveCache.MaxConcurrentThumbnailDownloads))
+
+    // RamCache section
+    ramSec := cfg.Section("RamCache")
+    ramSec.Key("Duration").SetValue(config.RamCache.Duration.String())
+    ramSec.Key("MaxUsage").SetValue(formatMaxUsage(config.RamCache.MaxUsageBytes))
+
     err := cfg.SaveTo(configFilePath)
     if err != nil {

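For orientation, a config.ini written by the new saveConfig has this sectioned layout (values are illustrative, AuthCode and PeerID elided; durations use time.Duration's String form and MaxUsage uses formatMaxUsage):

    [Server]
    Port     = 5000
    Domain   = localhost
    LogLevel = 1

    [Peers]
    AuthCode = <generated 64-char code>
    PeerID   =
    Peers    =

    [Features]
    Nodes      = false
    Crawler    = true
    Website    = true
    RamCache   = true
    DriveCache = false

    [DriveCache]
    Duration = 48h0m0s
    MaxUsage = 2.00GiB
    Path     = ./cache

    [RamCache]
    Duration = 6h0m0s
    MaxUsage = 4.00GiB
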
@@ -90,73 +264,153 @@ func loadConfig() Config {
         printErr("Error opening config file: %v", err)
     }

-    port, err := cfg.Section("").Key("Port").Int()
-    if err != nil || port == 0 {
-        port = 5000 // Default to 5000 if not set or error
-    }
+    // Server
+    port, _ := cfg.Section("Server").Key("Port").Int()
+    domain := cfg.Section("Server").Key("Domain").String()
+    logLevel, _ := cfg.Section("Server").Key("LogLevel").Int()

-    peersStr := cfg.Section("").Key("Peers").String()
-    var peers []string
-    if peersStr != "" {
-        peers = strings.Split(peersStr, ",")
-        for i, peer := range peers {
-            peers[i] = addProtocol(peer)
-        }
-    }
+    // Peers
+    authCode := cfg.Section("Peers").Key("AuthCode").String()
+    peersStr := cfg.Section("Peers").Key("Peers").String()
+    peers := strings.Split(peersStr, ",")

-    domain := cfg.Section("").Key("Domain").String()
-    if domain == "" {
-        domain = "localhost" // Default to localhost if not set
-    }
+    // Features
+    nodesEnabled, _ := cfg.Section("Features").Key("Nodes").Bool()
+    crawlerEnabled, _ := cfg.Section("Features").Key("Crawler").Bool()
+    websiteEnabled, _ := cfg.Section("Features").Key("Website").Bool()
+    ramCacheEnabled, _ := cfg.Section("Features").Key("RamCache").Bool()
+    driveCacheEnabled, _ := cfg.Section("Features").Key("DriveCache").Bool()

-    nodesEnabled, err := cfg.Section("").Key("NodesEnabled").Bool()
-    if err != nil { // If NodesEnabled is not found in config
-        nodesEnabled = len(peers) > 0 // Enable nodes if peers are configured
-    }
+    // DriveCache
+    driveDuration, _ := time.ParseDuration(cfg.Section("DriveCache").Key("Duration").String())
+    drivePath := cfg.Section("DriveCache").Key("Path").String()
+    driveMaxUsage := parseMaxUsageDrive(cfg.Section("DriveCache").Key("MaxUsage").String(), drivePath)
+    // maxConcurrentDownloads, _ := cfg.Section("DriveCache").Key("MaxConcurrentDownloads.Thumbnail").Int()
+    // if maxConcurrentDownloads == 0 {
+    //     maxConcurrentDownloads = defaultConfig.DriveCache.MaxConcurrentThumbnailDownloads
+    // }

-    crawlerEnabled, err := cfg.Section("").Key("CrawlerEnabled").Bool()
-    if err != nil { // Default to true if not found
-        crawlerEnabled = true
-    }
+    // RamCache
+    ramDuration, _ := time.ParseDuration(cfg.Section("RamCache").Key("Duration").String())
+    ramMaxUsage := parseMaxUsageRam(cfg.Section("RamCache").Key("MaxUsage").String())

-    websiteEnabled, err := cfg.Section("").Key("WebsiteEnabled").Bool()
-    if err != nil { // Default to true if not found
-        websiteEnabled = true
-    }
-
-    logLevel, err := cfg.Section("").Key("LogLevel").Int()
-    if err != nil || logLevel < 0 || logLevel > 4 { // Default to 1 if not found or out of range
-        logLevel = 1
-    }
-
-    // Read HardCacheDuration
-    hardCacheStr := cfg.Section("").Key("HardCacheDuration").String()
-    var hardCacheDuration time.Duration
-    if hardCacheStr != "" {
-        duration, err := time.ParseDuration(hardCacheStr)
-        if err != nil {
-            printWarn("Invalid HardCacheDuration format, defaulting to 0: %v", err)
-            hardCacheDuration = 0
-        } else {
-            hardCacheDuration = duration
-        }
-    } else {
-        hardCacheDuration = 0 // Default to 0 if not set
-    }
-
-    config = Config{
+    return Config{
         Port:              port,
-        AuthCode:          cfg.Section("").Key("AuthCode").String(),
-        PeerID:            cfg.Section("").Key("PeerID").String(),
-        Peers:             peers,
         Domain:            domain,
+        LogLevel:          logLevel,
+        AuthCode:          authCode, // Assign AuthCode here
+        Peers:             peers,
         NodesEnabled:      nodesEnabled,
         CrawlerEnabled:    crawlerEnabled,
         WebsiteEnabled:    websiteEnabled,
-        LogLevel:          logLevel,
-        HardCacheDuration: hardCacheDuration,
-        HardCacheEnabled:  hardCacheDuration != 0,
+        RamCacheEnabled:   ramCacheEnabled,
+        DriveCacheEnabled: driveCacheEnabled,
+        DriveCache: CacheConfig{
+            Duration:      driveDuration,
+            MaxUsageBytes: driveMaxUsage,
+            Path:          drivePath,
+            // MaxConcurrentThumbnailDownloads: maxConcurrentDownloads,
+        },
+        RamCache: CacheConfig{
+            Duration:      ramDuration,
+            MaxUsageBytes: ramMaxUsage,
+        },
     }
-
-    return config
 }
+
+// Helper to parse MaxUsage string into bytes
+func parseMaxUsageRam(value string) uint64 {
+    const GiB = 1024 * 1024 * 1024
+    value = strings.TrimSpace(value)
+    valueNoSpaces := strings.ReplaceAll(value, " ", "")
+
+    if strings.HasSuffix(valueNoSpaces, "%") {
+        percentStr := strings.TrimSuffix(valueNoSpaces, "%")
+        percent, err := strconv.ParseFloat(percentStr, 64)
+        if err != nil {
+            return 0
+        }
+        totalMem := getTotalMemory()
+        return uint64(float64(totalMem) * (percent / 100))
+    } else if strings.HasSuffix(valueNoSpaces, "GiB") {
+        sizeStr := strings.TrimSuffix(valueNoSpaces, "GiB")
+        size, err := strconv.ParseFloat(sizeStr, 64)
+        if err != nil {
+            return 0
+        }
+        return uint64(size * GiB)
+    }
+
+    return 0
+}
+
+// Helper to parse MaxUsage string into bytes based on drive space
+func parseMaxUsageDrive(value string, cachePath string) uint64 {
+    const GiB = 1024 * 1024 * 1024
+    value = strings.TrimSpace(value)
+    valueNoSpaces := strings.ReplaceAll(value, " ", "")
+
+    totalDiskSpace := getTotalDiskSpace(cachePath)
+    if totalDiskSpace == 0 {
+        printErr("Failed to retrieve disk space for path: %s", cachePath)
+        return 0
+    }
+
+    if strings.HasSuffix(valueNoSpaces, "%") {
+        percentStr := strings.TrimSuffix(valueNoSpaces, "%")
+        percent, err := strconv.ParseFloat(percentStr, 64)
+        if err != nil {
+            return 0
+        }
+        return uint64(float64(totalDiskSpace) * (percent / 100))
+    } else if strings.HasSuffix(valueNoSpaces, "GiB") {
+        sizeStr := strings.TrimSuffix(valueNoSpaces, "GiB")
+        size, err := strconv.ParseFloat(sizeStr, 64)
+        if err != nil {
+            return 0
+        }
+        return uint64(size * GiB)
+    }
+
+    return 0
+}
+
+// Get total disk space of the system where cachePath resides
+func getTotalDiskSpace(cachePath string) uint64 {
+    var stat syscall.Statfs_t
+
+    // Get filesystem stats for the cache path
+    absPath, err := filepath.Abs(cachePath)
+    if err != nil {
+        printErr("Failed to resolve absolute path for: %s", cachePath)
+        return 0
+    }
+
+    err = syscall.Statfs(absPath, &stat)
+    if err != nil {
+        printErr("Failed to retrieve filesystem stats for: %s", absPath)
+        return 0
+    }
+
+    // Total disk space in bytes
+    return stat.Blocks * uint64(stat.Bsize)
+}
+
+// Helper to format bytes back to human-readable string
+func formatMaxUsage(bytes uint64) string {
+    const GiB = 1024 * 1024 * 1024
+    if bytes >= GiB {
+        return fmt.Sprintf("%.2fGiB", float64(bytes)/GiB)
+    }
+    return fmt.Sprintf("%dbytes", bytes)
+}
+
+// Get total memory of the system
+func getTotalMemory() uint64 {
+    v, err := mem.VirtualMemory()
+    if err != nil {
+        printErr("Failed to retrieve system memory: %v", err)
+        return 0
+    }
+    return v.Total
+}

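The two parsers interpret their input the same way: a "%" value is relative to total RAM (parseMaxUsageRam) or to the filesystem holding the cache path (parseMaxUsageDrive), a "GiB" value is absolute, spaces are ignored, and anything unparseable yields 0, which callers treat as invalid or disabled. A small illustrative helper (hypothetical, assuming the functions above are in scope):

    func exampleMaxUsage() {
        _ = parseMaxUsageRam("90%")               // 90% of total system memory, in bytes
        _ = parseMaxUsageRam("2 GiB")             // 2 * 1024^3 bytes; spaces are stripped
        _ = parseMaxUsageDrive("50%", "./cache")  // half of the disk holding ./cache
        _ = parseMaxUsageDrive("lots", "./cache") // 0: unparseable input
    }
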
files.go: 24 changes

@@ -76,10 +76,10 @@ func getFileResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string,
     go func() {
         results, exists := resultsCache.Get(cacheKey)
         if exists {
-            printInfo("Cache hit")
+            printDebug("Cache hit")
             cacheChan <- results
         } else {
-            printInfo("Cache miss")
+            printDebug("Cache miss")
             cacheChan <- nil
         }
     }()
@@ -87,26 +87,41 @@ func getFileResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string,
     select {
     case results := <-cacheChan:
         if results == nil {
+            // Fetch only if the cache miss occurs and Crawler is enabled
+            if config.CrawlerEnabled {
                 combinedResults = fetchFileResults(query, safe, lang, page)
                 if len(combinedResults) > 0 {
                     resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
                 }
+            } else {
+                printDebug("Crawler disabled; skipping fetching.")
+            }
         } else {
             _, torrentResults, _ := convertToSpecificResults(results)
             combinedResults = torrentResults
         }
     case <-time.After(2 * time.Second):
-        printInfo("Cache check timeout")
+        printDebug("Cache check timeout")
+        if config.CrawlerEnabled {
             combinedResults = fetchFileResults(query, safe, lang, page)
             if len(combinedResults) > 0 {
                 resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
             }
+        } else {
+            printDebug("Crawler disabled; skipping fetching.")
+        }
     }

     return combinedResults
 }

 func fetchFileResults(query, safe, lang string, page int) []TorrentResult {
+    // If Crawler is disabled, skip fetching from torrent sites
+    if !config.CrawlerEnabled {
+        printInfo("Crawler is disabled; skipping torrent site fetching.")
+        return []TorrentResult{}
+    }
+
     sites := []TorrentSite{torrentGalaxy, nyaa, thePirateBay, rutor}
     results := []TorrentResult{}

@@ -116,10 +131,11 @@ func fetchFileResults(query, safe, lang string, page int) []TorrentResult {
         }
         res, err := site.Search(query, "all")
         if err != nil {
+            printWarn("Error searching with %s: %v", site.Name(), err)
             continue
         }
         for _, r := range res {
-            r.Magnet = removeMagnetLink(r.Magnet) // Remove "magnet:", prehaps usless now?
+            r.Magnet = removeMagnetLink(r.Magnet) // Remove "magnet:", prehaps useless now?
             results = append(results, r)
         }
     }

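files.go, images.go, and text.go now share the same shape: a goroutine answers the cache lookup on a channel, the caller waits at most two seconds, and live fetching happens only when config.CrawlerEnabled is set. A runnable, generic sketch of that select pattern (placeholder data, nothing project-specific):

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        cacheChan := make(chan []string, 1)
        go func() { cacheChan <- nil }() // nil plays the role of a cache miss

        crawlerEnabled := true // stands in for config.CrawlerEnabled
        var results []string

        select {
        case cached := <-cacheChan:
            if cached == nil && crawlerEnabled {
                results = []string{"fetched"} // fetch and cache on a miss
            } else {
                results = cached
            }
        case <-time.After(2 * time.Second):
            if crawlerEnabled {
                results = []string{"fetched after timeout"}
            }
        }
        fmt.Println(results)
    }
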
forums.go: 19 changes

@@ -11,6 +11,11 @@ import (
 )

 func PerformRedditSearch(query string, safe string, page int) ([]ForumSearchResult, error) {
+    if !config.CrawlerEnabled {
+        printDebug("Crawler is disabled; skipping forum search.")
+        return []ForumSearchResult{}, nil
+    }
+
     const (
         pageSize = 25
         baseURL  = "https://www.reddit.com"
@@ -101,8 +106,18 @@ func handleForumsSearch(w http.ResponseWriter, settings UserSettings, query stri
     // Start measuring the time for fetching results
     startTime := time.Now()

-    // Perform the forum search
-    results, err := PerformRedditSearch(query, settings.SafeSearch, page)
+    var results []ForumSearchResult
+    var err error
+
+    // Check if CrawlerEnabled is true before performing Reddit search
+    if config.CrawlerEnabled {
+        results, err = PerformRedditSearch(query, settings.SafeSearch, page)
+    } else {
+        printDebug("Crawler is disabled; skipping Reddit search.")
+        results = []ForumSearchResult{}
+    }
+
+    // Use fallback (other nodes) if no results or an error occurred
     if err != nil || len(results) == 0 {
         log.Printf("No results from primary search, trying other nodes")
         results = tryOtherNodesForForumSearch(query, settings.SafeSearch, settings.SearchLanguage, page)

images.go: 27 changes

@@ -54,7 +54,7 @@ func handleImageSearch(w http.ResponseWriter, r *http.Request, settings UserSett
         "Theme":            settings.Theme,
         "Safe":             settings.SafeSearch,
         "IsThemeDark":      settings.IsThemeDark,
-        "HardCacheEnabled": config.HardCacheEnabled,
+        "HardCacheEnabled": config.DriveCacheEnabled,
         "JsDisabled":       jsDisabled,
     }

@@ -75,10 +75,10 @@ func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
     go func() {
         results, exists := resultsCache.Get(cacheKey)
         if exists {
-            printInfo("Cache hit")
+            printDebug("Cache hit")
             cacheChan <- results
         } else {
-            printInfo("Cache miss")
+            printDebug("Cache miss")
             cacheChan <- nil
         }
     }()
@@ -86,22 +86,30 @@ func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
     select {
     case results := <-cacheChan:
         if results == nil {
+            if config.CrawlerEnabled {
                 combinedResults = fetchImageResults(query, safe, lang, page, synchronous)
                 if len(combinedResults) > 0 {
                     combinedResults = filterValidImages(combinedResults)
                     resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
                 }
+            } else {
+                printDebug("Crawler disabled; skipping fetching from image search engines.")
+            }
         } else {
             _, _, imageResults := convertToSpecificResults(results)
             combinedResults = filterValidImages(imageResults)
         }
     case <-time.After(2 * time.Second):
-        printInfo("Cache check timeout")
+        printDebug("Cache check timeout")
+        if config.CrawlerEnabled {
             combinedResults = fetchImageResults(query, safe, lang, page, synchronous)
             if len(combinedResults) > 0 {
                 combinedResults = filterValidImages(combinedResults)
                 resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
             }
+        } else {
+            printDebug("Crawler disabled; skipping fetching from image search engines.")
+        }
     }

     return combinedResults
@@ -109,6 +117,13 @@ func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string

 func fetchImageResults(query, safe, lang string, page int, synchronous bool) []ImageSearchResult {
     var results []ImageSearchResult
+
+    // Check if CrawlerEnabled is false
+    if !config.CrawlerEnabled {
+        printDebug("Crawler is disabled; skipping image search engine fetching.")
+        return results
+    }
+
     engineCount := len(imageSearchEngines)

     // Determine the engine to use based on the page number
@@ -145,7 +160,7 @@ func fetchImageResults(query, safe, lang string, page int, synchronous bool) []I
             imageURLMapMu.Unlock()

             // Set ProxyFull and ProxyThumb
-            if config.HardCacheEnabled {
+            if config.DriveCacheEnabled {
                 // Cache the thumbnail image asynchronously
                 go func(imgResult ImageSearchResult) {
                     _, success, err := cacheImage(imgResult.Thumb, imgResult.ID, true)
@@ -204,7 +219,7 @@ func fetchImageResults(query, safe, lang string, page int, synchronous bool) []I
             imageURLMap[fmt.Sprintf("%s_thumb", hash)] = imageResult.Thumb
             imageURLMapMu.Unlock()

-            if config.HardCacheEnabled {
+            if config.DriveCacheEnabled {
                 // Cache the thumbnail image asynchronously
                 go func(imgResult ImageSearchResult) {
                     _, success, err := cacheImage(imgResult.Thumb, imgResult.ID, true)

init.go: 53 changes

@@ -3,37 +3,8 @@ package main
 import (
     "flag"
     "os"
-    "time"
 )

-type Config struct {
-    Port              int
-    AuthCode          string
-    PeerID            string
-    Peers             []string
-    Domain            string
-    NodesEnabled      bool
-    CrawlerEnabled    bool
-    WebsiteEnabled    bool
-    LogLevel          int
-    HardCacheDuration time.Duration
-    HardCacheEnabled  bool
-}
-
-var defaultConfig = Config{
-    Port:              5000,
-    Domain:            "localhost",
-    Peers:             []string{},
-    AuthCode:          generateStrongRandomString(64),
-    NodesEnabled:      true,
-    CrawlerEnabled:    true,
-    WebsiteEnabled:    true,
-    LogLevel:          1,
-    HardCacheDuration: 0,
-}
-
-const configFilePath = "config.ini"
-
 var config Config

 func main() {
@@ -60,7 +31,7 @@ func main() {
     // Initialize configuration interactively or from config file
     err := initConfig()
     if err != nil {
-        printErr("Error during initialization:")
+        printErr("Error during initialization: %v", err)
         return
     }
 }
@@ -91,7 +62,29 @@ func main() {
     InitializeLanguage("en") // Initialize language before generating OpenSearch
     generateOpenSearchXML(config)

+    // Start the node client only if NodesEnabled is true
+    if config.NodesEnabled {
         go startNodeClient()
+        printInfo("Node client started.")
+    } else {
+        printInfo("Node client is disabled.")
+    }
+
+    // Start periodic cleanup of expired cache files
+    if config.DriveCacheEnabled {
+        go cleanExpiredCachedImages()
+        printInfo("Drive cache started.")
+    } else {
+        printInfo("Drive cache is disabled.")
+    }
+
+    // Start periodic cleanup of expired cache files
+    if config.RamCacheEnabled {
+        resultsCache = NewResultsCache()
+        printInfo("RAM cache started.")
+    } else {
+        printInfo("RAM cache is disabled.")
+    }
+
     runServer()
 }

main.go: 29 changes

@@ -210,13 +210,13 @@ func parsePageParameter(pageStr string) int {
 }

 func runServer() {
+    if config.WebsiteEnabled {
+        // Website-related endpoints
         http.Handle("/static/", http.StripPrefix("/static/", http.FileServer(http.Dir("static"))))
         http.HandleFunc("/", handleSearch)
         http.HandleFunc("/search", handleSearch)
         http.HandleFunc("/suggestions", handleSuggestions)
-    // The /imgproxy handler is deprecated, now its handled by /image/
-    // http.HandleFunc("/imgproxy", handleImageProxy)
-    http.HandleFunc("/node", handleNodeRequest)
         http.HandleFunc("/settings", handleSettings)
         http.HandleFunc("/save-settings", handleSaveSettings)
         http.HandleFunc("/image/", handleImageServe)
@@ -225,7 +225,30 @@ func runServer() {
             w.Header().Set("Content-Type", "application/opensearchdescription+xml")
             http.ServeFile(w, r, "static/opensearch.xml")
         })
+        printInfo("Website functionality enabled.")
+    } else {
+        // Redirect all website routes to a "service disabled" handler
+        http.HandleFunc("/static/", handleWebsiteDisabled)
+        http.HandleFunc("/", handleWebsiteDisabled)
+        http.HandleFunc("/search", handleWebsiteDisabled)
+        http.HandleFunc("/settings", handleWebsiteDisabled)
+        http.HandleFunc("/save-settings", handleWebsiteDisabled)
+        http.HandleFunc("/image/", handleWebsiteDisabled)
+        http.HandleFunc("/image_status", handleWebsiteDisabled)
+        http.HandleFunc("/opensearch.xml", handleWebsiteDisabled)
+        printInfo("Website functionality disabled.")
+    }
+
+    if config.NodesEnabled {
+        http.HandleFunc("/node", handleNodeRequest)
+    }

     printMessage("Server is listening on http://localhost:%d", config.Port)
     log.Fatal(http.ListenAndServe(fmt.Sprintf(":%d", config.Port), nil))
 }

+func handleWebsiteDisabled(w http.ResponseWriter, r *http.Request) {
+    w.Header().Set("Content-Type", "text/plain")
+    w.WriteHeader(http.StatusServiceUnavailable)
+    _, _ = w.Write([]byte("The website functionality is currently disabled."))
+}

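A quick way to sanity-check the new fallback handler, as a minimal sketch with net/http/httptest (assumes handleWebsiteDisabled from the hunk above is in scope, plus the "net/http", "net/http/httptest", and "testing" imports):

    func TestHandleWebsiteDisabled(t *testing.T) {
        req := httptest.NewRequest(http.MethodGet, "/search", nil)
        rec := httptest.NewRecorder()

        handleWebsiteDisabled(rec, req)

        // Every disabled website route should answer 503 with plain text.
        if rec.Code != http.StatusServiceUnavailable {
            t.Fatalf("want 503, got %d", rec.Code)
        }
        if rec.Body.String() != "The website functionality is currently disabled." {
            t.Fatalf("unexpected body: %q", rec.Body.String())
        }
    }
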
(file header missing in source): 1 hunk

@@ -205,9 +205,6 @@ func fetchSuggestionsFromURL(url string) []string {
         return []string{}
     }

-    // Print the raw HTTP response for debugging
-    fmt.Printf("Raw response from %s:\n%s\n", url, string(body))
-
     // Log the Content-Type for debugging.
     contentType := resp.Header.Get("Content-Type")
     printDebug("Response Content-Type from %s: %s", url, contentType)

text.go: 23 changes

@@ -73,20 +73,29 @@ func getTextResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string,
     select {
     case results := <-cacheChan:
         if results == nil {
+            // Fetch only if the cache miss occurs and Crawler is enabled
+            if config.CrawlerEnabled {
                 combinedResults = fetchTextResults(query, safe, lang, page)
                 if len(combinedResults) > 0 {
                     resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
                 }
+            } else {
+                printInfo("Crawler disabled; skipping fetching.")
+            }
         } else {
             textResults, _, _ := convertToSpecificResults(results)
             combinedResults = textResults
         }
     case <-time.After(2 * time.Second):
         printInfo("Cache check timeout")
+        if config.CrawlerEnabled {
             combinedResults = fetchTextResults(query, safe, lang, page)
             if len(combinedResults) > 0 {
                 resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
             }
+        } else {
+            printInfo("Crawler disabled; skipping fetching.")
+        }
     }

     return combinedResults
@@ -96,10 +105,14 @@ func prefetchPage(query, safe, lang string, page int) {
     cacheKey := CacheKey{Query: query, Page: page, Safe: safe == "active", Lang: lang, Type: "text"}
     if _, exists := resultsCache.Get(cacheKey); !exists {
         printInfo("Page %d not cached, caching now...", page)
+        if config.CrawlerEnabled {
             pageResults := fetchTextResults(query, safe, lang, page)
             if len(pageResults) > 0 {
                 resultsCache.Set(cacheKey, convertToSearchResults(pageResults))
             }
+        } else {
+            printInfo("Crawler disabled; skipping prefetch for page %d", page)
+        }
     } else {
         printInfo("Page %d already cached", page)
     }
@@ -107,6 +120,13 @@ func prefetchPage(query, safe, lang string, page int) {

 func fetchTextResults(query, safe, lang string, page int) []TextSearchResult {
     var results []TextSearchResult
+
+    // If Crawler is disabled, do not fetch from search engines
+    if !config.CrawlerEnabled {
+        printDebug("Crawler is disabled; skipping search engine fetching.")
+        return results // Return an empty list
+    }
+
     engineCount := len(textSearchEngines)

     // Determine which engine to use for the current page
@@ -117,7 +137,7 @@ func fetchTextResults(query, safe, lang string, page int) []TextSearchResult {
     enginePage := (page-1)/engineCount + 1

     // Debug print to verify engine and page number being fetched
-    printInfo("Fetching results for overall page %d using engine: %s (engine page %d)", page, engine.Name, enginePage)
+    printDebug("Fetching results for overall page %d using engine: %s (engine page %d)", page, engine.Name, enginePage)

     // Fetch results from the selected engine
     searchResults, _, err := engine.Func(query, safe, lang, enginePage)
@@ -146,7 +166,6 @@ func fetchTextResults(query, safe, lang string, page int) []TextSearchResult {
         }
     }

-    // Final debug print to display results count and source
     printInfo("Fetched %d results for overall page %d", len(results), page)

     return results

video.go: 13 changes

@@ -150,7 +150,11 @@ func makeHTMLRequest(query, safe, lang string, page int) (*VideoAPIResponse, err
 func handleVideoSearch(w http.ResponseWriter, settings UserSettings, query string, page int) {
     start := time.Now()

-    results := fetchVideoResults(query, settings.SafeSearch, settings.SearchLanguage, page)
+    var results []VideoResult
+    if config.CrawlerEnabled {
+        results = fetchVideoResults(query, settings.SafeSearch, settings.SearchLanguage, page)
+    }
+
     if len(results) == 0 {
         printWarn("No results from primary search, trying other nodes")
         results = tryOtherNodesForVideoSearch(query, settings.SafeSearch, settings.SearchLanguage, page, []string{hostID})
@@ -178,6 +182,13 @@ func handleVideoSearch(w http.ResponseWriter, settings UserSettings, query strin
 }

 func fetchVideoResults(query, safe, lang string, page int) []VideoResult {
+    // Check if the crawler is enabled
+    if !config.CrawlerEnabled {
+        printDebug("Crawler is disabled; skipping video search.")
+        return []VideoResult{}
+    }
+
+    // Proceed with Piped API request if CrawlerEnabled
     apiResp, err := makeHTMLRequest(query, safe, lang, page)
     if err != nil {
         printWarn("Error fetching video results: %v", err)