Marked self-crawling as experimental, cleaned up unused features
Some checks failed
Run Integration Tests / test (push) Failing after 1m15s

This commit is contained in:
partisan 2025-06-08 22:12:15 +02:00
parent ca87df5df1
commit 49cb7bb94a
27 changed files with 1731 additions and 832 deletions

View file

@ -135,10 +135,10 @@ func randomUserAgent() (string, error) {
r := rand.New(rand.NewSource(time.Now().UnixNano()))
// Overall usage: 80% chance for Chromium, 20% for Firefox
// Overall usage: 85% chance for Chromium, 15% for Firefox
usageStats := map[string]float64{
"Firefox": 20.0,
"Chromium": 80.0,
"Firefox": 15.0,
"Chromium": 85.0,
}
// Weighted random selection of the browser type

View file

@ -11,7 +11,6 @@ import (
// SearchResult is a generic interface for all types of search results.
type SearchResult interface{}
// Define various search result types implementing SearchResult interface
type TextSearchResult struct {
URL string
Header string

102
config.go
View file

@ -29,9 +29,8 @@ type MetaSearchConfig struct {
type Config struct {
Port int
AuthCode string
PeerID string
Peers []string
NodeID string
Nodes []string
Domain string
NodesEnabled bool
MetaSearchEnabled bool
@ -65,8 +64,7 @@ type Config struct {
var defaultConfig = Config{
Port: 5000,
Domain: "localhost",
Peers: []string{},
AuthCode: generateStrongRandomString(64),
Nodes: []string{},
NodesEnabled: false,
MetaSearchEnabled: true,
IndexerEnabled: false,
@ -268,12 +266,6 @@ func createConfig() error {
config = defaultConfig
}
// Generate AuthCode if missing
if config.AuthCode == "" {
config.AuthCode = generateStrongRandomString(64)
printMessage("Generated connection code: %s\n", config.AuthCode)
}
saveConfig(config)
printInfo("Configuration saved successfully.")
return nil
@ -288,29 +280,36 @@ func saveConfig(config Config) {
sec.Key("Domain").SetValue(config.Domain)
sec.Key("LogLevel").SetValue(strconv.Itoa(config.LogLevel))
// Peers section
peersSec := cfg.Section("Peers")
peersSec.Key("AuthCode").SetValue(config.AuthCode)
peersSec.Key("PeerID").SetValue(config.PeerID)
peersSec.Key("Peers").SetValue(strings.Join(config.Peers, ","))
// Nodes section
nodesSec := cfg.Section("Nodes")
nodesSec.Key("NodeID").SetValue(config.NodeID)
nodesSec.Key("Nodes").SetValue(strings.Join(config.Nodes, ","))
// Features section
if config.NodesEnabled != defaultConfig.NodesEnabled ||
config.MetaSearchEnabled != defaultConfig.MetaSearchEnabled ||
config.IndexerEnabled != defaultConfig.IndexerEnabled ||
config.WebsiteEnabled != defaultConfig.WebsiteEnabled ||
config.MetaProxyEnabled != defaultConfig.MetaProxyEnabled ||
config.CrawlerProxyEnabled != defaultConfig.CrawlerProxyEnabled {
featuresSec := cfg.Section("Features")
featuresSec.Key("Nodes").SetValue(strconv.FormatBool(config.NodesEnabled))
featuresSec.Key("Crawler").SetValue(strconv.FormatBool(config.MetaSearchEnabled))
featuresSec.Key("Indexer").SetValue(strconv.FormatBool(config.IndexerEnabled))
featuresSec.Key("Website").SetValue(strconv.FormatBool(config.WebsiteEnabled))
featuresSec.Key("MetaProxy").SetValue(strconv.FormatBool(config.MetaProxyEnabled))
featuresSec.Key("CrawlerProxy").SetValue(strconv.FormatBool(config.CrawlerProxyEnabled))
setBoolIfChanged(featuresSec, "Nodes", config.NodesEnabled, defaultConfig.NodesEnabled)
setBoolIfChanged(featuresSec, "Crawler", config.MetaSearchEnabled, defaultConfig.MetaSearchEnabled)
setBoolIfChanged(featuresSec, "Indexer", config.IndexerEnabled, defaultConfig.IndexerEnabled)
setBoolIfChanged(featuresSec, "Website", config.WebsiteEnabled, defaultConfig.WebsiteEnabled)
setBoolIfChanged(featuresSec, "MetaProxy", config.MetaProxyEnabled, defaultConfig.MetaProxyEnabled)
setBoolIfChanged(featuresSec, "CrawlerProxy", config.CrawlerProxyEnabled, defaultConfig.CrawlerProxyEnabled)
}
// Proxies section
proxiesSec := cfg.Section("Proxies")
proxiesSec.Key("MetaProxyStrict").SetValue(strconv.FormatBool(config.MetaProxyStrict))
proxiesSec.Key("MetaProxies").SetValue(strings.Join(config.MetaProxies, ","))
proxiesSec.Key("CrawlerProxyStrict").SetValue(strconv.FormatBool(config.CrawlerProxyStrict))
proxiesSec.Key("CrawlerProxies").SetValue(strings.Join(config.CrawlerProxies, ","))
setBoolIfChanged(proxiesSec, "CrawlerProxyStrict", config.CrawlerProxyStrict, defaultConfig.CrawlerProxyStrict)
setSliceIfChanged(proxiesSec, "CrawlerProxies", config.CrawlerProxies, defaultConfig.CrawlerProxies)
proxiesSec.Key("MetaProxyRetry").SetValue(strconv.Itoa(config.MetaProxyRetry))
proxiesSec.Key("CrawlerProxyRetry").SetValue(strconv.Itoa(config.CrawlerProxyRetry))
setIntIfChanged(proxiesSec, "CrawlerProxyRetry", config.CrawlerProxyRetry, defaultConfig.CrawlerProxyRetry)
// MetaSearch section
metaSec := cfg.Section("MetaSearches")
@ -321,12 +320,19 @@ func saveConfig(config Config) {
metaSec.Key("Video").SetValue(strings.Join(config.MetaSearch.Video, ","))
// Indexer section
if config.ConcurrentStandardCrawlers != defaultConfig.ConcurrentStandardCrawlers ||
config.ConcurrentChromeCrawlers != defaultConfig.ConcurrentChromeCrawlers ||
config.CrawlingInterval != defaultConfig.CrawlingInterval ||
config.MaxPagesPerDomain != defaultConfig.MaxPagesPerDomain ||
config.IndexBatchSize != defaultConfig.IndexBatchSize {
indexerSec := cfg.Section("Indexer")
indexerSec.Key("ConcurrentStandardCrawlers").SetValue(strconv.Itoa(config.ConcurrentStandardCrawlers))
indexerSec.Key("ConcurrentChromeCrawlers").SetValue(strconv.Itoa(config.ConcurrentChromeCrawlers))
indexerSec.Key("CrawlingInterval").SetValue(config.CrawlingInterval.String())
indexerSec.Key("MaxPagesPerDomain").SetValue(strconv.Itoa(config.MaxPagesPerDomain))
indexerSec.Key("IndexBatchSize").SetValue(strconv.Itoa(config.IndexBatchSize))
setIntIfChanged(indexerSec, "ConcurrentStandardCrawlers", config.ConcurrentStandardCrawlers, defaultConfig.ConcurrentStandardCrawlers)
setIntIfChanged(indexerSec, "ConcurrentChromeCrawlers", config.ConcurrentChromeCrawlers, defaultConfig.ConcurrentChromeCrawlers)
setIfChanged(indexerSec, "CrawlingInterval", config.CrawlingInterval.String(), defaultConfig.CrawlingInterval.String())
setIntIfChanged(indexerSec, "MaxPagesPerDomain", config.MaxPagesPerDomain, defaultConfig.MaxPagesPerDomain)
setIntIfChanged(indexerSec, "IndexBatchSize", config.IndexBatchSize, defaultConfig.IndexBatchSize)
}
// DriveCache section
driveSec := cfg.Section("DriveCache")
@ -357,9 +363,9 @@ func loadConfig() Config {
domain := getConfigValueString(cfg.Section("Server").Key("Domain"), defaultConfig.Domain)
logLevel := getConfigValue(cfg.Section("Server").Key("LogLevel"), defaultConfig.LogLevel, strconv.Atoi)
// Peers
authCode := getConfigValueString(cfg.Section("Peers").Key("AuthCode"), defaultConfig.AuthCode)
peers := strings.Split(getConfigValueString(cfg.Section("Peers").Key("Peers"), ""), ",")
// Nodes
nodeID := getConfigValueString(cfg.Section("Nodes").Key("NodeID"), defaultConfig.NodeID)
nodes := strings.Split(getConfigValueString(cfg.Section("Nodes").Key("Nodes"), ""), ",")
// Features
nodesEnabled := getConfigValueBool(cfg.Section("Features").Key("Nodes"), defaultConfig.NodesEnabled)
@ -371,7 +377,7 @@ func loadConfig() Config {
metaProxyEnabled := getConfigValueBool(cfg.Section("Features").Key("MetaProxy"), defaultConfig.MetaProxyEnabled)
crawlerProxyEnabled := getConfigValueBool(cfg.Section("Features").Key("CrawlerProxy"), defaultConfig.CrawlerProxyEnabled)
// Proxies
// Nodes
metaProxyStrict := getConfigValueBool(cfg.Section("Proxies").Key("MetaProxyStrict"), defaultConfig.MetaProxyStrict)
metaProxies := strings.Split(getConfigValueString(cfg.Section("Proxies").Key("MetaProxies"), ""), ",")
crawlerProxyStrict := getConfigValueBool(cfg.Section("Proxies").Key("CrawlerProxyStrict"), defaultConfig.CrawlerProxyStrict)
@ -410,8 +416,8 @@ func loadConfig() Config {
Port: port,
Domain: domain,
LogLevel: logLevel,
AuthCode: authCode,
Peers: peers,
NodeID: nodeID,
Nodes: nodes,
NodesEnabled: nodesEnabled,
MetaSearchEnabled: metaSearchEnabled,
IndexerEnabled: indexerEnabled,
@ -450,6 +456,30 @@ func loadConfig() Config {
}
}
// setIfChanged writes the string value under key only when it differs
// from the compiled-in default, so unchanged settings are omitted from
// the generated INI file.
func setIfChanged(sec *ini.Section, key string, value string, defaultValue string) {
	if value == defaultValue {
		return
	}
	sec.Key(key).SetValue(value)
}
// setBoolIfChanged writes the boolean value under key only when it
// differs from the compiled-in default, keeping the INI file minimal.
func setBoolIfChanged(sec *ini.Section, key string, value bool, defaultValue bool) {
	if value == defaultValue {
		return
	}
	sec.Key(key).SetValue(strconv.FormatBool(value))
}
// setIntIfChanged writes the integer value under key only when it
// differs from the compiled-in default, keeping the INI file minimal.
func setIntIfChanged(sec *ini.Section, key string, value int, defaultValue int) {
	if value == defaultValue {
		return
	}
	sec.Key(key).SetValue(strconv.Itoa(value))
}
// setSliceIfChanged writes the comma-joined slice under key only when it
// differs from the compiled-in default. Comparison is done on the joined
// form, so e.g. []string{"a,b"} and []string{"a", "b"} compare equal —
// consistent with how the value is stored and later re-parsed.
func setSliceIfChanged(sec *ini.Section, key string, value, defaultValue []string) {
	// Join each slice exactly once; the original joined `value` twice.
	joined := strings.Join(value, ",")
	if joined != strings.Join(defaultValue, ",") {
		sec.Key(key).SetValue(joined)
	}
}
// getConfigValue retrieves a configuration value or returns a default value from defaultConfig.
func getConfigValue[T any](key *ini.Key, defaultValue T, parseFunc func(string) (T, error)) T {
if key == nil || key.String() == "" {

View file

@ -1,3 +1,6 @@
//go:build experimental
// +build experimental
package main
import (

View file

@ -1,3 +1,6 @@
//go:build experimental
// +build experimental
package main
import (

View file

@ -1,3 +1,6 @@
//go:build experimental
// +build experimental
package main
import (

View file

@ -28,8 +28,6 @@ var (
rutor TorrentSite
)
var fileResultsChan = make(chan []TorrentResult)
func initFileEngines() {
torrentGalaxy = nil
@ -154,14 +152,6 @@ func fetchFileResults(query, safe, lang string, page int) []TorrentResult {
}
}
// If no results, try from other nodes
if len(results) == 0 {
if config.NodesEnabled {
printWarn("No file results found for query: %s, trying other nodes", query)
results = tryOtherNodesForFileSearch(query, safe, lang, page, []string{hostID})
}
}
return results
}

View file

@ -135,6 +135,15 @@ func handleForumsSearch(w http.ResponseWriter, settings UserSettings, query stri
renderTemplate(w, "forums.html", data)
}
// fetchForumResults retrieves forum search results for the given query.
// Only Reddit is queried here; the lang parameter is accepted for
// signature parity with the other fetchers but is not used by this
// implementation. Returns nil when the search fails.
func fetchForumResults(query, safe, lang string, page int) []ForumSearchResult {
	res, err := PerformRedditSearch(query, safe, page)
	if err == nil {
		return res
	}
	printWarn("Failed to fetch forum results: %v", err)
	return nil
}
func getForumResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page int) []ForumSearchResult {
cacheChan := make(chan []SearchResult)
var combinedResults []ForumSearchResult

View file

@ -98,7 +98,7 @@ func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
case results := <-cacheChan:
if results == nil {
if config.MetaSearchEnabled {
combinedResults = fetchImageResults(query, safe, lang, page, synchronous)
combinedResults = fetchImageResults(query, safe, lang, page, synchronous, true)
if len(combinedResults) > 0 {
combinedResults = filterValidImages(combinedResults)
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
@ -113,7 +113,7 @@ func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
case <-time.After(2 * time.Second):
printDebug("Cache check timeout")
if config.MetaSearchEnabled {
combinedResults = fetchImageResults(query, safe, lang, page, synchronous)
combinedResults = fetchImageResults(query, safe, lang, page, synchronous, true)
if len(combinedResults) > 0 {
combinedResults = filterValidImages(combinedResults)
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
@ -126,7 +126,7 @@ func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
return combinedResults
}
func fetchImageResults(query, safe, lang string, page int, synchronous bool) []ImageSearchResult {
func fetchImageResults(query, safe, lang string, page int, synchronous bool, thumbsNeeded bool) []ImageSearchResult {
var results []ImageSearchResult
// Check if MetaSearchEnabled is false
@ -231,7 +231,7 @@ func fetchImageResults(query, safe, lang string, page int, synchronous bool) []I
imageURLMapMu.Unlock()
if config.DriveCacheEnabled {
// Cache the thumbnail image asynchronously
if thumbsNeeded {
go func(imgResult ImageSearchResult) {
_, success, err := cacheImage(imgResult.Thumb, imgResult.ID, "thumb")
if err != nil || !success {
@ -239,15 +239,17 @@ func fetchImageResults(query, safe, lang string, page int, synchronous bool) []I
removeImageResultFromCache(query, page, safe == "active", lang, imgResult.ID)
}
}(imageResult)
// Set ProxyThumb to the proxy URL (initially placeholder)
imageResult.ProxyThumb = fmt.Sprintf("/image/%s_thumb.webp", hash)
// Set ProxyFull to the proxy URL
} else {
imageResult.ProxyThumb = "" // fallback ?
}
imageResult.ProxyFull = fmt.Sprintf("/image/%s_full", hash)
} else {
// Hard cache disabled, proxy both thumb and full images
if thumbsNeeded {
imageResult.ProxyThumb = fmt.Sprintf("/image/%s_thumb", hash)
} else {
imageResult.ProxyThumb = ""
}
imageResult.ProxyFull = fmt.Sprintf("/image/%s_full", hash)
}

View file

@ -1,3 +1,6 @@
//go:build experimental
// +build experimental
package main
import (

142
init-extra.go Normal file
View file

@ -0,0 +1,142 @@
//go:build experimental
// +build experimental
package main
import (
"flag"
"os"
"path/filepath"
)
// config holds the process-wide configuration, populated in main().
var config Config

// main is the entry point for the experimental build (this file carries
// the `experimental` build tag). It parses command-line flags, loads or
// creates the configuration, starts the enabled subsystems (proxies,
// meta-search engines, node socket server, drive/RAM caches, indexer)
// and finally blocks in runServer().
func main() {
	// Command-line flags
	portFlag := flag.Int("port", 0, "Port number to run the application (overrides config)")
	domainFlag := flag.String("domain", "", "Domain address for the application (overrides config)")
	skipConfigFlag := flag.Bool("skip-config-check", false, "Skip interactive prompts and load config.ini")
	configFlag := flag.String("config", "", "Path to configuration file (overrides default)")

	// Parse command-line flags
	flag.Parse()

	// Override global configFilePath if --config flag is provided
	if *configFlag != "" {
		configFilePath = *configFlag
	}

	if *skipConfigFlag {
		// Skip interactive configuration
		if _, err := os.Stat(configFilePath); err == nil {
			// Load from config file if it exists
			config = loadConfig()
		} else {
			// Use defaults if config file does not exist
			config = defaultConfig
			saveConfig(config) // Save the defaults to config.ini
			printInfo("Configuration saved to %s", configFilePath)
		}
	} else {
		// Initialize configuration interactively or from config file
		err := initConfig()
		if err != nil {
			printErr("Error during initialization: %v", err)
			return
		}
	}

	// Override with command-line arguments if provided
	if *portFlag != 0 {
		config.Port = *portFlag
	}
	if *domainFlag != "" {
		config.Domain = *domainFlag
	}

	loadNodeConfig()

	// Proxies are only initialised when at least one proxy feature is on.
	if config.CrawlerProxyEnabled || config.MetaProxyEnabled {
		InitProxies()
	}

	// Initiate Browser Agent updater
	if config.MetaSearchEnabled || config.IndexerEnabled {
		go periodicAgentUpdate()
	}

	// Load List of Meta Search Engines
	if config.MetaSearchEnabled {
		initTextEngines()
		initImageEngines()
		initFileEngines()
		initPipedInstances()
		initMusicEngines()
	}

	InitializeLanguage("en") // Initialize language before generating OpenSearch
	generateOpenSearchXML(config)

	// Start the node client only if NodesEnabled is true
	if config.NodesEnabled {
		go startUnixSocketServer(config.NodeID)
		printInfo("Node client started.")
	} else {
		printInfo("Node client is disabled.")
	}

	// Check if the cache directory exists when caching is enabled
	if config.DriveCacheEnabled {
		cacheDir := config.DriveCache.Path
		imagesDir := filepath.Join(cacheDir, "images")

		// Check if the directory already exists
		if _, err := os.Stat(imagesDir); os.IsNotExist(err) {
			// Try to create the directory since it doesn't exist
			if err := os.MkdirAll(imagesDir, os.ModePerm); err != nil {
				printErr("Error: Failed to create cache or images directory '%s': %v", imagesDir, err)
				os.Exit(1) // Exit with a non-zero status to indicate an error
			}

			// Print a warning if the directory had to be created
			printWarn("Warning: Created missing directory '%s'.", imagesDir)
		}
	}

	// Start periodic cleanup of expired drive-cache files
	if config.DriveCacheEnabled {
		go cleanExpiredCachedImages()
		printInfo("Drive cache started.")
	} else {
		printInfo("Drive cache is disabled.")
	}

	// Initialize the in-memory (RAM) result and geocode caches
	if config.RamCacheEnabled {
		resultsCache = NewResultsCache()
		geocodeCache = NewGeocodeCache()
		printInfo("RAM cache started.")
	} else {
		printInfo("RAM cache is disabled.")
	}

	// Init indexer
	if config.IndexerEnabled {
		if err := downloadAndSetupDomainsCSV(); err != nil {
			printErr("Failed to set up domains.csv: %v", err)
			return
		}

		err := InitIndex()
		if err != nil {
			printErr("Failed to initialize index: %v", err)
		}

		webCrawlerInit()

		printInfo("Indexer is enabled.")
	} else {
		printInfo("Indexer is disabled.")
	}

	runServer()
}

47
init.go
View file

@ -1,3 +1,6 @@
//go:build !experimental
// +build !experimental
package main
import (
@ -51,22 +54,7 @@ func main() {
config.Domain = *domainFlag
}
loadNodeConfig()
if config.AuthCode == "" {
config.AuthCode = generateStrongRandomString(64)
printInfo("Generated connection code: %s\n", config.AuthCode)
saveConfig(config)
}
// Generate Host ID
hostID, nodeErr := generateHostID()
if nodeErr != nil {
printErr("Failed to generate host ID: %v", nodeErr)
}
config.PeerID = hostID
if config.CrawlerProxyEnabled || config.MetaProxyEnabled {
if config.MetaProxyEnabled {
InitProxies()
}
@ -87,14 +75,6 @@ func main() {
InitializeLanguage("en") // Initialize language before generating OpenSearch
generateOpenSearchXML(config)
// Start the node client only if NodesEnabled is true
if config.NodesEnabled {
go startNodeClient()
printInfo("Node client started.")
} else {
printInfo("Node client is disabled.")
}
// Check if the cache directory exists when caching is enabled
if config.DriveCacheEnabled {
cacheDir := config.DriveCache.Path
@ -129,24 +109,5 @@ func main() {
printInfo("RAM cache is disabled.")
}
// Init indexer
if config.IndexerEnabled {
if err := downloadAndSetupDomainsCSV(); err != nil {
printErr("Failed to set up domains.csv: %v", err)
return
}
err := InitIndex()
if err != nil {
printErr("Failed to initialize index: %v", err)
}
webCrawlerInit()
printInfo("Indexer is enabled.")
} else {
printInfo("Indexer is disabled.")
}
runServer()
}

View file

@ -243,10 +243,6 @@ func runServer() {
printInfo("Website is disabled.")
}
if config.NodesEnabled {
http.HandleFunc("/node", handleNodeRequest)
}
printMessage("Server is listening on http://localhost:%d", config.Port)
log.Fatal(http.ListenAndServe(fmt.Sprintf(":%d", config.Port), nil))
}

View file

@ -1,218 +1,203 @@
//go:build experimental
// +build experimental
package main
import (
"encoding/json"
"log"
)
func handleSearchTextMessage(msg Message) {
var searchParams struct {
type searchParams struct {
Query string `json:"query"`
Safe string `json:"safe"`
Lang string `json:"lang"`
Page int `json:"page"`
ResponseAddr string `json:"responseAddr"`
}
func extractTargetFromAddress(addr string) string {
if len(addr) > 5 && addr[len(addr)-5:] == ".sock" {
return addr[:len(addr)-5]
}
err := json.Unmarshal([]byte(msg.Content), &searchParams)
if err != nil {
printWarn("Error parsing search parameters: %v", err)
return addr
}
// Utility to respond to any search
func respondToSearch(req searchParams, msgType uint8, results any) {
if req.ResponseAddr == "" {
printErr("ResponseAddr is empty")
return
}
printDebug("Received search-text request. ResponseAddr: %s", searchParams.ResponseAddr)
results := fetchTextResults(searchParams.Query, searchParams.Safe, searchParams.Lang, searchParams.Page)
resultsJSON, err := json.Marshal(results)
respBytes, err := json.Marshal(results)
if err != nil {
printWarn("Error marshalling search results: %v", err)
printWarn("Failed to marshal results for msg type %d: %v", msgType, err)
return
}
responseMsg := Message{
ID: hostID,
Type: "text-results",
Content: string(resultsJSON),
resp := Message{
ID: generateMessageID(),
Type: msgType,
Content: respBytes,
Target: req.ResponseAddr,
}
// Log the address to be used for sending the response
printDebug("Sending text search results to %s", searchParams.ResponseAddr)
if searchParams.ResponseAddr == "" {
printErr("Error: Response address is empty")
return
}
err = sendMessage(searchParams.ResponseAddr, responseMsg)
err = sendMessage(resp)
if err != nil {
printWarn("Error sending text search results to %s: %v", searchParams.ResponseAddr, err)
printWarn("Failed to send search results to %s: %v", req.ResponseAddr, err)
}
}
func handleSearchImageMessage(msg Message) {
var searchParams struct {
Query string `json:"query"`
Safe string `json:"safe"`
Lang string `json:"lang"`
Page int `json:"page"`
ResponseAddr string `json:"responseAddr"`
}
err := json.Unmarshal([]byte(msg.Content), &searchParams)
if err != nil {
log.Printf("Error parsing search parameters: %v", err)
func sendBinaryResponse(req searchParams, msgType uint8, payload []byte, msgID uint32) {
if req.ResponseAddr == "" {
printErr("ResponseAddr is empty")
return
}
log.Printf("Received search-image request. ResponseAddr: %s", searchParams.ResponseAddr)
results := fetchImageResults(searchParams.Query, searchParams.Safe, searchParams.Lang, searchParams.Page, true)
resultsJSON, err := json.Marshal(results)
resp := Message{
ID: msgID,
Type: msgType,
Content: payload,
Target: req.ResponseAddr,
}
if err := sendMessage(resp); err != nil {
printWarn("Failed to send binary search results: %v", err)
}
}
func handleSearchTextMessage(msg Message) {
var req searchParams
if err := json.Unmarshal([]byte(msg.Content), &req); err != nil {
printWarn("Invalid JSON: %v", err)
return
}
printDebug("Received search-text from %s", req.ResponseAddr)
results := fetchTextResults(req.Query, req.Safe, req.Lang, req.Page)
data, err := encodeTextResults(results)
if err != nil {
log.Printf("Error marshalling search results: %v", err)
printWarn("Failed to encode text results: %v", err)
return
}
responseMsg := Message{
ID: hostID,
Type: "image-results",
Content: string(resultsJSON),
}
// Log the address to be used for sending the response
log.Printf("Sending image search results to %s", searchParams.ResponseAddr)
if searchParams.ResponseAddr == "" {
log.Printf("Error: Response address is empty")
return
}
err = sendMessage(searchParams.ResponseAddr, responseMsg)
if err != nil {
log.Printf("Error sending image search results to %s: %v", searchParams.ResponseAddr, err)
}
sendBinaryResponse(req, MsgTypeSearchTextResponse, data, msg.ID)
}
func handleSearchVideoMessage(msg Message) {
var searchParams struct {
Query string `json:"query"`
Safe string `json:"safe"`
Lang string `json:"lang"`
Page int `json:"page"`
ResponseAddr string `json:"responseAddr"`
var req searchParams
if err := json.Unmarshal([]byte(msg.Content), &req); err != nil {
printWarn("Invalid JSON: %v", err)
return
}
err := json.Unmarshal([]byte(msg.Content), &searchParams)
printDebug("Received search-video from %s", req.ResponseAddr)
results := fetchVideoResults(req.Query, req.Safe, req.Lang, req.Page)
data, err := encodeVideoResults(results)
if err != nil {
log.Printf("Error parsing search parameters: %v", err)
printWarn("Failed to encode video results: %v", err)
return
}
log.Printf("Received search-video request. ResponseAddr: %s", searchParams.ResponseAddr)
sendBinaryResponse(req, MsgTypeSearchVideoResponse, data, msg.ID)
}
results := fetchVideoResults(searchParams.Query, searchParams.Safe, searchParams.Lang, searchParams.Page)
resultsJSON, err := json.Marshal(results)
func handleSearchMusicMessage(msg Message) {
var req searchParams
if err := json.Unmarshal([]byte(msg.Content), &req); err != nil {
printWarn("Invalid JSON: %v", err)
return
}
printDebug("Received search-music from %s", req.ResponseAddr)
results := fetchMusicResults(req.Query, req.Page)
data, err := encodeMusicResults(results)
if err != nil {
log.Printf("Error marshalling search results: %v", err)
printWarn("Failed to encode music results: %v", err)
return
}
responseMsg := Message{
ID: hostID,
Type: "video-results",
Content: string(resultsJSON),
}
log.Printf("Sending video search results to %s", searchParams.ResponseAddr)
if searchParams.ResponseAddr == "" {
log.Printf("Error: Response address is empty")
return
}
err = sendMessage(searchParams.ResponseAddr, responseMsg)
if err != nil {
log.Printf("Error sending video search results to %s: %v", searchParams.ResponseAddr, err)
}
sendBinaryResponse(req, MsgTypeSearchMusicResponse, data, msg.ID)
}
func handleSearchFileMessage(msg Message) {
var searchParams struct {
Query string `json:"query"`
Safe string `json:"safe"`
Lang string `json:"lang"`
Page int `json:"page"`
ResponseAddr string `json:"responseAddr"`
var req searchParams
if err := json.Unmarshal([]byte(msg.Content), &req); err != nil {
printWarn("Invalid JSON: %v", err)
return
}
err := json.Unmarshal([]byte(msg.Content), &searchParams)
printDebug("Received search-file from %s", req.ResponseAddr)
results := fetchFileResults(req.Query, req.Safe, req.Lang, req.Page)
data, err := encodeFileResults(results)
if err != nil {
log.Printf("Error parsing search parameters: %v", err)
printWarn("Failed to encode file results: %v", err)
return
}
log.Printf("Received search-file request. ResponseAddr: %s", searchParams.ResponseAddr)
results := fetchFileResults(searchParams.Query, searchParams.Safe, searchParams.Lang, searchParams.Page)
resultsJSON, err := json.Marshal(results)
if err != nil {
log.Printf("Error marshalling search results: %v", err)
return
}
responseMsg := Message{
ID: hostID,
Type: "file-results",
Content: string(resultsJSON),
}
log.Printf("Sending file search results to %s", searchParams.ResponseAddr)
if searchParams.ResponseAddr == "" {
log.Printf("Error: Response address is empty")
return
}
err = sendMessage(searchParams.ResponseAddr, responseMsg)
if err != nil {
log.Printf("Error sending file search results to %s: %v", searchParams.ResponseAddr, err)
}
sendBinaryResponse(req, MsgTypeSearchFileResponse, data, msg.ID)
}
func handleSearchForumMessage(msg Message) {
var searchParams struct {
Query string `json:"query"`
Safe string `json:"safe"`
Lang string `json:"lang"`
Page int `json:"page"`
ResponseAddr string `json:"responseAddr"`
var req searchParams
if err := json.Unmarshal([]byte(msg.Content), &req); err != nil {
printWarn("Invalid JSON: %v", err)
return
}
err := json.Unmarshal([]byte(msg.Content), &searchParams)
printDebug("Received search-forum from %s", req.ResponseAddr)
results := fetchForumResults(req.Query, req.Safe, req.Lang, req.Page)
data, err := encodeForumResults(results)
if err != nil {
log.Printf("Error parsing search parameters: %v", err)
printWarn("Failed to encode forum results: %v", err)
return
}
log.Printf("Received search-forum request. ResponseAddr: %s", searchParams.ResponseAddr)
results := fetchForumResults(searchParams.Query, searchParams.Safe, searchParams.Lang, searchParams.Page)
resultsJSON, err := json.Marshal(results)
if err != nil {
log.Printf("Error marshalling search results: %v", err)
return
}
responseMsg := Message{
ID: hostID,
Type: "forum-results",
Content: string(resultsJSON),
}
// Log the address to be used for sending the response
log.Printf("Sending forum search results to %s", searchParams.ResponseAddr)
if searchParams.ResponseAddr == "" {
log.Printf("Error: Response address is empty")
return
}
err = sendMessage(searchParams.ResponseAddr, responseMsg)
if err != nil {
log.Printf("Error sending forum search results to %s: %v", searchParams.ResponseAddr, err)
}
sendBinaryResponse(req, MsgTypeSearchForumResponse, data, msg.ID)
}
// handleSearchImageMessage services one of the four image-search request
// variants. The message type selects whether thumbnail and/or full-size
// image proxying is required before the results are encoded and sent
// back to the requester.
func handleSearchImageMessage(msg Message) {
	var req searchParams
	if err := json.Unmarshal([]byte(msg.Content), &req); err != nil {
		printWarn("Invalid JSON: %v", err)
		return
	}

	printDebug("Received image search type %d from %s", msg.Type, req.ResponseAddr)

	// Derive the proxying requirements from the request variant.
	var wantThumbs, wantFull bool
	switch msg.Type {
	case MsgTypeSearchImageRawRequest:
		// raw request: neither thumbnails nor full images are proxied
	case MsgTypeSearchImageThumbRequest:
		wantThumbs = true
	case MsgTypeSearchImageFullRequest:
		wantFull = true
	case MsgTypeSearchImageAllRequest:
		wantThumbs, wantFull = true, true
	default:
		printWarn("Unknown image search type: %d", msg.Type)
		return
	}

	results := fetchImageResults(req.Query, req.Safe, req.Lang, req.Page, true, wantThumbs)
	if wantFull || wantThumbs {
		results = prepareProxiedImages(results, msg.Type)
	}

	data, err := encodeImageResults(results)
	if err != nil {
		printWarn("Failed to encode image results: %v", err)
		return
	}

	sendBinaryResponse(req, MsgTypeSearchImageResponse, data, msg.ID)
}

View file

@ -1,91 +0,0 @@
package main
import (
"log"
"sync"
"time"
)
var (
isMaster bool
masterNode string
masterNodeMux sync.RWMutex
)
const (
heartbeatInterval = 5 * time.Second
heartbeatTimeout = 15 * time.Second
electionTimeout = 10 * time.Second
)
func sendHeartbeats() {
for {
if !isMaster {
return
}
for _, node := range peers {
msg := Message{
ID: hostID,
Type: "heartbeat",
Content: authCode,
}
err := sendMessage(node, msg)
if err != nil {
log.Printf("Error sending heartbeat to %s: %v", node, err)
}
}
time.Sleep(heartbeatInterval)
}
}
func checkMasterHeartbeat() {
for {
time.Sleep(heartbeatTimeout)
masterNodeMux.RLock()
if masterNode == authCode || masterNode == "" {
masterNodeMux.RUnlock()
continue
}
masterNodeMux.RUnlock()
masterNodeMux.Lock()
masterNode = ""
masterNodeMux.Unlock()
startElection()
}
}
func startElection() {
masterNodeMux.Lock()
defer masterNodeMux.Unlock()
for _, node := range peers {
msg := Message{
ID: hostID,
Type: "election",
Content: authCode,
}
err := sendMessage(node, msg)
if err != nil {
log.Printf("Error sending election message to %s: %v", node, err)
}
}
isMaster = true
go sendHeartbeats()
}
func handleHeartbeat(content string) {
masterNodeMux.Lock()
defer masterNodeMux.Unlock()
masterNode = content
}
func handleElection(content string) {
masterNodeMux.Lock()
defer masterNodeMux.Unlock()
if content < authCode {
masterNode = content
}
}

View file

@ -1,19 +1,22 @@
//go:build experimental
// +build experimental
package main
import (
"encoding/json"
"bytes"
"encoding/binary"
"fmt"
"time"
)
func tryOtherNodesForFileSearch(query, safe, lang string, page int, visitedNodes []string) []TorrentResult {
for _, nodeAddr := range peers {
if contains(visitedNodes, nodeAddr) {
continue // Skip nodes already visited
}
results, err := sendFileSearchRequestToNode(nodeAddr, query, safe, lang, page, visitedNodes)
var fileResultsChan = make(chan []TorrentResult)
func tryOtherNodesForFileSearch(query, safe, lang string, page int) []TorrentResult {
for _, nodeTarget := range sockets {
results, err := sendFileSearchRequestToNode(nodeTarget, query, safe, lang, page)
if err != nil {
printWarn("Error contacting node %s: %v", nodeAddr, err)
printWarn("Error contacting node %s: %v", nodeTarget, err)
continue
}
if len(results) > 0 {
@ -23,60 +26,123 @@ func tryOtherNodesForFileSearch(query, safe, lang string, page int, visitedNodes
return nil
}
func sendFileSearchRequestToNode(nodeAddr, query, safe, lang string, page int, visitedNodes []string) ([]TorrentResult, error) {
visitedNodes = append(visitedNodes, nodeAddr)
searchParams := struct {
Query string `json:"query"`
Safe string `json:"safe"`
Lang string `json:"lang"`
Page int `json:"page"`
ResponseAddr string `json:"responseAddr"`
VisitedNodes []string `json:"visitedNodes"`
}{
Query: query,
Safe: safe,
Lang: lang,
Page: page,
ResponseAddr: fmt.Sprintf("http://localhost:%d/node", config.Port),
VisitedNodes: visitedNodes,
}
msgBytes, err := json.Marshal(searchParams)
func sendFileSearchRequestToNode(target, query, safe, lang string, page int) ([]TorrentResult, error) {
payload, err := encodeSearchTextParams(query, safe, lang, page)
if err != nil {
return nil, fmt.Errorf("failed to marshal search parameters: %v", err)
return nil, fmt.Errorf("encode error: %v", err)
}
msg := Message{
ID: hostID,
Type: "search-file",
Content: string(msgBytes),
ID: generateMessageID(),
Type: MsgTypeSearchFileRequest,
Content: payload,
Target: target,
}
err = sendMessage(nodeAddr, msg)
if err != nil {
return nil, fmt.Errorf("failed to send search request to node %s: %v", nodeAddr, err)
if err := sendMessage(msg); err != nil {
return nil, fmt.Errorf("send error: %v", err)
}
// Wait for results
select {
case res := <-fileResultsChan:
return res, nil
case <-time.After(20 * time.Second):
return nil, fmt.Errorf("timeout waiting for results from node %s", nodeAddr)
return nil, fmt.Errorf("timeout waiting for results from node %s", target)
}
}
// handleFileResultsMessage decodes an incoming file-results message and
// forwards the decoded slice to fileResultsChan for a waiting requester.
// Malformed payloads are logged and dropped.
func handleFileResultsMessage(msg Message) {
	results, err := decodeFileResults([]byte(msg.Content))
	if err != nil {
		printWarn("Error decoding file results: %v", err)
		return
	}
	printDebug("Received file results: %+v", results)
	// Deliver asynchronously so this handler never blocks the socket
	// reader while the requester drains the channel.
	go func() {
		fileResultsChan <- results
	}()
}
// encodeFileResults serializes torrent results into the node wire format:
// a big-endian uint16 count followed by, per result, URL (length-prefixed
// string), seeders (uint32), leechers (uint32), magnet (string), views
// (uint32), size (string), and title (string).
func encodeFileResults(results []TorrentResult) ([]byte, error) {
	var out bytes.Buffer
	if err := binary.Write(&out, binary.BigEndian, uint16(len(results))); err != nil {
		return nil, err
	}
	for _, item := range results {
		if err := writeString(&out, item.URL); err != nil {
			return nil, err
		}
		if err := binary.Write(&out, binary.BigEndian, uint32(item.Seeders)); err != nil {
			return nil, err
		}
		if err := binary.Write(&out, binary.BigEndian, uint32(item.Leechers)); err != nil {
			return nil, err
		}
		if err := writeString(&out, item.Magnet); err != nil {
			return nil, err
		}
		if err := binary.Write(&out, binary.BigEndian, uint32(item.Views)); err != nil {
			return nil, err
		}
		if err := writeString(&out, item.Size); err != nil {
			return nil, err
		}
		if err := writeString(&out, item.Title); err != nil {
			return nil, err
		}
	}
	return out.Bytes(), nil
}
// decodeFileResults parses the wire format produced by encodeFileResults
// back into a slice of TorrentResult. It returns an error on the first
// truncated or malformed field.
func decodeFileResults(data []byte) ([]TorrentResult, error) {
	rd := bytes.NewReader(data)
	var n uint16
	if err := binary.Read(rd, binary.BigEndian, &n); err != nil {
		return nil, err
	}
	out := make([]TorrentResult, 0, n)
	for idx := uint16(0); idx < n; idx++ {
		var item TorrentResult
		var err error
		if item.URL, err = readString(rd); err != nil {
			return nil, err
		}
		var seeders, leechers, views uint32
		if err = binary.Read(rd, binary.BigEndian, &seeders); err != nil {
			return nil, err
		}
		if err = binary.Read(rd, binary.BigEndian, &leechers); err != nil {
			return nil, err
		}
		if item.Magnet, err = readString(rd); err != nil {
			return nil, err
		}
		if err = binary.Read(rd, binary.BigEndian, &views); err != nil {
			return nil, err
		}
		if item.Size, err = readString(rd); err != nil {
			return nil, err
		}
		if item.Title, err = readString(rd); err != nil {
			return nil, err
		}
		item.Seeders = int(seeders)
		item.Leechers = int(leechers)
		item.Views = int(views)
		out = append(out, item)
	}
	return out, nil
}

View file

@ -1,100 +1,129 @@
//go:build experimental
// +build experimental
package main
import (
"encoding/json"
"bytes"
"encoding/binary"
"fmt"
"time"
)
var forumResultsChan = make(chan []ForumSearchResult)
func tryOtherNodesForForumSearch(query, safe, lang string, page int) []ForumSearchResult {
for _, nodeAddr := range peers {
results, err := sendForumSearchRequestToNode(nodeAddr, query, safe, lang, page, []string{})
func sendForumSearchRequestToNode(target, query, safe, lang string, page int, visitedNodes []string) ([]ForumSearchResult, error) {
payload, err := encodeSearchTextParams(query, safe, lang, page) // reuse text param encoding
if err != nil {
printWarn("Error contacting node %s: %v", nodeAddr, err)
continue
}
if len(results) > 0 {
return results
}
}
return nil
}
func sendForumSearchRequestToNode(nodeAddr, query, safe, lang string, page int, visitedNodes []string) ([]ForumSearchResult, error) {
// Check if the current node has already been visited
for _, node := range visitedNodes {
if node == hostID {
return nil, fmt.Errorf("loop detected: this node (%s) has already been visited", hostID)
}
}
// Add current node to the list of visited nodes
visitedNodes = append(visitedNodes, hostID)
searchParams := struct {
Query string `json:"query"`
Safe string `json:"safe"`
Lang string `json:"lang"`
Page int `json:"page"`
ResponseAddr string `json:"responseAddr"`
VisitedNodes []string `json:"visitedNodes"`
}{
Query: query,
Safe: safe,
Lang: lang,
Page: page,
ResponseAddr: fmt.Sprintf("http://localhost:%d/node", config.Port),
VisitedNodes: visitedNodes,
}
msgBytes, err := json.Marshal(searchParams)
if err != nil {
return nil, fmt.Errorf("failed to marshal search parameters: %v", err)
return nil, fmt.Errorf("encode error: %v", err)
}
msg := Message{
ID: hostID,
Type: "search-forum",
Content: string(msgBytes),
ID: generateMessageID(),
Type: MsgTypeSearchForumRequest,
Content: payload,
Target: target,
}
err = sendMessage(nodeAddr, msg)
if err != nil {
return nil, fmt.Errorf("failed to send search request to node %s: %v", nodeAddr, err)
if err := sendMessage(msg); err != nil {
return nil, fmt.Errorf("send error: %v", err)
}
// Wait for results
select {
case res := <-forumResultsChan:
return res, nil
case <-time.After(20 * time.Second):
return nil, fmt.Errorf("timeout waiting for results from node %s", nodeAddr)
return nil, fmt.Errorf("timeout waiting for results from node %s", target)
}
}
// handleForumResultsMessage decodes an incoming forum-results message and
// forwards the decoded slice to forumResultsChan for a waiting requester.
// Malformed payloads are logged and dropped.
func handleForumResultsMessage(msg Message) {
	results, err := decodeForumResults([]byte(msg.Content))
	if err != nil {
		printWarn("Error decoding forum results: %v", err)
		return
	}
	printDebug("Received forum results: %+v", results)
	// Deliver asynchronously so this handler never blocks the socket
	// reader while the requester drains the channel.
	go func() {
		forumResultsChan <- results
	}()
}
// Used only to answer requests
func fetchForumResults(query, safe, lang string, page int) []ForumSearchResult {
results, err := PerformRedditSearch(query, safe, page)
if err != nil {
printWarn("Error fetching forum results: %v", err)
return nil
func encodeForumResults(results []ForumSearchResult) ([]byte, error) {
buf := new(bytes.Buffer)
if err := binary.Write(buf, binary.BigEndian, uint16(len(results))); err != nil {
return nil, err
}
return results
for _, r := range results {
if err := writeString(buf, r.URL); err != nil {
return nil, err
}
if err := writeString(buf, r.Header); err != nil {
return nil, err
}
if err := writeString(buf, r.Description); err != nil {
return nil, err
}
if err := binary.Write(buf, binary.BigEndian, r.PublishedDate.Unix()); err != nil {
return nil, err
}
if err := writeString(buf, r.ImgSrc); err != nil {
return nil, err
}
if err := writeString(buf, r.ThumbnailSrc); err != nil {
return nil, err
}
}
return buf.Bytes(), nil
}
// decodeForumResults parses the wire format produced by encodeForumResults
// back into a slice of ForumSearchResult. PublishedDate travels as a
// big-endian int64 Unix timestamp and is rebuilt with time.Unix.
func decodeForumResults(data []byte) ([]ForumSearchResult, error) {
	rd := bytes.NewReader(data)
	var n uint16
	if err := binary.Read(rd, binary.BigEndian, &n); err != nil {
		return nil, err
	}
	out := make([]ForumSearchResult, 0, n)
	for idx := uint16(0); idx < n; idx++ {
		var item ForumSearchResult
		var err error
		if item.URL, err = readString(rd); err != nil {
			return nil, err
		}
		if item.Header, err = readString(rd); err != nil {
			return nil, err
		}
		if item.Description, err = readString(rd); err != nil {
			return nil, err
		}
		var unixTime int64
		if err = binary.Read(rd, binary.BigEndian, &unixTime); err != nil {
			return nil, err
		}
		item.PublishedDate = time.Unix(unixTime, 0)
		if item.ImgSrc, err = readString(rd); err != nil {
			return nil, err
		}
		if item.ThumbnailSrc, err = readString(rd); err != nil {
			return nil, err
		}
		out = append(out, item)
	}
	return out, nil
}

View file

@ -1,84 +1,198 @@
//go:build experimental
// +build experimental
package main
import (
"encoding/json"
"bytes"
"encoding/binary"
"fmt"
"time"
)
// const (
// MessageTypeSearchImage uint8 = 11
// MessageTypeSearchImageThumb uint8 = 111
// MessageTypeSearchImageFull uint8 = 112
// MessageTypeSearchImageAllProxy uint8 = 113
// MessageTypeImageResults uint8 = 22
// )
var imageResultsChan = make(chan []ImageSearchResult)
func handleImageResultsMessage(msg Message) {
var results []ImageSearchResult
err := json.Unmarshal([]byte(msg.Content), &results)
func sendImageSearchRequestToNode(target, query, safe, lang string, page int, mode uint8) ([]ImageSearchResult, error) {
payload, err := encodeSearchTextParams(query, safe, lang, page)
if err != nil {
printWarn("Error unmarshalling image results: %v", err)
return
return nil, fmt.Errorf("encode error: %v", err)
}
msg := Message{
ID: generateMessageID(),
Type: mode, // one of the image search types
Content: payload,
Target: target,
}
if err := sendMessage(msg); err != nil {
return nil, fmt.Errorf("send error: %v", err)
}
select {
case res := <-imageResultsChan:
return res, nil
case <-time.After(30 * time.Second):
return nil, fmt.Errorf("timeout from %s", target)
}
}
// handleImageResultsMessage decodes an incoming image-results message and
// forwards the decoded slice to imageResultsChan for a waiting requester.
// Malformed payloads are logged and dropped.
func handleImageResultsMessage(msg Message) {
	results, err := decodeImageResults([]byte(msg.Content))
	if err != nil {
		printWarn("Error decoding image results: %v", err)
		return
	}
	printDebug("Received image results: %+v", results)
	// Deliver asynchronously so this handler does not block.
	// NOTE(review): if no requester is waiting on imageResultsChan this
	// goroutine blocks forever — confirm a receiver always exists.
	go func() {
		imageResultsChan <- results
	}()
}
func sendImageSearchRequestToNode(nodeAddr, query, safe, lang string, page int, visitedNodes []string) ([]ImageSearchResult, error) {
visitedNodes = append(visitedNodes, nodeAddr)
searchParams := struct {
Query string `json:"query"`
Safe string `json:"safe"`
Lang string `json:"lang"`
Page int `json:"page"`
ResponseAddr string `json:"responseAddr"`
VisitedNodes []string `json:"visitedNodes"`
}{
Query: query,
Safe: safe,
Lang: lang,
Page: page,
ResponseAddr: fmt.Sprintf("http://localhost:%d/node", config.Port),
VisitedNodes: visitedNodes,
func encodeImageResults(results []ImageSearchResult) ([]byte, error) {
buf := new(bytes.Buffer)
if err := binary.Write(buf, binary.BigEndian, uint16(len(results))); err != nil {
return nil, err
}
msgBytes, err := json.Marshal(searchParams)
if err != nil {
return nil, fmt.Errorf("failed to marshal search parameters: %v", err)
for _, r := range results {
if err := writeString(buf, r.ID); err != nil {
return nil, err
}
if err := writeString(buf, r.Title); err != nil {
return nil, err
}
if err := writeString(buf, r.Full); err != nil {
return nil, err
}
if err := writeString(buf, r.Thumb); err != nil {
return nil, err
}
if err := writeString(buf, r.ProxyFull); err != nil {
return nil, err
}
if err := writeString(buf, r.ProxyThumb); err != nil {
return nil, err
}
if err := writeString(buf, r.Source); err != nil {
return nil, err
}
if err := binary.Write(buf, binary.BigEndian, int32(r.Width)); err != nil {
return nil, err
}
if err := binary.Write(buf, binary.BigEndian, int32(r.Height)); err != nil {
return nil, err
}
}
msg := Message{
ID: hostID,
Type: "search-image",
Content: string(msgBytes),
}
err = sendMessage(nodeAddr, msg)
if err != nil {
return nil, fmt.Errorf("failed to send search request to node %s: %v", nodeAddr, err)
}
// Wait for results
select {
case res := <-imageResultsChan:
return res, nil
case <-time.After(30 * time.Second):
return nil, fmt.Errorf("timeout waiting for results from node %s", nodeAddr)
}
return buf.Bytes(), nil
}
func tryOtherNodesForImageSearch(query, safe, lang string, page int, visitedNodes []string) []ImageSearchResult {
for _, nodeAddr := range peers {
if contains(visitedNodes, nodeAddr) {
continue // Skip nodes already visited
// decodeImageResults parses the binary wire format produced by
// encodeImageResults: a big-endian uint16 count followed by, per result,
// seven length-prefixed strings (ID, Title, Full, Thumb, ProxyFull,
// ProxyThumb, Source) and two big-endian int32s (Width, Height).
//
// Fix: the previous version discarded every readString error with `_`,
// so a truncated or corrupt payload produced silently empty fields and a
// desynchronized stream. Each error is now propagated to the caller.
func decodeImageResults(data []byte) ([]ImageSearchResult, error) {
	buf := bytes.NewReader(data)
	var count uint16
	if err := binary.Read(buf, binary.BigEndian, &count); err != nil {
		return nil, err
	}
	results := make([]ImageSearchResult, 0, count)
	for i := 0; i < int(count); i++ {
		var r ImageSearchResult
		var err error
		if r.ID, err = readString(buf); err != nil {
			return nil, err
		}
		if r.Title, err = readString(buf); err != nil {
			return nil, err
		}
		if r.Full, err = readString(buf); err != nil {
			return nil, err
		}
		if r.Thumb, err = readString(buf); err != nil {
			return nil, err
		}
		if r.ProxyFull, err = readString(buf); err != nil {
			return nil, err
		}
		if r.ProxyThumb, err = readString(buf); err != nil {
			return nil, err
		}
		if r.Source, err = readString(buf); err != nil {
			return nil, err
		}
		var width, height int32
		if err := binary.Read(buf, binary.BigEndian, &width); err != nil {
			return nil, err
		}
		if err := binary.Read(buf, binary.BigEndian, &height); err != nil {
			return nil, err
		}
		r.Width = int(width)
		r.Height = int(height)
		results = append(results, r)
	}
	return results, nil
}
func prepareProxiedImages(results []ImageSearchResult, mode uint8) []ImageSearchResult {
for i := range results {
switch mode {
case MsgTypeSearchImageThumbRequest:
results[i].ProxyThumb = "/image/" + results[i].ID + "_thumb.webp"
case MsgTypeSearchImageFullRequest:
results[i].ProxyFull = "/image/" + results[i].ID + "_full.webp"
case MsgTypeSearchImageAllRequest:
results[i].ProxyThumb = "/image/" + results[i].ID + "_thumb.webp"
results[i].ProxyFull = "/image/" + results[i].ID + "_full.webp"
}
results, err := sendImageSearchRequestToNode(nodeAddr, query, safe, lang, page, visitedNodes)
if err != nil {
printWarn("Error contacting node %s: %v", nodeAddr, err)
continue
}
if len(results) > 0 {
return results
}
}
return nil
}
// func handleSearchImageMessage(msg Message) {
// query, safe, lang, page, err := decodeSearchTextParams([]byte(msg.Content))
// if err != nil {
// printWarn("Error decoding image search parameters: %v", err)
// return
// }
// results := fetchImageResults(query, safe, lang, page, true, true)
// switch msg.Type {
// case MsgTypeSearchImageRawRequest:
// // No proxy URLs needed
// case MsgTypeSearchImageThumbRequest:
// results = prepareProxiedImages(results, MsgTypeSearchImageThumbRequest)
// case MsgTypeSearchImageFullRequest:
// results = prepareProxiedImages(results, MsgTypeSearchImageFullRequest)
// case MsgTypeSearchImageAllRequest:
// results = prepareProxiedImages(results, MsgTypeSearchImageAllRequest)
// default:
// printWarn("Unknown image request mode: %d", msg.Type)
// return
// }
// payload, err := encodeImageResults(results)
// if err != nil {
// printWarn("Error encoding image search results: %v", err)
// return
// }
// response := Message{
// ID: msg.ID,
// Type: MsgTypeSearchImageResponse,
// Content: string(payload),
// Target: msg.Source, // Reply to sender
// }
// if err := sendMessage(response); err != nil {
// printWarn("Error sending image search response: %v", err)
// }
// }

187
node-request-music.go Normal file
View file

@ -0,0 +1,187 @@
//go:build experimental
// +build experimental
package main
import (
"bytes"
"encoding/binary"
"fmt"
"time"
)
var musicResultsChan = make(chan []MusicResult)
// tryOtherNodesForMusicSearch asks each configured node socket in turn
// for music results and returns the first non-empty answer. The boolean
// safe flag is translated to the wire strings "active"/"inactive".
// Nodes that fail are logged and skipped; nil means no node delivered.
func tryOtherNodesForMusicSearch(query, lang string, safe bool, page int) []MusicResult {
	safeMode := "inactive"
	if safe {
		safeMode = "active"
	}
	for _, target := range sockets {
		res, err := sendMusicSearchRequestToNode(target, query, safeMode, lang, page)
		switch {
		case err != nil:
			printWarn("Error contacting node %s: %v", target, err)
		case len(res) > 0:
			return res
		}
	}
	return nil
}
// sendMusicSearchRequestToNode sends a music search request to a single
// node and blocks until results arrive on musicResultsChan or a
// 20-second timeout elapses.
func sendMusicSearchRequestToNode(target, query, safe, lang string, page int) ([]MusicResult, error) {
	body, encErr := encodeSearchTextParams(query, safe, lang, page)
	if encErr != nil {
		return nil, fmt.Errorf("encode error: %v", encErr)
	}
	request := Message{
		ID:      generateMessageID(),
		Type:    MsgTypeSearchMusicRequest,
		Content: body,
		Target:  target,
	}
	if sendErr := sendMessage(request); sendErr != nil {
		return nil, fmt.Errorf("failed to send music request to node %s: %v", target, sendErr)
	}
	select {
	case res := <-musicResultsChan:
		return res, nil
	case <-time.After(20 * time.Second):
		return nil, fmt.Errorf("timeout waiting for music results from node %s", target)
	}
}
// handleMusicResultsMessage decodes an incoming music-results message and
// forwards the decoded slice to musicResultsChan for a waiting requester.
// Malformed payloads are logged and dropped.
func handleMusicResultsMessage(msg Message) {
	results, err := decodeMusicResults([]byte(msg.Content))
	if err != nil {
		printWarn("Error decoding music results: %v", err)
		return
	}
	printDebug("Received music results: %+v", results)
	// Deliver asynchronously so this handler does not block.
	// NOTE(review): if no requester is waiting on musicResultsChan this
	// goroutine blocks forever — confirm a receiver always exists or add
	// a timeout/default case.
	go func() {
		musicResultsChan <- results
	}()
}
// encodeMusicResults serializes music results into the node wire format:
// a big-endian uint16 count followed by eight length-prefixed strings per
// result (URL, Title, Artist, Description, PublishedDate, Thumbnail,
// Source, Duration) — field order must match decodeMusicResults.
func encodeMusicResults(results []MusicResult) ([]byte, error) {
	var out bytes.Buffer
	if err := binary.Write(&out, binary.BigEndian, uint16(len(results))); err != nil {
		return nil, err
	}
	for _, item := range results {
		fields := []string{
			item.URL,
			item.Title,
			item.Artist,
			item.Description,
			item.PublishedDate,
			item.Thumbnail,
			item.Source,
			item.Duration,
		}
		for _, field := range fields {
			if err := writeString(&out, field); err != nil {
				return nil, err
			}
		}
	}
	return out.Bytes(), nil
}
// decodeMusicResults parses the wire format produced by encodeMusicResults
// back into a slice of MusicResult, erroring on the first truncated field.
func decodeMusicResults(data []byte) ([]MusicResult, error) {
	rd := bytes.NewReader(data)
	var n uint16
	if err := binary.Read(rd, binary.BigEndian, &n); err != nil {
		return nil, err
	}
	out := make([]MusicResult, 0, n)
	for idx := uint16(0); idx < n; idx++ {
		var item MusicResult
		// Destination order must mirror the field order written by
		// encodeMusicResults.
		dests := []*string{
			&item.URL,
			&item.Title,
			&item.Artist,
			&item.Description,
			&item.PublishedDate,
			&item.Thumbnail,
			&item.Source,
			&item.Duration,
		}
		for _, dst := range dests {
			s, err := readString(rd)
			if err != nil {
				return nil, err
			}
			*dst = s
		}
		out = append(out, item)
	}
	return out, nil
}
// func handleMusicSearchRequest(msg Message) {
// buf := bytes.NewReader([]byte(msg.Content))
// query, _ := readString(buf)
// var page uint16
// binary.Read(buf, binary.BigEndian, &page)
// results := fetchMusicResults(query, int(page))
// encoded, err := encodeMusicResults(results)
// if err != nil {
// printWarn("Encoding music results failed: %v", err)
// return
// }
// reply := Message{
// ID: msg.ID,
// Type: MsgTypeSearchMusicResponse,
// Content: string(encoded),
// Target: msg.Target, // Send back to sender
// }
// sendMessage(reply)
// }

View file

@ -1,21 +1,23 @@
//go:build experimental
// +build experimental
package main
import (
"encoding/json"
"bytes"
"encoding/binary"
"fmt"
"time"
)
var textResultsChan = make(chan []TextSearchResult)
func tryOtherNodesForTextSearch(query, safe, lang string, page int, visitedNodes []string) []TextSearchResult {
for _, nodeAddr := range peers {
if contains(visitedNodes, nodeAddr) {
continue // Skip nodes already visited
}
results, err := sendTextSearchRequestToNode(nodeAddr, query, safe, lang, page, visitedNodes)
// Try other nodes is not defined for every type
func tryOtherNodesForTextSearch(query, safe, lang string, page int) []TextSearchResult {
for _, nodeTarget := range sockets {
results, err := sendTextSearchRequestToNode(nodeTarget, query, safe, lang, page)
if err != nil {
printWarn("Error contacting node %s: %v", nodeAddr, err)
printWarn("Error contacting node %s: %v", nodeTarget, err)
continue
}
if len(results) > 0 {
@ -25,60 +27,102 @@ func tryOtherNodesForTextSearch(query, safe, lang string, page int, visitedNodes
return nil
}
func sendTextSearchRequestToNode(nodeAddr, query, safe, lang string, page int, visitedNodes []string) ([]TextSearchResult, error) {
visitedNodes = append(visitedNodes, nodeAddr)
searchParams := struct {
Query string `json:"query"`
Safe string `json:"safe"`
Lang string `json:"lang"`
Page int `json:"page"`
ResponseAddr string `json:"responseAddr"`
VisitedNodes []string `json:"visitedNodes"`
}{
Query: query,
Safe: safe,
Lang: lang,
Page: page,
ResponseAddr: fmt.Sprintf("http://localhost:%d/node", config.Port),
VisitedNodes: visitedNodes,
}
msgBytes, err := json.Marshal(searchParams)
func sendTextSearchRequestToNode(target, query, safe, lang string, page int) ([]TextSearchResult, error) {
payload, err := encodeSearchTextParams(query, safe, lang, page)
if err != nil {
return nil, fmt.Errorf("failed to marshal search parameters: %v", err)
return nil, fmt.Errorf("encode error: %v", err)
}
msg := Message{
ID: hostID,
Type: "search-text",
Content: string(msgBytes),
ID: generateMessageID(), // assume function returns uint32
Type: MsgTypeSearchTextRequest,
Content: payload,
Target: target,
}
err = sendMessage(nodeAddr, msg)
err = sendMessage(msg)
if err != nil {
return nil, fmt.Errorf("failed to send search request to node %s: %v", nodeAddr, err)
return nil, fmt.Errorf("failed to send search request to node %s: %v", target, err)
}
// Wait for results
select {
case res := <-textResultsChan:
return res, nil
case <-time.After(20 * time.Second):
return nil, fmt.Errorf("timeout waiting for results from node %s", nodeAddr)
return nil, fmt.Errorf("timeout waiting for results from node %s", target)
}
}
// handleTextResultsMessage decodes an incoming text-results message and
// forwards the decoded slice to textResultsChan for a waiting requester.
// Malformed payloads are logged and dropped.
func handleTextResultsMessage(msg Message) {
	results, err := decodeTextResults([]byte(msg.Content))
	if err != nil {
		printWarn("Error decoding text results: %v", err)
		return
	}
	printDebug("Received text results: %+v", results)
	// Deliver asynchronously so this handler never blocks the socket
	// reader while the requester drains the channel.
	go func() {
		textResultsChan <- results
	}()
}
// encodeTextResults serializes text results into the node wire format:
// a big-endian uint16 count followed by four length-prefixed strings per
// result (URL, Header, Description, Source) — order must match
// decodeTextResults.
func encodeTextResults(results []TextSearchResult) ([]byte, error) {
	var out bytes.Buffer
	if err := binary.Write(&out, binary.BigEndian, uint16(len(results))); err != nil {
		return nil, err
	}
	for _, item := range results {
		for _, field := range []string{item.URL, item.Header, item.Description, item.Source} {
			if err := writeString(&out, field); err != nil {
				return nil, err
			}
		}
	}
	return out.Bytes(), nil
}
// decodeTextResults parses the wire format produced by encodeTextResults
// back into a slice of TextSearchResult.
func decodeTextResults(data []byte) ([]TextSearchResult, error) {
	rd := bytes.NewReader(data)
	var n uint16
	if err := binary.Read(rd, binary.BigEndian, &n); err != nil {
		return nil, err
	}
	out := make([]TextSearchResult, 0, n)
	for idx := uint16(0); idx < n; idx++ {
		var item TextSearchResult
		// Destination order mirrors the writer's field order.
		for _, dst := range []*string{&item.URL, &item.Header, &item.Description, &item.Source} {
			s, err := readString(rd)
			if err != nil {
				return nil, err
			}
			*dst = s
		}
		out = append(out, item)
	}
	return out, nil
}

View file

@ -1,19 +1,22 @@
//go:build experimental
// +build experimental
package main
import (
"encoding/json"
"bytes"
"encoding/binary"
"fmt"
"time"
)
func tryOtherNodesForVideoSearch(query, safe, lang string, page int, visitedNodes []string) []VideoResult {
for _, nodeAddr := range peers {
if contains(visitedNodes, nodeAddr) {
continue // Skip nodes already visited
}
results, err := sendVideoSearchRequestToNode(nodeAddr, query, safe, lang, page, visitedNodes)
var videoResultsChan = make(chan []VideoResult)
func tryOtherNodesForVideoSearch(query, safe, lang string, page int) []VideoResult {
for _, node := range sockets {
results, err := sendVideoSearchRequestToNode(node, query, safe, lang, page)
if err != nil {
printWarn("Error contacting node %s: %v", nodeAddr, err)
printWarn("Error contacting node %s: %v", node, err)
continue
}
if len(results) > 0 {
@ -23,60 +26,134 @@ func tryOtherNodesForVideoSearch(query, safe, lang string, page int, visitedNode
return nil
}
func sendVideoSearchRequestToNode(nodeAddr, query, safe, lang string, page int, visitedNodes []string) ([]VideoResult, error) {
visitedNodes = append(visitedNodes, nodeAddr)
searchParams := struct {
Query string `json:"query"`
Safe string `json:"safe"`
Lang string `json:"lang"`
Page int `json:"page"`
ResponseAddr string `json:"responseAddr"`
VisitedNodes []string `json:"visitedNodes"`
}{
Query: query,
Safe: safe,
Lang: lang,
Page: page,
ResponseAddr: fmt.Sprintf("http://localhost:%d/node", config.Port),
VisitedNodes: visitedNodes,
}
msgBytes, err := json.Marshal(searchParams)
func sendVideoSearchRequestToNode(target, query, safe, lang string, page int) ([]VideoResult, error) {
payload, err := encodeSearchTextParams(query, safe, lang, page)
if err != nil {
return nil, fmt.Errorf("failed to marshal search parameters: %v", err)
return nil, fmt.Errorf("encode error: %v", err)
}
msg := Message{
ID: hostID,
Type: "search-video",
Content: string(msgBytes),
ID: generateMessageID(),
Type: MsgTypeSearchVideoRequest,
Content: payload,
Target: target,
}
err = sendMessage(nodeAddr, msg)
if err != nil {
return nil, fmt.Errorf("failed to send search request to node %s: %v", nodeAddr, err)
if err := sendMessage(msg); err != nil {
return nil, fmt.Errorf("send error: %v", err)
}
// Wait for results
select {
case res := <-videoResultsChan:
return res, nil
case <-time.After(20 * time.Second):
return nil, fmt.Errorf("timeout waiting for results from node %s", nodeAddr)
return nil, fmt.Errorf("timeout waiting for results from node %s", target)
}
}
// handleVideoResultsMessage decodes an incoming video-results message and
// forwards the decoded slice to videoResultsChan for a waiting requester.
// Malformed payloads are logged and dropped.
func handleVideoResultsMessage(msg Message) {
	results, err := decodeVideoResults([]byte(msg.Content))
	if err != nil {
		printWarn("Error decoding video results: %v", err)
		return
	}
	printDebug("Received video results: %+v", results)
	// Deliver asynchronously so this handler never blocks the socket
	// reader while the requester drains the channel.
	go func() {
		videoResultsChan <- results
	}()
}
// encodeVideoResults serializes video results into the node wire format:
// a big-endian uint16 count followed by eight length-prefixed strings per
// result (Href, Title, Date, Views, Creator, Publisher, Image, Duration)
// — field order must match decodeVideoResults.
func encodeVideoResults(results []VideoResult) ([]byte, error) {
	var out bytes.Buffer
	if err := binary.Write(&out, binary.BigEndian, uint16(len(results))); err != nil {
		return nil, err
	}
	for _, item := range results {
		fields := []string{
			item.Href,
			item.Title,
			item.Date,
			item.Views,
			item.Creator,
			item.Publisher,
			item.Image,
			item.Duration,
		}
		for _, field := range fields {
			if err := writeString(&out, field); err != nil {
				return nil, err
			}
		}
	}
	return out.Bytes(), nil
}
// decodeVideoResults parses the wire format produced by encodeVideoResults
// back into a slice of VideoResult, erroring on the first truncated field.
func decodeVideoResults(data []byte) ([]VideoResult, error) {
	rd := bytes.NewReader(data)
	var n uint16
	if err := binary.Read(rd, binary.BigEndian, &n); err != nil {
		return nil, err
	}
	out := make([]VideoResult, 0, n)
	for idx := uint16(0); idx < n; idx++ {
		var item VideoResult
		// Destination order mirrors the writer's field order.
		dests := []*string{
			&item.Href,
			&item.Title,
			&item.Date,
			&item.Views,
			&item.Creator,
			&item.Publisher,
			&item.Image,
			&item.Duration,
		}
		for _, dst := range dests {
			s, err := readString(rd)
			if err != nil {
				return nil, err
			}
			*dst = s
		}
		out = append(out, item)
	}
	return out, nil
}

View file

@ -1,28 +0,0 @@
package main
import (
"fmt"
"log"
"time"
)
// nodeUpdateSync notifies every configured peer, one at a time, to start
// its update process, pausing 30 seconds after each notification to give
// the peer time to apply the update before the next one is contacted.
// NOTE(review): the fixed sleep assumes an update completes within 30s —
// confirm, or replace with an acknowledgement from the peer.
func nodeUpdateSync() {
	fmt.Println("Syncing updates across all nodes...")
	for _, peerAddr := range peers {
		fmt.Printf("Notifying node %s about update...\n", peerAddr)
		msg := Message{
			ID:      hostID,
			Type:    "update",
			Content: "Start update process",
		}
		err := sendMessage(peerAddr, msg)
		if err != nil {
			// Best-effort: a failed peer is logged and skipped, not retried.
			log.Printf("Failed to notify node %s: %v\n", peerAddr, err)
			continue
		}
		fmt.Printf("Node %s notified. Waiting for it to update...\n", peerAddr)
		time.Sleep(30 * time.Second) // allow the peer time to apply the update
	}
	fmt.Println("All nodes have been updated.")
}

341
node.go
View file

@ -1,78 +1,152 @@
//go:build experimental
// +build experimental
package main
import (
"bytes"
"crypto/rand"
"encoding/binary"
"encoding/json"
"errors"
"fmt"
"io"
"net"
"net/http"
"os"
"time"
)
var (
authCode string
peers []string
sockets []string
hostID string
socketDir string
)
// Message is the unit exchanged between nodes over UNIX sockets.
// ID correlates a response with its request, Type selects the handler
// (see the MsgType* constants), Content carries the binary payload, and
// Target names the destination node's socket.
//
// Fix: stale JSON-tagged string fields from the old HTTP protocol were
// interleaved here, duplicating ID/Type/Content and breaking the struct;
// only the binary-protocol layout is kept.
type Message struct {
	ID      uint32
	Type    uint8
	Content []byte
	Target  string
}
// Wire-protocol message type identifiers. Requests occupy 10-99 and
// responses 110-199 so the two ranges can never collide.
const (
	MsgTypeNone uint8 = 0
	MsgTypeTest uint8 = 1
	// Request types (10-99)
	MsgTypeSearchTextRequest       uint8 = 10
	MsgTypeSearchImageRawRequest   uint8 = 11
	MsgTypeSearchImageThumbRequest uint8 = 12
	MsgTypeSearchImageFullRequest  uint8 = 13
	MsgTypeSearchImageAllRequest   uint8 = 14
	MsgTypeSearchVideoRequest      uint8 = 15
	MsgTypeSearchFileRequest       uint8 = 16
	MsgTypeSearchForumRequest      uint8 = 17
	MsgTypeSearchMusicRequest      uint8 = 18
	// Response types (110-199)
	MsgTypeSearchTextResponse  uint8 = 110
	MsgTypeSearchImageResponse uint8 = 111
	MsgTypeSearchVideoResponse uint8 = 112
	MsgTypeSearchFileResponse  uint8 = 113
	MsgTypeSearchForumResponse uint8 = 114
	MsgTypeSearchMusicResponse uint8 = 115
)
func loadNodeConfig() {
authCode = config.AuthCode
peers = config.Peers
sockets = config.Nodes
socketDir = "/tmp/" // Directory where sockets are stored, for now fixed tmp dir, can be changed later
}
func generateHostID() (string, error) {
bytes := make([]byte, 16)
_, err := rand.Read(bytes)
if err != nil {
return "", fmt.Errorf("failed to generate host ID: %v", err)
// messageIDCounter is the monotonically increasing source for message IDs.
// NOTE(review): access is not synchronized — confirm all callers run on a
// single goroutine, or guard this with sync/atomic.
var messageIDCounter uint32 = 0

// generateMessageID returns the next message ID. IDs start at 1 and wrap
// from the maximum uint32 value back to 1, so 0 is never issued and can
// mean "no ID".
//
// Fix: a stray leftover line from the removed generateHostID
// (`return fmt.Sprintf(...)`) was interleaved into the body, making the
// function invalid; it is removed here.
func generateMessageID() uint32 {
	if messageIDCounter == ^uint32(0) { // 0xFFFFFFFF
		messageIDCounter = 1
	} else {
		messageIDCounter++
	}
	return messageIDCounter
}
func sendMessage(serverAddr string, msg Message) error {
if serverAddr == "" {
return fmt.Errorf("server address is empty")
// encodeSearchTextParams serializes the common search parameters shared
// by all search request types: query, safe, and lang as length-prefixed
// strings followed by page as a big-endian uint16.
//
// Fix: a stray leftover line from the removed JSON transport
// (`msgBytes, err := json.Marshal(msg)`) was interleaved into the body,
// making the function invalid; it is removed here.
func encodeSearchTextParams(query, safe, lang string, page int) ([]byte, error) {
	buf := new(bytes.Buffer)
	if err := writeString(buf, query); err != nil {
		return nil, err
	}
	if err := writeString(buf, safe); err != nil {
		return nil, err
	}
	if err := writeString(buf, lang); err != nil {
		return nil, err
	}
	if err := binary.Write(buf, binary.BigEndian, uint16(page)); err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}
func sendMessage(msg Message) error {
socketPath := socketDir + msg.Target + ".sock"
conn, err := net.Dial("unix", socketPath)
if err != nil {
return fmt.Errorf("failed to marshal message: %v", err)
return fmt.Errorf("failed to connect to socket %s: %v", socketPath, err)
}
defer conn.Close()
req, err := http.NewRequest("POST", serverAddr, bytes.NewBuffer(msgBytes))
msgBytes, err := serializeMessage(msg)
if err != nil {
return fmt.Errorf("failed to create request: %v", err)
}
req.Header.Set("Content-Type", "application/json")
req.Header.Set("Authorization", authCode)
client := &http.Client{
Timeout: time.Second * 10,
return fmt.Errorf("serialization error: %v", err)
}
resp, err := client.Do(req)
_, err = conn.Write(msgBytes)
return err
}
// startUnixSocketServer listens on socketDir + socketName + ".sock",
// removing any stale socket file first, and serves connections until the
// process exits. Each connection is handled in its own goroutine: the
// full payload is read, deserialized, and dispatched to
// interpretMessage. This function blocks and panics if the listener
// cannot be created.
//
// Fix: stale lines from the removed HTTP transport (resp.Body handling,
// status-code checks) were interleaved into the body, making the
// function invalid; only the UNIX-socket accept loop is kept.
func startUnixSocketServer(socketName string) {
	socketPath := socketDir + socketName + ".sock"
	// Remove a leftover socket file from a previous run, or Listen fails.
	if _, err := os.Stat(socketPath); err == nil {
		os.Remove(socketPath)
	}
	listener, err := net.Listen("unix", socketPath)
	if err != nil {
		panic(fmt.Sprintf("Failed to listen on %s: %v", socketPath, err))
	}
	defer listener.Close()
	os.Chmod(socketPath, 0666)
	printInfo("Listening on UNIX socket: %s", socketPath)
	for {
		conn, err := listener.Accept()
		if err != nil {
			printWarn("Accept error: %v", err)
			continue
		}
		go func(c net.Conn) {
			defer c.Close()
			// The sender writes one message and closes, so reading to
			// EOF yields exactly one serialized Message.
			buf, err := io.ReadAll(c)
			if err != nil {
				printWarn("Read error: %v", err)
				return
			}
			msg, err := deserializeMessage(buf)
			if err != nil {
				printWarn("Deserialization error: %v", err)
				return
			}
			printDebug("Received binary message: %+v", msg)
			interpretMessage(msg)
		}(conn)
	}
}
func handleNodeRequest(w http.ResponseWriter, r *http.Request) {
@ -81,12 +155,6 @@ func handleNodeRequest(w http.ResponseWriter, r *http.Request) {
return
}
auth := r.Header.Get("Authorization")
if auth != authCode {
http.Error(w, "Unauthorized", http.StatusUnauthorized)
return
}
var msg Message
err := json.NewDecoder(r.Body).Decode(&msg)
if err != nil {
@ -95,64 +163,177 @@ func handleNodeRequest(w http.ResponseWriter, r *http.Request) {
}
defer r.Body.Close()
printDebug("Received message: %+v\n", msg)
w.Write([]byte("Message received"))
printDebug("Received HTTP message: %+v", msg)
interpretMessage(msg)
}
func startNodeClient() {
for {
for _, peerAddr := range peers {
msg := Message{
ID: hostID,
Type: "test",
Content: "This is a test message from the client node",
}
func startNodeClientUnix() {
var idCounter uint32 = 0
err := sendMessage(peerAddr, msg)
if err != nil {
printWarn("Error sending message to %s: %v", peerAddr, err)
} else {
printInfo("Message sent successfully to: %s", peerAddr)
for {
msg := Message{
ID: idCounter,
Type: MsgTypeTest,
Content: []byte("This is a test message via UNIX socket"),
Target: "node2", ///!!!
}
idCounter++
if err := sendMessage(msg); err != nil {
printWarn("Send error: %v", err)
}
time.Sleep(10 * time.Second)
}
}
func interpretMessage(msg Message) {
printDebug("Received message: %s", msg.Content)
switch msg.Type {
case "test":
printDebug("Received test message: %v", msg.Content)
case "update":
printDebug("Received update message: %v", msg.Content)
go update()
case "heartbeat":
handleHeartbeat(msg.Content)
case "election":
handleElection(msg.Content)
case "search-text":
case MsgTypeTest:
handleTestMessage(msg)
case MsgTypeSearchTextRequest:
handleSearchTextMessage(msg)
case "search-image":
case MsgTypeSearchImageRawRequest, MsgTypeSearchImageThumbRequest, MsgTypeSearchImageFullRequest, MsgTypeSearchImageAllRequest:
handleSearchImageMessage(msg)
case "search-video":
case MsgTypeSearchVideoRequest:
handleSearchVideoMessage(msg)
case "search-file":
case MsgTypeSearchFileRequest:
handleSearchFileMessage(msg)
case "search-forum":
case MsgTypeSearchForumRequest:
handleSearchForumMessage(msg)
case "forum-results":
handleForumResultsMessage(msg)
case "text-results":
case MsgTypeSearchMusicRequest:
handleSearchMusicMessage(msg)
case MsgTypeSearchTextResponse:
handleTextResultsMessage(msg)
case "image-results":
case MsgTypeSearchImageResponse:
handleImageResultsMessage(msg)
case "video-results":
case MsgTypeSearchVideoResponse:
handleVideoResultsMessage(msg)
case "file-results":
case MsgTypeSearchFileResponse:
handleFileResultsMessage(msg)
case MsgTypeSearchForumResponse:
handleForumResultsMessage(msg)
case MsgTypeSearchMusicResponse:
handleMusicResultsMessage(msg)
default:
printWarn("Received unknown message type: %v", msg.Type)
printWarn("Unknown message type: %d", msg.Type)
}
}
// serializeMessage encodes msg into the wire format: big-endian ID and Type,
// a uint16-length-prefixed Content, then a uint8-length-prefixed Target.
func serializeMessage(msg Message) ([]byte, error) {
	var out bytes.Buffer

	if err := binary.Write(&out, binary.BigEndian, msg.ID); err != nil {
		return nil, err
	}
	if err := binary.Write(&out, binary.BigEndian, msg.Type); err != nil {
		return nil, err
	}

	// Content is limited to what a uint16 length prefix can describe.
	if len(msg.Content) > 65535 {
		return nil, errors.New("content too long")
	}
	if err := binary.Write(&out, binary.BigEndian, uint16(len(msg.Content))); err != nil {
		return nil, err
	}
	out.Write(msg.Content)

	// Target carries only a single-byte length prefix.
	if len(msg.Target) > 255 {
		return nil, errors.New("target name too long")
	}
	if err := out.WriteByte(uint8(len(msg.Target))); err != nil {
		return nil, err
	}
	out.WriteString(msg.Target)

	return out.Bytes(), nil
}
// deserializeMessage decodes the wire format produced by serializeMessage
// back into a Message: big-endian ID and Type, a uint16-length-prefixed
// Content, then a uint8-length-prefixed Target.
func deserializeMessage(data []byte) (Message, error) {
	var msg Message
	r := bytes.NewReader(data)

	if err := binary.Read(r, binary.BigEndian, &msg.ID); err != nil {
		return msg, err
	}
	if err := binary.Read(r, binary.BigEndian, &msg.Type); err != nil {
		return msg, err
	}

	// Content: uint16 length prefix followed by that many payload bytes.
	var contentLen uint16
	if err := binary.Read(r, binary.BigEndian, &contentLen); err != nil {
		return msg, err
	}
	content := make([]byte, contentLen)
	if _, err := io.ReadFull(r, content); err != nil {
		return msg, err
	}
	msg.Content = content

	// Target: uint8 length prefix followed by the node name.
	var targetLen uint8
	if err := binary.Read(r, binary.BigEndian, &targetLen); err != nil {
		return msg, err
	}
	target := make([]byte, targetLen)
	if _, err := io.ReadFull(r, target); err != nil {
		return msg, err
	}
	msg.Target = string(target)

	return msg, nil
}
func writeString(buf *bytes.Buffer, s string) error {
if err := binary.Write(buf, binary.BigEndian, uint16(len(s))); err != nil {
return err
}
_, err := buf.Write([]byte(s))
return err
}
func readString(buf *bytes.Reader) (string, error) {
var length uint16
if err := binary.Read(buf, binary.BigEndian, &length); err != nil {
return "", err
}
strBytes := make([]byte, length)
if _, err := io.ReadFull(buf, strBytes); err != nil {
return "", err
}
return string(strBytes), nil
}
// testPayload is the JSON body carried by test messages. ResponseAddr names
// the socket the reply should be sent to (handleTestMessage copies it into
// the reply's Target field).
type testPayload struct {
	Message string `json:"message"`
	ResponseAddr string `json:"ResponseAddr"`
}
// handleTestMessage answers a test message: it decodes the JSON payload,
// logs it once, and sends a "hello test" reply (same ID) back to the
// socket named in the payload's ResponseAddr.
func handleTestMessage(msg Message) {
	var payload testPayload
	// msg.Content is already []byte; no conversion needed.
	if err := json.Unmarshal(msg.Content, &payload); err != nil {
		printWarn("Failed to parse test payload: %v", err)
		return
	}

	// Log the payload once (the previous printDebug duplicated this line).
	printInfo("Received TEST message: %s", payload.Message)

	reply := Message{
		ID:      msg.ID, // echo the sender's ID so they can correlate the reply
		Type:    MsgTypeTest,
		Content: []byte("hello test"),
		Target:  payload.ResponseAddr,
	}
	if err := sendMessage(reply); err != nil {
		printWarn("Failed to send test response: %v", err)
	}
}

272
text-extra.go Executable file
View file

@ -0,0 +1,272 @@
//go:build experimental
// +build experimental
package main
import (
"fmt"
"net/http"
"os"
"path/filepath"
"time"
)
// textSearchEngines holds the engines actually used for text search, in the
// order the user configured them; populated by initTextEngines.
var textSearchEngines []SearchEngine

// allTextSearchEngines lists every text meta-search backend this build knows
// how to query; initTextEngines filters it via config.MetaSearch.Text.
var allTextSearchEngines = []SearchEngine{
	{Name: "Google", Func: wrapTextSearchFunc(PerformGoogleTextSearch)},
	{Name: "LibreX", Func: wrapTextSearchFunc(PerformLibreXTextSearch)},
	{Name: "Brave", Func: wrapTextSearchFunc(PerformBraveTextSearch)},
	{Name: "DuckDuckGo", Func: wrapTextSearchFunc(PerformDuckDuckGoTextSearch)},
	{Name: "Quant", Func: wrapTextSearchFunc(PerformQwantTextSearch)}, // Broken !
	//{Name: "SearXNG", Func: wrapTextSearchFunc(PerformSearXTextSearch)}, // bruh
}
func initTextEngines() {
// textSearchEngines is your final slice (already declared globally)
textSearchEngines = nil // or make([]SearchEngine, 0)
for _, engineName := range config.MetaSearch.Text {
for _, candidate := range allTextSearchEngines {
if candidate.Name == engineName {
textSearchEngines = append(textSearchEngines, candidate)
break
}
}
}
}
// HandleTextSearch renders the text-results page for query/page using the
// caller's settings. Results are served from the cache when possible
// (falling back to a live fetch), and the adjacent pages are prefetched in
// the background to speed up pagination.
func HandleTextSearch(w http.ResponseWriter, settings UserSettings, query string, page int) {
	startTime := time.Now()

	// The cache key includes everything that can change the result set.
	cacheKey := CacheKey{
		Query: query,
		Page:  page,
		Safe:  settings.SafeSearch == "active",
		Lang:  settings.SearchLanguage,
		Type:  "text",
	}
	combinedResults := getTextResultsFromCacheOrFetch(cacheKey, query, settings.SafeSearch, settings.SearchLanguage, page)

	hasPrevPage := page > 1

	// Prefetch next and previous pages asynchronously
	go prefetchPage(query, settings.SafeSearch, settings.SearchLanguage, page+1)
	if hasPrevPage {
		go prefetchPage(query, settings.SafeSearch, settings.SearchLanguage, page-1)
	}

	// Timing stops here; favicon decoration below is fire-and-forget.
	elapsedTime := time.Since(startTime)

	// Simplified result structure without waiting for favicons
	type DecoratedResult struct {
		TextSearchResult
		PrettyLink LinkParts
		FaviconID  string // Just the ID, URL will be generated client-side
	}

	var decoratedResults []DecoratedResult
	for _, r := range combinedResults {
		// A result without a URL cannot be linked; skip it.
		if r.URL == "" {
			continue
		}
		prettyLink := FormatLinkHTML(r.URL)
		faviconID := faviconIDFromURL(prettyLink.RootURL)
		decoratedResults = append(decoratedResults, DecoratedResult{
			TextSearchResult: r,
			PrettyLink:       prettyLink,
			FaviconID:        faviconID,
		})
		// Start async favicon fetch if not already cached
		go ensureFaviconIsCached(faviconID, prettyLink.RootURL)
	}

	data := map[string]interface{}{
		"Results": decoratedResults,
		"Query":   query,
		"Fetched": FormatElapsedTime(elapsedTime),
		"Page":    page,
		"HasPrevPage": hasPrevPage,
		// Heuristic: a full page (>= 50 results) implies more pages may exist.
		"HasNextPage":      len(combinedResults) >= 50,
		"NoResults":        len(combinedResults) == 0,
		"LanguageOptions":  languageOptions,
		"CurrentLang":      settings.SearchLanguage,
		"Theme":            settings.Theme,
		"Safe":             settings.SafeSearch,
		"IsThemeDark":      settings.IsThemeDark,
		"Trans":            Translate,
		"HardCacheEnabled": config.DriveCacheEnabled,
	}

	renderTemplate(w, "text.html", data)
}
// ensureFaviconIsCached kicks off an asynchronous favicon download for
// rootURL unless the icon is already present in the drive cache.
func ensureFaviconIsCached(faviconID, rootURL string) {
	cachedPath := filepath.Join(config.DriveCache.Path, "images", fmt.Sprintf("%s_icon.webp", faviconID))
	if _, err := os.Stat(cachedPath); err == nil {
		// Icon already on disk; nothing to do.
		return
	}
	// Not cached yet; this call triggers the async download.
	getFaviconProxyURL("", rootURL)
}
// getTextResultsFromCacheOrFetch returns text results for cacheKey, serving
// from resultsCache when possible and falling back to a live fetch on a
// cache miss or when the cache lookup takes longer than two seconds.
func getTextResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page int) []TextSearchResult {
	// Buffered (cap 1) so the lookup goroutine can always complete its send.
	// With an unbuffered channel, the timeout branch below abandoned the
	// receive and the goroutine blocked on its send forever (leak).
	cacheChan := make(chan []SearchResult, 1)
	var combinedResults []TextSearchResult

	go func() {
		results, exists := resultsCache.Get(cacheKey)
		if exists {
			printDebug("Cache hit")
			cacheChan <- results
		} else {
			printDebug("Cache miss")
			cacheChan <- nil
		}
	}()

	select {
	case results := <-cacheChan:
		if results == nil {
			// Always attempt to fetch results on a cache miss
			combinedResults = fetchTextResults(query, safe, lang, page)
			if len(combinedResults) > 0 {
				resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
			}
		} else {
			textResults, _, _, _, _ := convertToSpecificResults(results)
			combinedResults = textResults
		}
	case <-time.After(2 * time.Second):
		printInfo("Cache check timeout")
		// Even on timeout, attempt to fetch results
		combinedResults = fetchTextResults(query, safe, lang, page)
		if len(combinedResults) > 0 {
			resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
		}
	}

	return combinedResults
}
// prefetchPage warms the results cache for the given page so adjacent-page
// navigation is fast. It is a no-op when the page is already cached or the
// meta-search crawler is disabled.
func prefetchPage(query, safe, lang string, page int) {
	key := CacheKey{Query: query, Page: page, Safe: safe == "active", Lang: lang, Type: "text"}

	if _, cached := resultsCache.Get(key); cached {
		printInfo("Page %d already cached", page)
		return
	}

	printInfo("Page %d not cached, caching now...", page)
	if !config.MetaSearchEnabled {
		printInfo("Crawler disabled; skipping prefetch for page %d", page)
		return
	}

	if pageResults := fetchTextResults(query, safe, lang, page); len(pageResults) > 0 {
		resultsCache.Set(key, convertToSearchResults(pageResults))
	}
}
// The logic in this function is rotating search engines instead of running them in order as noted in the wiki
//
// fetchTextResults returns text results for the given overall page. With the
// meta-search crawler disabled it serves from the local index; otherwise it
// picks one engine per page (round-robin) and falls back to the remaining
// engines when the selected one returns nothing.
func fetchTextResults(query, safe, lang string, page int) []TextSearchResult {
	var results []TextSearchResult

	if !config.MetaSearchEnabled {
		printDebug("Crawler is disabled; fetching from local index.")

		// Calculate the starting position based on the page number
		indexedResults, err := SearchIndex(query, page, 10)
		if err != nil {
			printErr("Error searching the index: %v", err)
			return results // Return empty results on error
		}

		// Convert indexed results to TextSearchResult format
		for _, doc := range indexedResults {
			results = append(results, TextSearchResult{
				URL:         doc.Link,
				Header:      doc.Title,
				Description: doc.Description,
				Source:      doc.Tags,
			})
		}
		return results
	}

	// Crawler is enabled, so use the search engines.
	engineCount := len(textSearchEngines)
	if engineCount == 0 {
		// Guard against a divide-by-zero panic below when no engines are
		// configured (e.g. config.MetaSearch.Text matched nothing).
		printWarn("No text search engines are configured; returning no results")
		return results
	}

	// Determine which engine to use for the current page (rotates by page).
	engineIndex := (page - 1) % engineCount
	engine := textSearchEngines[engineIndex]

	// Calculate the page number for this engine; the same value applies to
	// every engine in the fallback loop below, so compute it once.
	enginePage := (page-1)/engineCount + 1

	printDebug("Fetching results for overall page %d using engine: %s (engine page %d)", page, engine.Name, enginePage)

	// Fetch results from the selected engine
	searchResults, _, err := engine.Func(query, safe, lang, enginePage)
	if err != nil {
		printWarn("Error performing search with %s: %v", engine.Name, err)
	} else {
		results = append(results, validateResults(searchResults)...)
	}

	// If no results are found with the selected engine, try the next in line
	if len(results) == 0 {
		for i := 1; i < engineCount; i++ {
			nextEngine := textSearchEngines[(engineIndex+i)%engineCount]
			printInfo("No results found, trying next engine: %s (engine page %d)", nextEngine.Name, enginePage)

			searchResults, _, err := nextEngine.Func(query, safe, lang, enginePage)
			if err != nil {
				printWarn("Error performing search with %s: %v", nextEngine.Name, err)
				continue
			}
			results = append(results, validateResults(searchResults)...)
			if len(results) > 0 {
				break
			}
		}
	}

	printInfo("Fetched %d results for overall page %d", len(results), page)
	return results
}
// validateResults filters searchResults down to usable TextSearchResult
// values, dropping entries that are not text results at all.
func validateResults(searchResults []SearchResult) []TextSearchResult {
	var validResults []TextSearchResult

	for _, result := range searchResults {
		// Checked assertion: a stray non-text value in the slice is skipped
		// instead of panicking (the original unchecked assertion would).
		textResult, ok := result.(TextSearchResult)
		if !ok {
			continue
		}
		// NOTE(review): the original comment says "remove anything missing a
		// URL or Header", but this condition keeps entries that have either
		// field. Behavior is preserved here — confirm whether both should be
		// required (&&) instead.
		if textResult.URL != "" || textResult.Header != "" {
			validResults = append(validResults, textResult)
		}
	}
	return validResults
}
// wrapTextSearchFunc adapts a text-specific search function to the generic
// SearchEngine signature by boxing each TextSearchResult as a SearchResult.
func wrapTextSearchFunc(f func(string, string, string, int) ([]TextSearchResult, time.Duration, error)) func(string, string, string, int) ([]SearchResult, time.Duration, error) {
	return func(query, safe, lang string, page int) ([]SearchResult, time.Duration, error) {
		textResults, elapsed, err := f(query, safe, lang, page)
		if err != nil {
			return nil, elapsed, err
		}
		generic := make([]SearchResult, len(textResults))
		for i, tr := range textResults {
			generic[i] = tr
		}
		return generic, elapsed, nil
	}
}

26
text.go
View file

@ -1,3 +1,6 @@
//go:build !experimental
// +build !experimental
package main
import (
@ -174,28 +177,6 @@ func prefetchPage(query, safe, lang string, page int) {
func fetchTextResults(query, safe, lang string, page int) []TextSearchResult {
var results []TextSearchResult
if !config.MetaSearchEnabled {
printDebug("Crawler is disabled; fetching from local index.")
// Calculate the starting position based on the page number
indexedResults, err := SearchIndex(query, page, 10)
if err != nil {
printErr("Error searching the index: %v", err)
return results // Return empty results on error
}
// Convert indexed results to TextSearchResult format
for _, doc := range indexedResults {
results = append(results, TextSearchResult{
URL: doc.Link,
Header: doc.Title,
Description: doc.Description,
Source: doc.Tags,
})
}
return results
} else {
// Crawler is enabled, so use the search engines
engineCount := len(textSearchEngines)
@ -237,7 +218,6 @@ func fetchTextResults(query, safe, lang string, page int) []TextSearchResult {
printInfo("Fetched %d results for overall page %d", len(results), page)
return results
}
}
func validateResults(searchResults []SearchResult) []TextSearchResult {

View file

@ -1,52 +0,0 @@
package main
import (
"fmt"
"os"
"os/exec"
"time"
)
// Function to check for updates and restart the server if an update is found
func checkForUpdates() {
repoURL := "https://weforge.xyz/Spitfire/Search.git"
localDir := "." // Assume the repository is cloned in the current directory
for {
err := gitPull(repoURL, localDir)
if err != nil {
fmt.Println("Error checking for updates:", err)
time.Sleep(10 * time.Minute)
continue
}
fmt.Println("Update found. Syncing updates...")
nodeUpdateSync()
fmt.Println("Restarting server to apply updates...")
update()
time.Sleep(10 * time.Minute)
}
}
// gitPull runs `git pull` for repoURL inside localDir, streaming git's
// output to this process's stdout/stderr. Returns git's exit error, if any.
func gitPull(repoURL, localDir string) error {
	pull := exec.Command("git", "-C", localDir, "pull", repoURL)
	pull.Stdout = os.Stdout
	pull.Stderr = os.Stderr
	return pull.Run()
}
// update launches run.sh as a replacement server process and, if the launch
// succeeds, terminates the current process so the new instance takes over.
func update() {
	restart := exec.Command("sh", "run.sh")
	restart.Stdout = os.Stdout
	restart.Stderr = os.Stderr

	if err := restart.Start(); err != nil {
		fmt.Println("Error starting the server:", err)
		return
	}
	// Hand over to the freshly started process.
	os.Exit(0)
}

View file

@ -15,7 +15,6 @@ var (
pipedInstances = []string{}
disabledInstances = make(map[string]bool)
mu sync.Mutex
videoResultsChan = make(chan []VideoResult) // Channel to receive video results from other nodes
)
func initPipedInstances() {
@ -146,11 +145,6 @@ func handleVideoSearch(w http.ResponseWriter, settings UserSettings, query strin
results = fetchVideoResults(query, settings.SafeSearch, settings.SearchLanguage, page)
}
if len(results) == 0 {
printWarn("No results from primary search, trying other nodes")
results = tryOtherNodesForVideoSearch(query, settings.SafeSearch, settings.SearchLanguage, page, []string{hostID})
}
elapsed := time.Since(start)
// Prepare the data to pass to the template