cleanup
This commit is contained in:
parent
cdc4c60618
commit
8fece91f75
16 changed files with 2275 additions and 2252 deletions
710
agent.go
Normal file → Executable file
710
agent.go
Normal file → Executable file
|
@ -1,355 +1,355 @@
|
|||
package main
|
||||
|
||||
import (
	"encoding/json"
	"fmt"
	"io/ioutil"
	"math/rand"
	"net/http"
	"sort"
	"strings"
	"sync"
	"time"
)
|
||||
|
||||
// BrowserVersion pairs a browser version label with its global usage share
// (a percentage) as reported by the caniuse dataset.
type BrowserVersion struct {
	Version string  `json:"version"`
	Global  float64 `json:"global"`
}

// BrowserData holds per-version usage statistics for the two browser
// families this package impersonates. Note the JSON tag for Chromium is
// "chrome" — that is the key used by the caniuse payload.
type BrowserData struct {
	Firefox  []BrowserVersion `json:"firefox"`
	Chromium []BrowserVersion `json:"chrome"`
}

var (
	// cache maps a caller-chosen key to a generated user-agent string,
	// guarded by the embedded RWMutex.
	cache = struct {
		sync.RWMutex
		data map[string]string
	}{
		data: make(map[string]string),
	}
	// browserCache memoizes fetched BrowserData; expires starts at
	// time.Now() so the very first read always triggers a fetch.
	browserCache = struct {
		sync.RWMutex
		data    BrowserData
		expires time.Time
	}{
		expires: time.Now(),
	}
)
|
||||
|
||||
func fetchLatestBrowserVersions() (BrowserData, error) {
|
||||
url := "https://raw.githubusercontent.com/Fyrd/caniuse/master/fulldata-json/data-2.0.json"
|
||||
|
||||
resp, err := http.Get(url)
|
||||
if err != nil {
|
||||
return BrowserData{}, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
body, err := ioutil.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return BrowserData{}, err
|
||||
}
|
||||
|
||||
var rawData map[string]interface{}
|
||||
if err := json.Unmarshal(body, &rawData); err != nil {
|
||||
return BrowserData{}, err
|
||||
}
|
||||
|
||||
stats := rawData["agents"].(map[string]interface{})
|
||||
|
||||
var data BrowserData
|
||||
|
||||
if firefoxData, ok := stats["firefox"].(map[string]interface{}); ok {
|
||||
for version, usage := range firefoxData["usage_global"].(map[string]interface{}) {
|
||||
data.Firefox = append(data.Firefox, BrowserVersion{
|
||||
Version: version,
|
||||
Global: usage.(float64),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
if chromeData, ok := stats["chrome"].(map[string]interface{}); ok {
|
||||
for version, usage := range chromeData["usage_global"].(map[string]interface{}) {
|
||||
data.Chromium = append(data.Chromium, BrowserVersion{
|
||||
Version: version,
|
||||
Global: usage.(float64),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return data, nil
|
||||
}
|
||||
|
||||
func getLatestBrowserVersions() (BrowserData, error) {
|
||||
browserCache.RLock()
|
||||
if time.Now().Before(browserCache.expires) {
|
||||
data := browserCache.data
|
||||
browserCache.RUnlock()
|
||||
return data, nil
|
||||
}
|
||||
browserCache.RUnlock()
|
||||
|
||||
data, err := fetchLatestBrowserVersions()
|
||||
if err != nil {
|
||||
return BrowserData{}, err
|
||||
}
|
||||
|
||||
browserCache.Lock()
|
||||
browserCache.data = data
|
||||
browserCache.expires = time.Now().Add(24 * time.Hour)
|
||||
browserCache.Unlock()
|
||||
|
||||
return data, nil
|
||||
}
|
||||
|
||||
func randomUserAgent() (string, error) {
|
||||
browsers, err := getLatestBrowserVersions()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
rand.Seed(time.Now().UnixNano())
|
||||
|
||||
// Simulated browser usage statistics (in percentages)
|
||||
usageStats := map[string]float64{
|
||||
"Firefox": 30.0,
|
||||
"Chromium": 70.0,
|
||||
}
|
||||
|
||||
// Calculate the probabilities for the versions
|
||||
probabilities := []float64{0.5, 0.25, 0.125, 0.0625, 0.03125, 0.015625, 0.0078125, 0.00390625}
|
||||
|
||||
// Select a browser based on usage statistics
|
||||
browserType := ""
|
||||
randVal := rand.Float64() * 100
|
||||
cumulative := 0.0
|
||||
for browser, usage := range usageStats {
|
||||
cumulative += usage
|
||||
if randVal < cumulative {
|
||||
browserType = browser
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
var versions []BrowserVersion
|
||||
switch browserType {
|
||||
case "Firefox":
|
||||
versions = browsers.Firefox
|
||||
case "Chromium":
|
||||
versions = browsers.Chromium
|
||||
}
|
||||
|
||||
if len(versions) == 0 {
|
||||
return "", fmt.Errorf("no versions found for browser: %s", browserType)
|
||||
}
|
||||
|
||||
// Sort versions by usage (descending order)
|
||||
sort.Slice(versions, func(i, j int) bool {
|
||||
return versions[i].Global > versions[j].Global
|
||||
})
|
||||
|
||||
// Select a version based on the probabilities
|
||||
version := ""
|
||||
randVal = rand.Float64()
|
||||
cumulative = 0.0
|
||||
for i, p := range probabilities {
|
||||
cumulative += p
|
||||
if randVal < cumulative && i < len(versions) {
|
||||
version = versions[i].Version
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if version == "" {
|
||||
version = versions[len(versions)-1].Version
|
||||
}
|
||||
|
||||
// Generate the user agent string
|
||||
userAgent := generateUserAgent(browserType, version)
|
||||
return userAgent, nil
|
||||
}
|
||||
|
||||
// generateUserAgent assembles a user-agent string for the given browser and
// version, pairing it with an operating-system token drawn at random from
// rough market-share weights. Unknown browsers yield an empty string.
func generateUserAgent(browser, version string) string {
	type weightedOS struct {
		name   string
		weight float64
	}
	candidates := []weightedOS{
		{"Windows NT 10.0; Win64; x64", 44.0},
		{"Windows NT 11.0; Win64; x64", 44.0},
		{"X11; Linux x86_64", 1.0},
		{"X11; Ubuntu; Linux x86_64", 1.0},
		{"Macintosh; Intel Mac OS X 10_15_7", 10.0},
	}

	// Roulette-wheel selection over the OS weights (they sum to 100).
	pick := rand.Float64() * 100
	total := 0.0
	selectedOS := ""
	for _, c := range candidates {
		total += c.weight
		if pick < total {
			selectedOS = c.name
			break
		}
	}

	switch browser {
	case "Firefox":
		return fmt.Sprintf("Mozilla/5.0 (%s; rv:%s) Gecko/20100101 Firefox/%s", selectedOS, version, version)
	case "Chromium":
		return fmt.Sprintf("Mozilla/5.0 (%s) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36", selectedOS, version)
	}
	return ""
}
|
||||
|
||||
func updateCachedUserAgents(newVersions BrowserData) {
|
||||
cache.Lock()
|
||||
defer cache.Unlock()
|
||||
for key, userAgent := range cache.data {
|
||||
randVal := rand.Float64()
|
||||
if randVal < 0.5 {
|
||||
updatedUserAgent := updateUserAgentVersion(userAgent, newVersions)
|
||||
cache.data[key] = updatedUserAgent
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func updateUserAgentVersion(userAgent string, newVersions BrowserData) string {
|
||||
// Parse the current user agent to extract browser and version
|
||||
var browserType, version string
|
||||
if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36", &version); err == nil {
|
||||
browserType = "Chromium"
|
||||
} else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (Windows NT 11.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36", &version); err == nil {
|
||||
browserType = "Chromium"
|
||||
} else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36", &version); err == nil {
|
||||
browserType = "Chromium"
|
||||
} else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (X11; Ubuntu; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36", &version); err == nil {
|
||||
browserType = "Chromium"
|
||||
} else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36", &version); err == nil {
|
||||
browserType = "Chromium"
|
||||
} else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:%s) Gecko/20100101 Firefox/%s", &version, &version); err == nil {
|
||||
browserType = "Firefox"
|
||||
} else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (Windows NT 11.0; Win64; x64; rv:%s) Gecko/20100101 Firefox/%s", &version, &version); err == nil {
|
||||
browserType = "Firefox"
|
||||
} else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (X11; Linux x86_64; rv:%s) Gecko/20100101 Firefox/%s", &version, &version); err == nil {
|
||||
browserType = "Firefox"
|
||||
} else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:%s) Gecko/20100101 Firefox/%s", &version, &version); err == nil {
|
||||
browserType = "Firefox"
|
||||
} else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7; rv:%s) Gecko/20100101 Firefox/%s", &version, &version); err == nil {
|
||||
browserType = "Firefox"
|
||||
}
|
||||
|
||||
// Get the latest version for the browser type
|
||||
var latestVersion string
|
||||
if browserType == "Firefox" {
|
||||
latestVersion = newVersions.Firefox[0].Version
|
||||
} else if browserType == "Chromium" {
|
||||
latestVersion = newVersions.Chromium[0].Version
|
||||
}
|
||||
|
||||
// Update the user agent string with the new version
|
||||
return generateUserAgent(browserType, latestVersion)
|
||||
}
|
||||
|
||||
func periodicUpdate() {
|
||||
for {
|
||||
// Sleep for a random interval between 1 and 2 days
|
||||
time.Sleep(time.Duration(24+rand.Intn(24)) * time.Hour)
|
||||
|
||||
// Fetch the latest browser versions
|
||||
newVersions, err := fetchLatestBrowserVersions()
|
||||
if err != nil {
|
||||
fmt.Println("Error fetching latest browser versions:", err)
|
||||
continue
|
||||
}
|
||||
|
||||
// Update the browser version cache
|
||||
browserCache.Lock()
|
||||
browserCache.data = newVersions
|
||||
browserCache.expires = time.Now().Add(24 * time.Hour)
|
||||
browserCache.Unlock()
|
||||
|
||||
// Update the cached user agents
|
||||
updateCachedUserAgents(newVersions)
|
||||
}
|
||||
}
|
||||
|
||||
func GetUserAgent(cacheKey string) (string, error) {
|
||||
cache.RLock()
|
||||
userAgent, found := cache.data[cacheKey]
|
||||
cache.RUnlock()
|
||||
|
||||
if found {
|
||||
return userAgent, nil
|
||||
}
|
||||
|
||||
userAgent, err := randomUserAgent()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
cache.Lock()
|
||||
cache.data[cacheKey] = userAgent
|
||||
cache.Unlock()
|
||||
|
||||
return userAgent, nil
|
||||
}
|
||||
|
||||
func GetNewUserAgent(cacheKey string) (string, error) {
|
||||
userAgent, err := randomUserAgent()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
cache.Lock()
|
||||
cache.data[cacheKey] = userAgent
|
||||
cache.Unlock()
|
||||
|
||||
return userAgent, nil
|
||||
}
|
||||
|
||||
// init starts the background refresher that keeps browser versions and
// cached user agents up to date for the lifetime of the process.
func init() {
	go periodicUpdate()
}
|
||||
|
||||
// func main() {
|
||||
// go periodicUpdate() // not needed here
|
||||
|
||||
// cacheKey := "image-search"
|
||||
// userAgent, err := GetUserAgent(cacheKey)
|
||||
// if err != nil {
|
||||
// fmt.Println("Error:", err)
|
||||
// return
|
||||
// }
|
||||
|
||||
// fmt.Println("Generated User Agent:", userAgent)
|
||||
|
||||
// // Request a new user agent for the same key
|
||||
// newUserAgent, err := GetNewUserAgent(cacheKey)
|
||||
// if err != nil {
|
||||
// fmt.Println("Error:", err)
|
||||
// return
|
||||
// }
|
||||
|
||||
// fmt.Println("New User Agent:", newUserAgent)
|
||||
|
||||
// AcacheKey := "image-search"
|
||||
// AuserAgent, err := GetUserAgent(AcacheKey)
|
||||
// if err != nil {
|
||||
// fmt.Println("Error:", err)
|
||||
// return
|
||||
// }
|
||||
|
||||
// fmt.Println("Generated User Agent:", AuserAgent)
|
||||
|
||||
// DcacheKey := "image-search"
|
||||
// DuserAgent, err := GetUserAgent(DcacheKey)
|
||||
// if err != nil {
|
||||
// fmt.Println("Error:", err)
|
||||
// return
|
||||
// }
|
||||
|
||||
// fmt.Println("Generated User Agent:", DuserAgent)
|
||||
|
||||
// }
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"math/rand"
|
||||
"net/http"
|
||||
"sort"
|
||||
"sync"
|
||||
"time"
|
||||
)
|
||||
|
||||
type BrowserVersion struct {
|
||||
Version string `json:"version"`
|
||||
Global float64 `json:"global"`
|
||||
}
|
||||
|
||||
type BrowserData struct {
|
||||
Firefox []BrowserVersion `json:"firefox"`
|
||||
Chromium []BrowserVersion `json:"chrome"`
|
||||
}
|
||||
|
||||
var (
|
||||
cache = struct {
|
||||
sync.RWMutex
|
||||
data map[string]string
|
||||
}{
|
||||
data: make(map[string]string),
|
||||
}
|
||||
browserCache = struct {
|
||||
sync.RWMutex
|
||||
data BrowserData
|
||||
expires time.Time
|
||||
}{
|
||||
expires: time.Now(),
|
||||
}
|
||||
)
|
||||
|
||||
func fetchLatestBrowserVersions() (BrowserData, error) {
|
||||
url := "https://raw.githubusercontent.com/Fyrd/caniuse/master/fulldata-json/data-2.0.json"
|
||||
|
||||
resp, err := http.Get(url)
|
||||
if err != nil {
|
||||
return BrowserData{}, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
body, err := ioutil.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return BrowserData{}, err
|
||||
}
|
||||
|
||||
var rawData map[string]interface{}
|
||||
if err := json.Unmarshal(body, &rawData); err != nil {
|
||||
return BrowserData{}, err
|
||||
}
|
||||
|
||||
stats := rawData["agents"].(map[string]interface{})
|
||||
|
||||
var data BrowserData
|
||||
|
||||
if firefoxData, ok := stats["firefox"].(map[string]interface{}); ok {
|
||||
for version, usage := range firefoxData["usage_global"].(map[string]interface{}) {
|
||||
data.Firefox = append(data.Firefox, BrowserVersion{
|
||||
Version: version,
|
||||
Global: usage.(float64),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
if chromeData, ok := stats["chrome"].(map[string]interface{}); ok {
|
||||
for version, usage := range chromeData["usage_global"].(map[string]interface{}) {
|
||||
data.Chromium = append(data.Chromium, BrowserVersion{
|
||||
Version: version,
|
||||
Global: usage.(float64),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return data, nil
|
||||
}
|
||||
|
||||
func getLatestBrowserVersions() (BrowserData, error) {
|
||||
browserCache.RLock()
|
||||
if time.Now().Before(browserCache.expires) {
|
||||
data := browserCache.data
|
||||
browserCache.RUnlock()
|
||||
return data, nil
|
||||
}
|
||||
browserCache.RUnlock()
|
||||
|
||||
data, err := fetchLatestBrowserVersions()
|
||||
if err != nil {
|
||||
return BrowserData{}, err
|
||||
}
|
||||
|
||||
browserCache.Lock()
|
||||
browserCache.data = data
|
||||
browserCache.expires = time.Now().Add(24 * time.Hour)
|
||||
browserCache.Unlock()
|
||||
|
||||
return data, nil
|
||||
}
|
||||
|
||||
func randomUserAgent() (string, error) {
|
||||
browsers, err := getLatestBrowserVersions()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
rand.Seed(time.Now().UnixNano())
|
||||
|
||||
// Simulated browser usage statistics (in percentages)
|
||||
usageStats := map[string]float64{
|
||||
"Firefox": 30.0,
|
||||
"Chromium": 70.0,
|
||||
}
|
||||
|
||||
// Calculate the probabilities for the versions
|
||||
probabilities := []float64{0.5, 0.25, 0.125, 0.0625, 0.03125, 0.015625, 0.0078125, 0.00390625}
|
||||
|
||||
// Select a browser based on usage statistics
|
||||
browserType := ""
|
||||
randVal := rand.Float64() * 100
|
||||
cumulative := 0.0
|
||||
for browser, usage := range usageStats {
|
||||
cumulative += usage
|
||||
if randVal < cumulative {
|
||||
browserType = browser
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
var versions []BrowserVersion
|
||||
switch browserType {
|
||||
case "Firefox":
|
||||
versions = browsers.Firefox
|
||||
case "Chromium":
|
||||
versions = browsers.Chromium
|
||||
}
|
||||
|
||||
if len(versions) == 0 {
|
||||
return "", fmt.Errorf("no versions found for browser: %s", browserType)
|
||||
}
|
||||
|
||||
// Sort versions by usage (descending order)
|
||||
sort.Slice(versions, func(i, j int) bool {
|
||||
return versions[i].Global > versions[j].Global
|
||||
})
|
||||
|
||||
// Select a version based on the probabilities
|
||||
version := ""
|
||||
randVal = rand.Float64()
|
||||
cumulative = 0.0
|
||||
for i, p := range probabilities {
|
||||
cumulative += p
|
||||
if randVal < cumulative && i < len(versions) {
|
||||
version = versions[i].Version
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if version == "" {
|
||||
version = versions[len(versions)-1].Version
|
||||
}
|
||||
|
||||
// Generate the user agent string
|
||||
userAgent := generateUserAgent(browserType, version)
|
||||
return userAgent, nil
|
||||
}
|
||||
|
||||
func generateUserAgent(browser, version string) string {
|
||||
oses := []struct {
|
||||
os string
|
||||
probability float64
|
||||
}{
|
||||
{"Windows NT 10.0; Win64; x64", 44.0},
|
||||
{"Windows NT 11.0; Win64; x64", 44.0},
|
||||
{"X11; Linux x86_64", 1.0},
|
||||
{"X11; Ubuntu; Linux x86_64", 1.0},
|
||||
{"Macintosh; Intel Mac OS X 10_15_7", 10.0},
|
||||
}
|
||||
|
||||
// Select an OS based on probabilities
|
||||
randVal := rand.Float64() * 100
|
||||
cumulative := 0.0
|
||||
selectedOS := ""
|
||||
for _, os := range oses {
|
||||
cumulative += os.probability
|
||||
if randVal < cumulative {
|
||||
selectedOS = os.os
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
switch browser {
|
||||
case "Firefox":
|
||||
return fmt.Sprintf("Mozilla/5.0 (%s; rv:%s) Gecko/20100101 Firefox/%s", selectedOS, version, version)
|
||||
case "Chromium":
|
||||
return fmt.Sprintf("Mozilla/5.0 (%s) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36", selectedOS, version)
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func updateCachedUserAgents(newVersions BrowserData) {
|
||||
cache.Lock()
|
||||
defer cache.Unlock()
|
||||
for key, userAgent := range cache.data {
|
||||
randVal := rand.Float64()
|
||||
if randVal < 0.5 {
|
||||
updatedUserAgent := updateUserAgentVersion(userAgent, newVersions)
|
||||
cache.data[key] = updatedUserAgent
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func updateUserAgentVersion(userAgent string, newVersions BrowserData) string {
|
||||
// Parse the current user agent to extract browser and version
|
||||
var browserType, version string
|
||||
if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36", &version); err == nil {
|
||||
browserType = "Chromium"
|
||||
} else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (Windows NT 11.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36", &version); err == nil {
|
||||
browserType = "Chromium"
|
||||
} else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36", &version); err == nil {
|
||||
browserType = "Chromium"
|
||||
} else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (X11; Ubuntu; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36", &version); err == nil {
|
||||
browserType = "Chromium"
|
||||
} else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36", &version); err == nil {
|
||||
browserType = "Chromium"
|
||||
} else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:%s) Gecko/20100101 Firefox/%s", &version, &version); err == nil {
|
||||
browserType = "Firefox"
|
||||
} else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (Windows NT 11.0; Win64; x64; rv:%s) Gecko/20100101 Firefox/%s", &version, &version); err == nil {
|
||||
browserType = "Firefox"
|
||||
} else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (X11; Linux x86_64; rv:%s) Gecko/20100101 Firefox/%s", &version, &version); err == nil {
|
||||
browserType = "Firefox"
|
||||
} else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:%s) Gecko/20100101 Firefox/%s", &version, &version); err == nil {
|
||||
browserType = "Firefox"
|
||||
} else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7; rv:%s) Gecko/20100101 Firefox/%s", &version, &version); err == nil {
|
||||
browserType = "Firefox"
|
||||
}
|
||||
|
||||
// Get the latest version for the browser type
|
||||
var latestVersion string
|
||||
if browserType == "Firefox" {
|
||||
latestVersion = newVersions.Firefox[0].Version
|
||||
} else if browserType == "Chromium" {
|
||||
latestVersion = newVersions.Chromium[0].Version
|
||||
}
|
||||
|
||||
// Update the user agent string with the new version
|
||||
return generateUserAgent(browserType, latestVersion)
|
||||
}
|
||||
|
||||
func periodicUpdate() {
|
||||
for {
|
||||
// Sleep for a random interval between 1 and 2 days
|
||||
time.Sleep(time.Duration(24+rand.Intn(24)) * time.Hour)
|
||||
|
||||
// Fetch the latest browser versions
|
||||
newVersions, err := fetchLatestBrowserVersions()
|
||||
if err != nil {
|
||||
printWarn("Error fetching latest browser versions: %v", err)
|
||||
continue
|
||||
}
|
||||
|
||||
// Update the browser version cache
|
||||
browserCache.Lock()
|
||||
browserCache.data = newVersions
|
||||
browserCache.expires = time.Now().Add(24 * time.Hour)
|
||||
browserCache.Unlock()
|
||||
|
||||
// Update the cached user agents
|
||||
updateCachedUserAgents(newVersions)
|
||||
}
|
||||
}
|
||||
|
||||
func GetUserAgent(cacheKey string) (string, error) {
|
||||
cache.RLock()
|
||||
userAgent, found := cache.data[cacheKey]
|
||||
cache.RUnlock()
|
||||
|
||||
if found {
|
||||
return userAgent, nil
|
||||
}
|
||||
|
||||
userAgent, err := randomUserAgent()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
cache.Lock()
|
||||
cache.data[cacheKey] = userAgent
|
||||
cache.Unlock()
|
||||
|
||||
return userAgent, nil
|
||||
}
|
||||
|
||||
func GetNewUserAgent(cacheKey string) (string, error) {
|
||||
userAgent, err := randomUserAgent()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
cache.Lock()
|
||||
cache.data[cacheKey] = userAgent
|
||||
cache.Unlock()
|
||||
|
||||
return userAgent, nil
|
||||
}
|
||||
|
||||
func init() {
|
||||
go periodicUpdate()
|
||||
}
|
||||
|
||||
// func main() {
|
||||
// go periodicUpdate() // not needed here
|
||||
|
||||
// cacheKey := "image-search"
|
||||
// userAgent, err := GetUserAgent(cacheKey)
|
||||
// if err != nil {
|
||||
// fmt.Println("Error:", err)
|
||||
// return
|
||||
// }
|
||||
|
||||
// fmt.Println("Generated User Agent:", userAgent)
|
||||
|
||||
// // Request a new user agent for the same key
|
||||
// newUserAgent, err := GetNewUserAgent(cacheKey)
|
||||
// if err != nil {
|
||||
// fmt.Println("Error:", err)
|
||||
// return
|
||||
// }
|
||||
|
||||
// fmt.Println("New User Agent:", newUserAgent)
|
||||
|
||||
// AcacheKey := "image-search"
|
||||
// AuserAgent, err := GetUserAgent(AcacheKey)
|
||||
// if err != nil {
|
||||
// fmt.Println("Error:", err)
|
||||
// return
|
||||
// }
|
||||
|
||||
// fmt.Println("Generated User Agent:", AuserAgent)
|
||||
|
||||
// DcacheKey := "image-search"
|
||||
// DuserAgent, err := GetUserAgent(DcacheKey)
|
||||
// if err != nil {
|
||||
// fmt.Println("Error:", err)
|
||||
// return
|
||||
// }
|
||||
|
||||
// fmt.Println("Generated User Agent:", DuserAgent)
|
||||
|
||||
// }
|
||||
|
|
99
common.go
Normal file → Executable file
99
common.go
Normal file → Executable file
|
@ -1,50 +1,49 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"crypto/rand"
|
||||
"encoding/base64"
|
||||
"html/template"
|
||||
"log"
|
||||
"strings"
|
||||
)
|
||||
|
||||
var (
|
||||
funcs = template.FuncMap{
|
||||
"sub": func(a, b int) int {
|
||||
return a - b
|
||||
},
|
||||
"add": func(a, b int) int {
|
||||
return a + b
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
// generateStrongRandomString returns a URL-safe random string of exactly
// length characters, backed by crypto/rand. A failure to read random bytes
// is fatal.
func generateStrongRandomString(length int) string {
	raw := make([]byte, length)
	if _, err := rand.Read(raw); err != nil {
		log.Fatalf("Error generating random string: %v", err)
	}
	encoded := base64.URLEncoding.EncodeToString(raw)
	return encoded[:length]
}
|
||||
|
||||
// hasProtocol reports whether url already carries an explicit http or https
// scheme prefix.
func hasProtocol(url string) bool {
	for _, scheme := range []string{"http://", "https://"} {
		if strings.HasPrefix(url, scheme) {
			return true
		}
	}
	return false
}
|
||||
|
||||
// isLocalAddress reports whether domain names localhost or starts with a
// common private IPv4 prefix (127., 192.168., 10.).
// NOTE(review): the 172.16.0.0/12 private range is not covered — confirm
// whether that is intentional.
func isLocalAddress(domain string) bool {
	if domain == "localhost" {
		return true
	}
	for _, prefix := range []string{"127.", "192.168.", "10."} {
		if strings.HasPrefix(domain, prefix) {
			return true
		}
	}
	return false
}
|
||||
|
||||
// Ensures that HTTP or HTTPS is befor the adress if needed
|
||||
func addProtocol(domain string) string {
|
||||
if hasProtocol(domain) {
|
||||
return domain
|
||||
}
|
||||
if isLocalAddress(domain) {
|
||||
return "http://" + domain
|
||||
}
|
||||
return "https://" + domain
|
||||
}
|
||||
package main
|
||||
|
||||
import (
|
||||
"crypto/rand"
|
||||
"encoding/base64"
|
||||
"html/template"
|
||||
"strings"
|
||||
)
|
||||
|
||||
var (
|
||||
funcs = template.FuncMap{
|
||||
"sub": func(a, b int) int {
|
||||
return a - b
|
||||
},
|
||||
"add": func(a, b int) int {
|
||||
return a + b
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
func generateStrongRandomString(length int) string {
|
||||
bytes := make([]byte, length)
|
||||
_, err := rand.Read(bytes)
|
||||
if err != nil {
|
||||
printErr("Error generating random string: %v", err)
|
||||
}
|
||||
return base64.URLEncoding.EncodeToString(bytes)[:length]
|
||||
}
|
||||
|
||||
// Checks if the URL already includes a protocol
|
||||
func hasProtocol(url string) bool {
|
||||
return strings.HasPrefix(url, "http://") || strings.HasPrefix(url, "https://")
|
||||
}
|
||||
|
||||
// Checks if the domain is a local address
|
||||
func isLocalAddress(domain string) bool {
|
||||
return domain == "localhost" || strings.HasPrefix(domain, "127.") || strings.HasPrefix(domain, "192.168.") || strings.HasPrefix(domain, "10.")
|
||||
}
|
||||
|
||||
// Ensures that HTTP or HTTPS is befor the adress if needed
|
||||
func addProtocol(domain string) string {
|
||||
if hasProtocol(domain) {
|
||||
return domain
|
||||
}
|
||||
if isLocalAddress(domain) {
|
||||
return "http://" + domain
|
||||
}
|
||||
return "https://" + domain
|
||||
}
|
||||
|
|
515
files.go
Normal file → Executable file
515
files.go
Normal file → Executable file
|
@ -1,258 +1,257 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"html/template"
|
||||
"log"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Settings carries the per-request presentation options handed to the
// file-search template.
type Settings struct {
	UxLang string // UI language code
	Safe   string // safe-search flag as a string; compared against "true" elsewhere
}
|
||||
|
||||
// TorrentSite is implemented by each torrent indexer: a display name plus a
// search over a query/category pair.
type TorrentSite interface {
	Name() string
	Search(query string, category string) ([]TorrentResult, error)
}
|
||||
|
||||
var (
	// Concrete torrent indexers; nil entries are skipped by fetchFileResults.
	torrentGalaxy TorrentSite
	nyaa          TorrentSite
	thePirateBay  TorrentSite
	rutor         TorrentSite
)

// fileResultsChan is not read or written anywhere in this chunk — verify it
// is still needed before relying on it.
var fileResultsChan = make(chan []TorrentResult)
|
||||
|
||||
// initializeTorrentSites wires up the enabled torrent indexers; nyaa and
// rutor are currently disabled (left nil and skipped by fetchFileResults).
func initializeTorrentSites() {
	torrentGalaxy = NewTorrentGalaxy()
	// nyaa = NewNyaa()
	thePirateBay = NewThePirateBay()
	// rutor = NewRutor()
}
|
||||
|
||||
func handleFileSearch(w http.ResponseWriter, settings UserSettings, query, safe, lang string, page int) {
|
||||
startTime := time.Now()
|
||||
|
||||
cacheKey := CacheKey{Query: query, Page: page, Safe: safe == "true", Lang: lang, Type: "file"}
|
||||
combinedResults := getFileResultsFromCacheOrFetch(cacheKey, query, safe, lang, page)
|
||||
|
||||
sort.Slice(combinedResults, func(i, j int) bool { return combinedResults[i].Seeders > combinedResults[j].Seeders })
|
||||
|
||||
elapsedTime := time.Since(startTime)
|
||||
funcMap := template.FuncMap{
|
||||
"sub": func(a, b int) int { return a - b },
|
||||
"add": func(a, b int) int { return a + b },
|
||||
}
|
||||
tmpl, err := template.New("files.html").Funcs(funcMap).ParseFiles("templates/files.html")
|
||||
if err != nil {
|
||||
log.Printf("Failed to load template: %v", err)
|
||||
http.Error(w, "Failed to load template", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
data := struct {
|
||||
Results []TorrentResult
|
||||
Query string
|
||||
Fetched string
|
||||
Category string
|
||||
Sort string
|
||||
HasPrevPage bool
|
||||
HasNextPage bool
|
||||
Page int
|
||||
Settings Settings
|
||||
Theme string
|
||||
}{
|
||||
Results: combinedResults,
|
||||
Query: query,
|
||||
Fetched: fmt.Sprintf("%.2f", elapsedTime.Seconds()),
|
||||
Category: "all",
|
||||
Sort: "seed",
|
||||
HasPrevPage: page > 1,
|
||||
HasNextPage: len(combinedResults) > 0,
|
||||
Page: page,
|
||||
Settings: Settings{UxLang: lang, Safe: safe}, // Now this is painful, are there two Settings variables??
|
||||
Theme: settings.Theme,
|
||||
}
|
||||
|
||||
// // Debugging: Print results before rendering template
|
||||
// for _, result := range combinedResults {
|
||||
// fmt.Printf("Title: %s, Magnet: %s\n", result.Title, result.Magnet)
|
||||
// }
|
||||
|
||||
if err := tmpl.Execute(w, data); err != nil {
|
||||
log.Printf("Failed to render template: %v", err)
|
||||
http.Error(w, "Failed to render template", http.StatusInternalServerError)
|
||||
}
|
||||
}
|
||||
|
||||
func getFileResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page int) []TorrentResult {
|
||||
cacheChan := make(chan []SearchResult)
|
||||
var combinedResults []TorrentResult
|
||||
|
||||
go func() {
|
||||
results, exists := resultsCache.Get(cacheKey)
|
||||
if exists {
|
||||
log.Println("Cache hit")
|
||||
cacheChan <- results
|
||||
} else {
|
||||
log.Println("Cache miss")
|
||||
cacheChan <- nil
|
||||
}
|
||||
}()
|
||||
|
||||
select {
|
||||
case results := <-cacheChan:
|
||||
if results == nil {
|
||||
combinedResults = fetchFileResults(query, safe, lang, page)
|
||||
if len(combinedResults) > 0 {
|
||||
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
|
||||
}
|
||||
} else {
|
||||
_, torrentResults, _ := convertToSpecificResults(results)
|
||||
combinedResults = torrentResults
|
||||
}
|
||||
case <-time.After(2 * time.Second):
|
||||
log.Println("Cache check timeout")
|
||||
combinedResults = fetchFileResults(query, safe, lang, page)
|
||||
if len(combinedResults) > 0 {
|
||||
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
|
||||
}
|
||||
}
|
||||
|
||||
return combinedResults
|
||||
}
|
||||
|
||||
func fetchFileResults(query, safe, lang string, page int) []TorrentResult {
|
||||
sites := []TorrentSite{torrentGalaxy, nyaa, thePirateBay, rutor}
|
||||
results := []TorrentResult{}
|
||||
|
||||
for _, site := range sites {
|
||||
if site == nil {
|
||||
continue
|
||||
}
|
||||
res, err := site.Search(query, "all")
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
for _, r := range res {
|
||||
r.Magnet = removeMagnetLink(r.Magnet) // Remove "magnet:", prehaps usless now?
|
||||
results = append(results, r)
|
||||
}
|
||||
}
|
||||
|
||||
if len(results) == 0 {
|
||||
log.Printf("No file results found for query: %s, trying other nodes", query)
|
||||
results = tryOtherNodesForFileSearch(query, safe, lang, page, []string{hostID})
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
// removeMagnetLink strips a leading "magnet:" scheme from the link, if
// present; any other string is returned unchanged.
func removeMagnetLink(magnet string) string {
	const prefix = "magnet:"
	if strings.HasPrefix(magnet, prefix) {
		return magnet[len(prefix):]
	}
	return magnet
}
|
||||
|
||||
// parseInt converts s to an int, yielding 0 for anything unparsable.
func parseInt(s string) int {
	n, err := strconv.Atoi(s)
	if err != nil {
		return 0
	}
	return n
}
|
||||
|
||||
// parseSize converts a human-readable size such as "1.4 GB" or "512 KB"
// into a byte count. Unparsable input yields 0.
//
// Fix over the original: a plain-bytes suffix ("500 B") is now handled —
// the regex already matched it (the [KMGT] prefix is optional), but the
// unit switch only knew KB/MB/GB/TB and returned 0 for "B".
func parseSize(sizeStr string) int64 {
	sizeStr = strings.TrimSpace(sizeStr)
	if sizeStr == "" {
		return 0
	}

	// Extract the numeric value and the unit separately ("1.4", "GB").
	re := regexp.MustCompile(`(?i)([\d.]+)\s*([KMGT]?B)`)
	matches := re.FindStringSubmatch(sizeStr)
	if len(matches) < 3 {
		log.Printf("Error parsing size: invalid format %s", sizeStr)
		return 0
	}

	numberPart := matches[1]
	unit := strings.ToUpper(matches[2])

	var multiplier int64
	switch unit {
	case "B":
		multiplier = 1
	case "KB":
		multiplier = 1024
	case "MB":
		multiplier = 1024 * 1024
	case "GB":
		multiplier = 1024 * 1024 * 1024
	case "TB":
		multiplier = 1024 * 1024 * 1024 * 1024
	default:
		log.Printf("Unknown unit: %s", unit)
		return 0
	}

	size, err := strconv.ParseFloat(numberPart, 64)
	if err != nil {
		log.Printf("Error parsing size: %v", err)
		return 0
	}
	return int64(size * float64(multiplier))
}
|
||||
|
||||
// apparently this is needed so it can announce that magnet link is being used and people start seeding it, but I dont like the fact that I add trackers purposefully
|
||||
func applyTrackers(magnetLink string) string {
|
||||
if magnetLink == "" {
|
||||
return ""
|
||||
}
|
||||
trackers := []string{
|
||||
"udp://tracker.openbittorrent.com:80/announce",
|
||||
"udp://tracker.opentrackr.org:1337/announce",
|
||||
"udp://tracker.coppersurfer.tk:6969/announce",
|
||||
"udp://tracker.leechers-paradise.org:6969/announce",
|
||||
}
|
||||
for _, tracker := range trackers {
|
||||
magnetLink += "&tr=" + url.QueryEscape(tracker)
|
||||
}
|
||||
return magnetLink
|
||||
}
|
||||
|
||||
// formatSize renders a byte count as a human-readable string using binary
// (1024-based) units, e.g. 1536 -> "1.50 KB". Values below 1024 are shown
// as plain bytes.
func formatSize(size int64) string {
	const (
		kb = 1 << 10
		mb = 1 << 20
		gb = 1 << 30
		tb = 1 << 40
	)
	switch {
	case size >= tb:
		return fmt.Sprintf("%.2f TB", float64(size)/tb)
	case size >= gb:
		return fmt.Sprintf("%.2f GB", float64(size)/gb)
	case size >= mb:
		return fmt.Sprintf("%.2f MB", float64(size)/mb)
	case size >= kb:
		return fmt.Sprintf("%.2f KB", float64(size)/kb)
	default:
		return fmt.Sprintf("%d B", size)
	}
}
|
||||
|
||||
// sanitizeFileName makes name safe for use as a file name: runs of
// whitespace become a single dash, and everything except ASCII letters,
// digits, dashes and parentheses is removed.
func sanitizeFileName(name string) string {
	spaceRun := regexp.MustCompile(`\s+`)
	disallowed := regexp.MustCompile(`[^a-zA-Z0-9\-\(\)]`)
	return disallowed.ReplaceAllString(spaceRun.ReplaceAllString(name, "-"), "")
}
|
||||
|
||||
// contains reports whether item occurs in slice.
func contains(slice []string, item string) bool {
	for i := range slice {
		if slice[i] == item {
			return true
		}
	}
	return false
}
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"html/template"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Settings is the template-facing view of the user's options, passed into
// the files template by handleFileSearch.
type Settings struct {
	UxLang string // UI language, copied from UserSettings.Language
	Safe   string // safe-search flag, copied from UserSettings.SafeSearch
}
|
||||
|
||||
// TorrentSite is implemented by each torrent indexer backend.
type TorrentSite interface {
	// Name returns the backend's human-readable name.
	Name() string
	// Search queries the site for torrents matching query within the given
	// category and returns the parsed results.
	Search(query string, category string) ([]TorrentResult, error)
}
|
||||
|
||||
// Torrent indexer backends, populated by initializeTorrentSites. Entries
// left nil (currently nyaa and rutor) are skipped by fetchFileResults.
var (
	torrentGalaxy TorrentSite
	nyaa          TorrentSite
	thePirateBay  TorrentSite
	rutor         TorrentSite
)

// fileResultsChan is an unbuffered channel of torrent result batches.
// NOTE(review): no sends or receives are visible in this file — confirm it
// is still used elsewhere before keeping it.
var fileResultsChan = make(chan []TorrentResult)
|
||||
|
||||
// initializeTorrentSites constructs the enabled torrent backends. The nyaa
// and rutor backends are currently disabled and stay nil.
func initializeTorrentSites() {
	torrentGalaxy = NewTorrentGalaxy()
	// nyaa = NewNyaa()
	thePirateBay = NewThePirateBay()
	// rutor = NewRutor()
}
|
||||
|
||||
// handleFileSearch serves the torrent/file search results page: it fetches
// results (through the cache when possible), sorts them by seeder count,
// and renders templates/files.html.
func handleFileSearch(w http.ResponseWriter, settings UserSettings, query string, page int) {
	startTime := time.Now()

	cacheKey := CacheKey{Query: query, Page: page, Safe: settings.SafeSearch == "true", Lang: settings.Language, Type: "file"}
	combinedResults := getFileResultsFromCacheOrFetch(cacheKey, query, settings.SafeSearch, settings.Language, page)

	// Most-seeded torrents first.
	sort.Slice(combinedResults, func(i, j int) bool { return combinedResults[i].Seeders > combinedResults[j].Seeders })

	elapsedTime := time.Since(startTime)
	// "sub"/"add" are used by the template for pagination links.
	funcMap := template.FuncMap{
		"sub": func(a, b int) int { return a - b },
		"add": func(a, b int) int { return a + b },
	}
	tmpl, err := template.New("files.html").Funcs(funcMap).ParseFiles("templates/files.html")
	if err != nil {
		printErr("Failed to load template: %v", err)
		http.Error(w, "Failed to load template", http.StatusInternalServerError)
		return
	}

	data := struct {
		Results     []TorrentResult
		Query       string
		Fetched     string
		Category    string
		Sort        string
		HasPrevPage bool
		HasNextPage bool
		Page        int
		Settings    Settings
		Theme       string
	}{
		Results:     combinedResults,
		Query:       query,
		Fetched:     fmt.Sprintf("%.2f", elapsedTime.Seconds()),
		Category:    "all",
		Sort:        "seed",
		HasPrevPage: page > 1,
		HasNextPage: len(combinedResults) > 0, // best-effort: assume more pages while results keep coming
		Page:        page,
		// Template-local Settings view built from UserSettings; the two
		// types intentionally coexist (one per layer).
		Settings: Settings{UxLang: settings.Language, Safe: settings.SafeSearch},
		Theme:    settings.Theme,
	}

	// // Debugging: Print results before rendering template
	// for _, result := range combinedResults {
	// 	fmt.Printf("Title: %s, Magnet: %s\n", result.Title, result.Magnet)
	// }

	if err := tmpl.Execute(w, data); err != nil {
		printErr("Failed to render template: %v", err)
		http.Error(w, "Failed to render template", http.StatusInternalServerError)
	}
}
|
||||
|
||||
func getFileResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page int) []TorrentResult {
|
||||
cacheChan := make(chan []SearchResult)
|
||||
var combinedResults []TorrentResult
|
||||
|
||||
go func() {
|
||||
results, exists := resultsCache.Get(cacheKey)
|
||||
if exists {
|
||||
printInfo("Cache hit")
|
||||
cacheChan <- results
|
||||
} else {
|
||||
printInfo("Cache miss")
|
||||
cacheChan <- nil
|
||||
}
|
||||
}()
|
||||
|
||||
select {
|
||||
case results := <-cacheChan:
|
||||
if results == nil {
|
||||
combinedResults = fetchFileResults(query, safe, lang, page)
|
||||
if len(combinedResults) > 0 {
|
||||
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
|
||||
}
|
||||
} else {
|
||||
_, torrentResults, _ := convertToSpecificResults(results)
|
||||
combinedResults = torrentResults
|
||||
}
|
||||
case <-time.After(2 * time.Second):
|
||||
printInfo("Cache check timeout")
|
||||
combinedResults = fetchFileResults(query, safe, lang, page)
|
||||
if len(combinedResults) > 0 {
|
||||
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
|
||||
}
|
||||
}
|
||||
|
||||
return combinedResults
|
||||
}
|
||||
|
||||
// fetchFileResults queries every configured torrent site in turn and
// collects all of their results. Disabled (nil) sites and per-site search
// errors are silently skipped. If no local results are found, the query is
// forwarded to other nodes (excluding this host).
func fetchFileResults(query, safe, lang string, page int) []TorrentResult {
	sites := []TorrentSite{torrentGalaxy, nyaa, thePirateBay, rutor}
	results := []TorrentResult{}

	for _, site := range sites {
		// Some backends are disabled and left nil in initializeTorrentSites.
		if site == nil {
			continue
		}
		res, err := site.Search(query, "all")
		if err != nil {
			// Best-effort aggregation: a failing site is skipped, not fatal.
			continue
		}
		for _, r := range res {
			// Strip the "magnet:" scheme; perhaps useless now.
			r.Magnet = removeMagnetLink(r.Magnet)
			results = append(results, r)
		}
	}

	if len(results) == 0 {
		printWarn("No file results found for query: %s, trying other nodes", query)
		results = tryOtherNodesForFileSearch(query, safe, lang, page, []string{hostID})
	}

	return results
}
|
||||
|
||||
// removeMagnetLink drops the leading "magnet:" scheme from a magnet URI;
// inputs without the prefix pass through unchanged.
func removeMagnetLink(magnet string) string {
	trimmed, _ := strings.CutPrefix(magnet, "magnet:")
	return trimmed
}
|
||||
|
||||
// parseInt converts s to an int; any string that fails to parse yields 0.
func parseInt(s string) int {
	n, err := strconv.Atoi(s)
	if err != nil {
		return 0
	}
	return n
}
|
||||
|
||||
func parseSize(sizeStr string) int64 {
|
||||
sizeStr = strings.TrimSpace(sizeStr)
|
||||
if sizeStr == "" {
|
||||
return 0
|
||||
}
|
||||
|
||||
// Use regex to extract numeric value and unit separately
|
||||
re := regexp.MustCompile(`(?i)([\d.]+)\s*([KMGT]?B)`)
|
||||
matches := re.FindStringSubmatch(sizeStr)
|
||||
if len(matches) < 3 {
|
||||
printWarn("Error parsing size: invalid format %s", sizeStr)
|
||||
return 0
|
||||
}
|
||||
|
||||
sizeStr = matches[1]
|
||||
unit := strings.ToUpper(matches[2])
|
||||
|
||||
var multiplier int64 = 1
|
||||
switch unit {
|
||||
case "KB":
|
||||
multiplier = 1024
|
||||
case "MB":
|
||||
multiplier = 1024 * 1024
|
||||
case "GB":
|
||||
multiplier = 1024 * 1024 * 1024
|
||||
case "TB":
|
||||
multiplier = 1024 * 1024 * 1024 * 1024
|
||||
default:
|
||||
printWarn("Unknown unit: %s", unit)
|
||||
return 0
|
||||
}
|
||||
|
||||
size, err := strconv.ParseFloat(sizeStr, 64)
|
||||
if err != nil {
|
||||
printWarn("Error parsing size: %v", err)
|
||||
return 0
|
||||
}
|
||||
return int64(size * float64(multiplier))
|
||||
}
|
||||
|
||||
// apparently this is needed so it can announce that magnet link is being used and people start seeding it, but I dont like the fact that I add trackers purposefully
|
||||
func applyTrackers(magnetLink string) string {
|
||||
if magnetLink == "" {
|
||||
return ""
|
||||
}
|
||||
trackers := []string{
|
||||
"udp://tracker.openbittorrent.com:80/announce",
|
||||
"udp://tracker.opentrackr.org:1337/announce",
|
||||
"udp://tracker.coppersurfer.tk:6969/announce",
|
||||
"udp://tracker.leechers-paradise.org:6969/announce",
|
||||
}
|
||||
for _, tracker := range trackers {
|
||||
magnetLink += "&tr=" + url.QueryEscape(tracker)
|
||||
}
|
||||
return magnetLink
|
||||
}
|
||||
|
||||
// formatSize renders a byte count as a human-readable string with binary
// (1024-based) units; values under 1024 come out as plain bytes.
func formatSize(size int64) string {
	units := []struct {
		limit int64
		label string
	}{
		{1024 * 1024 * 1024 * 1024, "TB"},
		{1024 * 1024 * 1024, "GB"},
		{1024 * 1024, "MB"},
		{1024, "KB"},
	}
	for _, u := range units {
		if size >= u.limit {
			return fmt.Sprintf("%.2f %s", float64(size)/float64(u.limit), u.label)
		}
	}
	return fmt.Sprintf("%d B", size)
}
|
||||
|
||||
// sanitizeFileName normalizes name for file-system use: whitespace runs
// collapse to a single dash and any character outside ASCII letters,
// digits, dashes, and parentheses is dropped.
func sanitizeFileName(name string) string {
	var (
		wsRe      = regexp.MustCompile(`\s+`)
		illegalRe = regexp.MustCompile(`[^a-zA-Z0-9\-\(\)]`)
	)
	dashed := wsRe.ReplaceAllString(name, "-")
	return illegalRe.ReplaceAllString(dashed, "")
}
|
||||
|
||||
// contains reports whether item occurs anywhere in slice.
func contains(slice []string, item string) bool {
	found := false
	for _, candidate := range slice {
		if candidate == item {
			found = true
			break
		}
	}
	return found
}
|
||||
|
|
284
forums.go
Normal file → Executable file
284
forums.go
Normal file → Executable file
|
@ -1,142 +1,142 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"html/template"
|
||||
"log"
|
||||
"math"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"time"
|
||||
)
|
||||
|
||||
func PerformRedditSearch(query string, safe string, page int) ([]ForumSearchResult, error) {
|
||||
const (
|
||||
pageSize = 25
|
||||
baseURL = "https://www.reddit.com"
|
||||
maxRetries = 5
|
||||
initialBackoff = 2 * time.Second
|
||||
)
|
||||
var results []ForumSearchResult
|
||||
|
||||
searchURL := fmt.Sprintf("%s/search.json?q=%s&limit=%d&start=%d", baseURL, url.QueryEscape(query), pageSize, page*pageSize)
|
||||
var resp *http.Response
|
||||
var err error
|
||||
|
||||
// Retry logic with exponential backoff
|
||||
for i := 0; i <= maxRetries; i++ {
|
||||
resp, err = http.Get(searchURL)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("making request: %v", err)
|
||||
}
|
||||
if resp.StatusCode != http.StatusTooManyRequests {
|
||||
break
|
||||
}
|
||||
|
||||
// Wait for some time before retrying
|
||||
backoff := time.Duration(math.Pow(2, float64(i))) * initialBackoff
|
||||
time.Sleep(backoff)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("making request: %v", err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
|
||||
}
|
||||
|
||||
var searchResults map[string]interface{}
|
||||
if err := json.NewDecoder(resp.Body).Decode(&searchResults); err != nil {
|
||||
return nil, fmt.Errorf("decoding response: %v", err)
|
||||
}
|
||||
|
||||
data, ok := searchResults["data"].(map[string]interface{})
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("no data field in response")
|
||||
}
|
||||
|
||||
posts, ok := data["children"].([]interface{})
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("no children field in data")
|
||||
}
|
||||
|
||||
for _, post := range posts {
|
||||
postData := post.(map[string]interface{})["data"].(map[string]interface{})
|
||||
|
||||
if safe == "active" && postData["over_18"].(bool) {
|
||||
continue
|
||||
}
|
||||
|
||||
header := postData["title"].(string)
|
||||
description := postData["selftext"].(string)
|
||||
if len(description) > 500 {
|
||||
description = description[:500] + "..."
|
||||
}
|
||||
publishedDate := time.Unix(int64(postData["created_utc"].(float64)), 0)
|
||||
permalink := postData["permalink"].(string)
|
||||
resultURL := fmt.Sprintf("%s%s", baseURL, permalink)
|
||||
|
||||
result := ForumSearchResult{
|
||||
URL: resultURL,
|
||||
Header: header,
|
||||
Description: description,
|
||||
PublishedDate: publishedDate,
|
||||
}
|
||||
|
||||
thumbnail := postData["thumbnail"].(string)
|
||||
if parsedURL, err := url.Parse(thumbnail); err == nil && parsedURL.Scheme != "" {
|
||||
result.ImgSrc = postData["url"].(string)
|
||||
result.ThumbnailSrc = thumbnail
|
||||
}
|
||||
|
||||
results = append(results, result)
|
||||
}
|
||||
|
||||
return results, nil
|
||||
}
|
||||
|
||||
// handleForumsSearch serves the forum (Reddit) search results page. When
// the primary search errors or returns nothing, it retries through other
// nodes, then renders templates/forums.html.
func handleForumsSearch(w http.ResponseWriter, settings UserSettings, query, safe, lang string, page int) {
	results, err := PerformRedditSearch(query, safe, page)
	if err != nil || len(results) == 0 { // empty results also trigger the other-node fallback
		log.Printf("No results from primary search, trying other nodes")
		results = tryOtherNodesForForumSearch(query, safe, lang, page)
	}

	data := struct {
		Query           string
		Results         []ForumSearchResult
		LanguageOptions []LanguageOption
		CurrentLang     string
		Page            int
		HasPrevPage     bool
		HasNextPage     bool
		Theme           string
	}{
		Query:           query,
		Results:         results,
		LanguageOptions: languageOptions,
		CurrentLang:     lang,
		Page:            page,
		HasPrevPage:     page > 1,
		HasNextPage:     len(results) == 25, // a full page suggests more results exist
		Theme:           settings.Theme,
	}

	// "sub"/"add" are used by the template for pagination links.
	funcMap := template.FuncMap{
		"sub": func(a, b int) int { return a - b },
		"add": func(a, b int) int { return a + b },
	}

	tmpl, err := template.New("forums.html").Funcs(funcMap).ParseFiles("templates/forums.html")
	if err != nil {
		http.Error(w, fmt.Sprintf("Error loading template: %v", err), http.StatusInternalServerError)
		return
	}

	if err := tmpl.Execute(w, data); err != nil {
		http.Error(w, fmt.Sprintf("Error rendering template: %v", err), http.StatusInternalServerError)
	}
}
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"html/template"
|
||||
"log"
|
||||
"math"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"time"
|
||||
)
|
||||
|
||||
func PerformRedditSearch(query string, safe string, page int) ([]ForumSearchResult, error) {
|
||||
const (
|
||||
pageSize = 25
|
||||
baseURL = "https://www.reddit.com"
|
||||
maxRetries = 5
|
||||
initialBackoff = 2 * time.Second
|
||||
)
|
||||
var results []ForumSearchResult
|
||||
|
||||
searchURL := fmt.Sprintf("%s/search.json?q=%s&limit=%d&start=%d", baseURL, url.QueryEscape(query), pageSize, page*pageSize)
|
||||
var resp *http.Response
|
||||
var err error
|
||||
|
||||
// Retry logic with exponential backoff
|
||||
for i := 0; i <= maxRetries; i++ {
|
||||
resp, err = http.Get(searchURL)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("making request: %v", err)
|
||||
}
|
||||
if resp.StatusCode != http.StatusTooManyRequests {
|
||||
break
|
||||
}
|
||||
|
||||
// Wait for some time before retrying
|
||||
backoff := time.Duration(math.Pow(2, float64(i))) * initialBackoff
|
||||
time.Sleep(backoff)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("making request: %v", err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
|
||||
}
|
||||
|
||||
var searchResults map[string]interface{}
|
||||
if err := json.NewDecoder(resp.Body).Decode(&searchResults); err != nil {
|
||||
return nil, fmt.Errorf("decoding response: %v", err)
|
||||
}
|
||||
|
||||
data, ok := searchResults["data"].(map[string]interface{})
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("no data field in response")
|
||||
}
|
||||
|
||||
posts, ok := data["children"].([]interface{})
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("no children field in data")
|
||||
}
|
||||
|
||||
for _, post := range posts {
|
||||
postData := post.(map[string]interface{})["data"].(map[string]interface{})
|
||||
|
||||
if safe == "active" && postData["over_18"].(bool) {
|
||||
continue
|
||||
}
|
||||
|
||||
header := postData["title"].(string)
|
||||
description := postData["selftext"].(string)
|
||||
if len(description) > 500 {
|
||||
description = description[:500] + "..."
|
||||
}
|
||||
publishedDate := time.Unix(int64(postData["created_utc"].(float64)), 0)
|
||||
permalink := postData["permalink"].(string)
|
||||
resultURL := fmt.Sprintf("%s%s", baseURL, permalink)
|
||||
|
||||
result := ForumSearchResult{
|
||||
URL: resultURL,
|
||||
Header: header,
|
||||
Description: description,
|
||||
PublishedDate: publishedDate,
|
||||
}
|
||||
|
||||
thumbnail := postData["thumbnail"].(string)
|
||||
if parsedURL, err := url.Parse(thumbnail); err == nil && parsedURL.Scheme != "" {
|
||||
result.ImgSrc = postData["url"].(string)
|
||||
result.ThumbnailSrc = thumbnail
|
||||
}
|
||||
|
||||
results = append(results, result)
|
||||
}
|
||||
|
||||
return results, nil
|
||||
}
|
||||
|
||||
// handleForumsSearch serves the forum (Reddit) search results page using
// the caller's UserSettings for safe-search and language. When the primary
// search errors or returns nothing, it retries through other nodes, then
// renders templates/forums.html.
func handleForumsSearch(w http.ResponseWriter, settings UserSettings, query string, page int) {
	results, err := PerformRedditSearch(query, settings.SafeSearch, page)
	if err != nil || len(results) == 0 { // empty results also trigger the other-node fallback
		log.Printf("No results from primary search, trying other nodes")
		results = tryOtherNodesForForumSearch(query, settings.SafeSearch, settings.Language, page)
	}

	data := struct {
		Query           string
		Results         []ForumSearchResult
		LanguageOptions []LanguageOption
		CurrentLang     string
		Page            int
		HasPrevPage     bool
		HasNextPage     bool
		Theme           string
	}{
		Query:           query,
		Results:         results,
		LanguageOptions: languageOptions,
		CurrentLang:     settings.Language,
		Page:            page,
		HasPrevPage:     page > 1,
		HasNextPage:     len(results) == 25, // a full page suggests more results exist
		Theme:           settings.Theme,
	}

	// "sub"/"add" are used by the template for pagination links.
	funcMap := template.FuncMap{
		"sub": func(a, b int) int { return a - b },
		"add": func(a, b int) int { return a + b },
	}

	tmpl, err := template.New("forums.html").Funcs(funcMap).ParseFiles("templates/forums.html")
	if err != nil {
		http.Error(w, fmt.Sprintf("Error loading template: %v", err), http.StatusInternalServerError)
		return
	}

	if err := tmpl.Execute(w, data); err != nil {
		http.Error(w, fmt.Sprintf("Error rendering template: %v", err), http.StatusInternalServerError)
	}
}
|
||||
|
|
294
images.go
Normal file → Executable file
294
images.go
Normal file → Executable file
|
@ -1,147 +1,147 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"html/template"
|
||||
"log"
|
||||
"net/http"
|
||||
"time"
|
||||
)
|
||||
|
||||
// imageSearchEngines lists the image backends tried in order by
// fetchImageResults, which stops at the first engine returning results.
// NOTE(review): Weight looks like a priority/ordering hint — confirm its
// semantics against the engine-selection code.
var imageSearchEngines []SearchEngine

func init() {
	imageSearchEngines = []SearchEngine{
		{Name: "Qwant", Func: wrapImageSearchFunc(PerformQwantImageSearch), Weight: 1},
		{Name: "Bing", Func: wrapImageSearchFunc(PerformBingImageSearch), Weight: 2}, // Bing sometimes returns few images, which breaks dynamic page loading
		{Name: "Imgur", Func: wrapImageSearchFunc(PerformImgurImageSearch), Weight: 3},
	}
}
|
||||
|
||||
// handleImageSearch serves the image search results page: it fetches image
// results (through the cache when possible) and renders
// templates/images.html.
func handleImageSearch(w http.ResponseWriter, settings UserSettings, query, safe, lang string, page int) {
	startTime := time.Now()

	cacheKey := CacheKey{Query: query, Page: page, Safe: safe == "true", Lang: lang, Type: "image"}
	combinedResults := getImageResultsFromCacheOrFetch(cacheKey, query, safe, lang, page)

	elapsedTime := time.Since(startTime)
	tmpl, err := template.New("images.html").Funcs(funcs).ParseFiles("templates/images.html")
	if err != nil {
		log.Printf("Error parsing template: %v", err)
		http.Error(w, "Internal Server Error", http.StatusInternalServerError)
		return
	}

	data := struct {
		Results         []ImageSearchResult
		Query           string
		Page            int
		Fetched         string
		LanguageOptions []LanguageOption
		CurrentLang     string
		HasPrevPage     bool
		HasNextPage     bool
		NoResults       bool
		Theme           string
	}{
		Results:         combinedResults,
		Query:           query,
		Page:            page,
		Fetched:         fmt.Sprintf("%.2f seconds", elapsedTime.Seconds()),
		LanguageOptions: languageOptions,
		CurrentLang:     lang,
		HasPrevPage:     page > 1,
		HasNextPage:     len(combinedResults) >= 50, // a full batch suggests a further page exists
		NoResults:       len(combinedResults) == 0,
		Theme:           settings.Theme,
	}

	err = tmpl.Execute(w, data)
	if err != nil {
		log.Printf("Error executing template: %v", err)
		http.Error(w, "Internal Server Error", http.StatusInternalServerError)
	}
}
|
||||
|
||||
func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page int) []ImageSearchResult {
|
||||
cacheChan := make(chan []SearchResult)
|
||||
var combinedResults []ImageSearchResult
|
||||
|
||||
go func() {
|
||||
results, exists := resultsCache.Get(cacheKey)
|
||||
if exists {
|
||||
log.Println("Cache hit")
|
||||
cacheChan <- results
|
||||
} else {
|
||||
log.Println("Cache miss")
|
||||
cacheChan <- nil
|
||||
}
|
||||
}()
|
||||
|
||||
select {
|
||||
case results := <-cacheChan:
|
||||
if results == nil {
|
||||
combinedResults = fetchImageResults(query, safe, lang, page)
|
||||
if len(combinedResults) > 0 {
|
||||
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
|
||||
}
|
||||
} else {
|
||||
_, _, imageResults := convertToSpecificResults(results)
|
||||
combinedResults = imageResults
|
||||
}
|
||||
case <-time.After(2 * time.Second):
|
||||
log.Println("Cache check timeout")
|
||||
combinedResults = fetchImageResults(query, safe, lang, page)
|
||||
if len(combinedResults) > 0 {
|
||||
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
|
||||
}
|
||||
}
|
||||
|
||||
return combinedResults
|
||||
}
|
||||
|
||||
func fetchImageResults(query, safe, lang string, page int) []ImageSearchResult {
|
||||
var results []ImageSearchResult
|
||||
|
||||
for _, engine := range imageSearchEngines {
|
||||
log.Printf("Using image search engine: %s", engine.Name)
|
||||
|
||||
searchResults, duration, err := engine.Func(query, safe, lang, page)
|
||||
updateEngineMetrics(&engine, duration, err == nil)
|
||||
if err != nil {
|
||||
log.Printf("Error performing image search with %s: %v", engine.Name, err)
|
||||
continue
|
||||
}
|
||||
|
||||
for _, result := range searchResults {
|
||||
results = append(results, result.(ImageSearchResult))
|
||||
}
|
||||
|
||||
// If results are found, break out of the loop
|
||||
if len(results) > 0 {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// If no results found after trying all engines
|
||||
if len(results) == 0 {
|
||||
log.Printf("No image results found for query: %s, trying other nodes", query)
|
||||
results = tryOtherNodesForImageSearch(query, safe, lang, page, []string{hostID})
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
func wrapImageSearchFunc(f func(string, string, string, int) ([]ImageSearchResult, time.Duration, error)) func(string, string, string, int) ([]SearchResult, time.Duration, error) {
|
||||
return func(query, safe, lang string, page int) ([]SearchResult, time.Duration, error) {
|
||||
imageResults, duration, err := f(query, safe, lang, page)
|
||||
if err != nil {
|
||||
return nil, duration, err
|
||||
}
|
||||
searchResults := make([]SearchResult, len(imageResults))
|
||||
for i, result := range imageResults {
|
||||
searchResults[i] = result
|
||||
}
|
||||
return searchResults, duration, nil
|
||||
}
|
||||
}
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"html/template"
|
||||
"log"
|
||||
"net/http"
|
||||
"time"
|
||||
)
|
||||
|
||||
// imageSearchEngines lists the image backends tried in order by
// fetchImageResults, which stops at the first engine returning results.
// NOTE(review): Weight looks like a priority/ordering hint — confirm its
// semantics against the engine-selection code.
var imageSearchEngines []SearchEngine

func init() {
	imageSearchEngines = []SearchEngine{
		{Name: "Qwant", Func: wrapImageSearchFunc(PerformQwantImageSearch), Weight: 1},
		{Name: "Bing", Func: wrapImageSearchFunc(PerformBingImageSearch), Weight: 2}, // Bing sometimes returns few images, which breaks dynamic page loading
		{Name: "Imgur", Func: wrapImageSearchFunc(PerformImgurImageSearch), Weight: 3},
	}
}
|
||||
|
||||
// handleImageSearch serves the image search results page using the
// caller's UserSettings for safe-search and language: it fetches image
// results (through the cache when possible) and renders
// templates/images.html.
func handleImageSearch(w http.ResponseWriter, settings UserSettings, query string, page int) {
	startTime := time.Now()

	cacheKey := CacheKey{Query: query, Page: page, Safe: settings.SafeSearch == "true", Lang: settings.Language, Type: "image"}
	combinedResults := getImageResultsFromCacheOrFetch(cacheKey, query, settings.SafeSearch, settings.Language, page)

	elapsedTime := time.Since(startTime)
	tmpl, err := template.New("images.html").Funcs(funcs).ParseFiles("templates/images.html")
	if err != nil {
		log.Printf("Error parsing template: %v", err)
		http.Error(w, "Internal Server Error", http.StatusInternalServerError)
		return
	}

	data := struct {
		Results         []ImageSearchResult
		Query           string
		Page            int
		Fetched         string
		LanguageOptions []LanguageOption
		CurrentLang     string
		HasPrevPage     bool
		HasNextPage     bool
		NoResults       bool
		Theme           string
	}{
		Results:         combinedResults,
		Query:           query,
		Page:            page,
		Fetched:         fmt.Sprintf("%.2f seconds", elapsedTime.Seconds()),
		LanguageOptions: languageOptions,
		CurrentLang:     settings.Language,
		HasPrevPage:     page > 1,
		HasNextPage:     len(combinedResults) >= 50, // a full batch suggests a further page exists
		NoResults:       len(combinedResults) == 0,
		Theme:           settings.Theme,
	}

	err = tmpl.Execute(w, data)
	if err != nil {
		printErr("Error executing template: %v", err)
		http.Error(w, "Internal Server Error", http.StatusInternalServerError)
	}
}
|
||||
|
||||
func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page int) []ImageSearchResult {
|
||||
cacheChan := make(chan []SearchResult)
|
||||
var combinedResults []ImageSearchResult
|
||||
|
||||
go func() {
|
||||
results, exists := resultsCache.Get(cacheKey)
|
||||
if exists {
|
||||
printInfo("Cache hit")
|
||||
cacheChan <- results
|
||||
} else {
|
||||
printInfo("Cache miss")
|
||||
cacheChan <- nil
|
||||
}
|
||||
}()
|
||||
|
||||
select {
|
||||
case results := <-cacheChan:
|
||||
if results == nil {
|
||||
combinedResults = fetchImageResults(query, safe, lang, page)
|
||||
if len(combinedResults) > 0 {
|
||||
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
|
||||
}
|
||||
} else {
|
||||
_, _, imageResults := convertToSpecificResults(results)
|
||||
combinedResults = imageResults
|
||||
}
|
||||
case <-time.After(2 * time.Second):
|
||||
printInfo("Cache check timeout")
|
||||
combinedResults = fetchImageResults(query, safe, lang, page)
|
||||
if len(combinedResults) > 0 {
|
||||
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
|
||||
}
|
||||
}
|
||||
|
||||
return combinedResults
|
||||
}
|
||||
|
||||
func fetchImageResults(query, safe, lang string, page int) []ImageSearchResult {
|
||||
var results []ImageSearchResult
|
||||
|
||||
for _, engine := range imageSearchEngines {
|
||||
printInfo("Using image search engine: %s", engine.Name)
|
||||
|
||||
searchResults, duration, err := engine.Func(query, safe, lang, page)
|
||||
updateEngineMetrics(&engine, duration, err == nil)
|
||||
if err != nil {
|
||||
printWarn("Error performing image search with %s: %v", engine.Name, err)
|
||||
continue
|
||||
}
|
||||
|
||||
for _, result := range searchResults {
|
||||
results = append(results, result.(ImageSearchResult))
|
||||
}
|
||||
|
||||
// If results are found, break out of the loop
|
||||
if len(results) > 0 {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// If no results found after trying all engines
|
||||
if len(results) == 0 {
|
||||
printWarn("No image results found for query: %s, trying other nodes", query)
|
||||
results = tryOtherNodesForImageSearch(query, safe, lang, page, []string{hostID})
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
func wrapImageSearchFunc(f func(string, string, string, int) ([]ImageSearchResult, time.Duration, error)) func(string, string, string, int) ([]SearchResult, time.Duration, error) {
|
||||
return func(query, safe, lang string, page int) ([]SearchResult, time.Duration, error) {
|
||||
imageResults, duration, err := f(query, safe, lang, page)
|
||||
if err != nil {
|
||||
return nil, duration, err
|
||||
}
|
||||
searchResults := make([]SearchResult, len(imageResults))
|
||||
for i, result := range imageResults {
|
||||
searchResults[i] = result
|
||||
}
|
||||
return searchResults, duration, nil
|
||||
}
|
||||
}
|
||||
|
|
355
main.go
Normal file → Executable file
355
main.go
Normal file → Executable file
|
@ -1,178 +1,177 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"html/template"
|
||||
"log"
|
||||
"net/http"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// LanguageOption represents a language option for search.
type LanguageOption struct {
	Code string // filter value passed to search backends (e.g. "lang_en"); empty means any language
	Name string // human-readable label shown in the UI
}
|
||||
|
||||
// settings holds the most recently loaded user settings.
// NOTE(review): this is package-level mutable state written per request in
// handleSearch, so concurrent requests race on it — confirm whether it can
// be made request-local.
var settings UserSettings
|
||||
|
||||
// languageOptions is the fixed list of result-language filters offered in
// the UI. Codes use the "lang_xx" form passed through to search backends;
// the empty code disables language filtering.
var languageOptions = []LanguageOption{
	{Code: "", Name: "Any Language"},
	{Code: "lang_en", Name: "English"},
	{Code: "lang_af", Name: "Afrikaans"},
	{Code: "lang_ar", Name: "العربية (Arabic)"},
	{Code: "lang_hy", Name: "Հայերեն (Armenian)"},
	{Code: "lang_be", Name: "Беларуская (Belarusian)"},
	{Code: "lang_bg", Name: "български (Bulgarian)"},
	{Code: "lang_ca", Name: "Català (Catalan)"},
	{Code: "lang_zh-CN", Name: "中文 (简体) (Chinese Simplified)"},
	{Code: "lang_zh-TW", Name: "中文 (繁體) (Chinese Traditional)"},
	{Code: "lang_hr", Name: "Hrvatski (Croatian)"},
	{Code: "lang_cs", Name: "Čeština (Czech)"},
	{Code: "lang_da", Name: "Dansk (Danish)"},
	{Code: "lang_nl", Name: "Nederlands (Dutch)"},
	{Code: "lang_eo", Name: "Esperanto"},
	{Code: "lang_et", Name: "Eesti (Estonian)"},
	{Code: "lang_tl", Name: "Filipino (Tagalog)"},
	{Code: "lang_fi", Name: "Suomi (Finnish)"},
	{Code: "lang_fr", Name: "Français (French)"},
	{Code: "lang_de", Name: "Deutsch (German)"},
	{Code: "lang_el", Name: "Ελληνικά (Greek)"},
	{Code: "lang_iw", Name: "עברית (Hebrew)"},
	{Code: "lang_hi", Name: "हिन्दी (Hindi)"},
	{Code: "lang_hu", Name: "magyar (Hungarian)"},
	{Code: "lang_is", Name: "íslenska (Icelandic)"},
	{Code: "lang_id", Name: "Bahasa Indonesia (Indonesian)"},
	{Code: "lang_it", Name: "italiano (Italian)"},
	{Code: "lang_ja", Name: "日本語 (Japanese)"},
	{Code: "lang_ko", Name: "한국어 (Korean)"},
	{Code: "lang_lv", Name: "latviešu (Latvian)"},
	{Code: "lang_lt", Name: "lietuvių (Lithuanian)"},
	{Code: "lang_no", Name: "norsk (Norwegian)"},
	{Code: "lang_fa", Name: "فارسی (Persian)"},
	{Code: "lang_pl", Name: "polski (Polish)"},
	{Code: "lang_pt", Name: "português (Portuguese)"},
	{Code: "lang_ro", Name: "română (Romanian)"},
	{Code: "lang_ru", Name: "русский (Russian)"},
	{Code: "lang_sr", Name: "српски (Serbian)"},
	{Code: "lang_sk", Name: "slovenčina (Slovak)"},
	{Code: "lang_sl", Name: "slovenščina (Slovenian)"},
	{Code: "lang_es", Name: "español (Spanish)"},
	{Code: "lang_sw", Name: "Kiswahili (Swahili)"},
	{Code: "lang_sv", Name: "svenska (Swedish)"},
	{Code: "lang_th", Name: "ไทย (Thai)"},
	{Code: "lang_tr", Name: "Türkçe (Turkish)"},
	{Code: "lang_uk", Name: "українська (Ukrainian)"},
	{Code: "lang_vi", Name: "Tiếng Việt (Vietnamese)"},
}
|
||||
|
||||
func handleSearch(w http.ResponseWriter, r *http.Request) {
|
||||
query, safe, lang, searchType, page := parseSearchParams(r)
|
||||
|
||||
// Load user settings
|
||||
settings = loadUserSettings(r)
|
||||
|
||||
// Update the theme, safe search, and language based on query parameters or use existing settings
|
||||
theme := r.URL.Query().Get("theme")
|
||||
if theme != "" {
|
||||
settings.Theme = theme
|
||||
saveUserSettings(w, settings)
|
||||
} else if settings.Theme == "" {
|
||||
settings.Theme = "dark" // Default theme
|
||||
}
|
||||
|
||||
if safe != "" {
|
||||
settings.SafeSearch = safe
|
||||
saveUserSettings(w, settings)
|
||||
}
|
||||
|
||||
if lang != "" {
|
||||
settings.Language = lang
|
||||
saveUserSettings(w, settings)
|
||||
}
|
||||
|
||||
// Render the search page template if no query
|
||||
if query == "" {
|
||||
tmpl := template.Must(template.ParseFiles("templates/search.html"))
|
||||
tmpl.Execute(w, settings)
|
||||
return
|
||||
}
|
||||
|
||||
settings := loadUserSettings(r)
|
||||
|
||||
// Handle search based on the type
|
||||
switch searchType {
|
||||
case "image":
|
||||
handleImageSearch(w, settings, query, safe, lang, page)
|
||||
case "video":
|
||||
handleVideoSearch(w, settings, query, safe, lang, page)
|
||||
case "map":
|
||||
handleMapSearch(w, settings, query, safe)
|
||||
case "forum":
|
||||
handleForumsSearch(w, settings, query, safe, lang, page)
|
||||
case "file":
|
||||
handleFileSearch(w, settings, query, safe, lang, page)
|
||||
case "text":
|
||||
fallthrough
|
||||
default:
|
||||
HandleTextSearch(w, settings, query, safe, lang, page)
|
||||
}
|
||||
// This is immeasurably stupid it passes safe and language then it passes settings with safe and lang again
|
||||
}
|
||||
|
||||
func parseSearchParams(r *http.Request) (query, safe, lang, searchType string, page int) {
|
||||
if r.Method == "GET" {
|
||||
query = r.URL.Query().Get("q")
|
||||
safe = r.URL.Query().Get("safe")
|
||||
lang = r.URL.Query().Get("lang")
|
||||
searchType = r.URL.Query().Get("t")
|
||||
pageStr := r.URL.Query().Get("p")
|
||||
page = parsePageParameter(pageStr)
|
||||
} else if r.Method == "POST" {
|
||||
query = r.FormValue("q")
|
||||
safe = r.FormValue("safe")
|
||||
lang = r.FormValue("lang")
|
||||
searchType = r.FormValue("t")
|
||||
pageStr := r.FormValue("p")
|
||||
page = parsePageParameter(pageStr)
|
||||
}
|
||||
|
||||
if searchType == "" {
|
||||
searchType = "text"
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// parsePageParameter converts the page query parameter to an int, clamping
// anything invalid (non-numeric, empty, zero, or negative) to page 1.
func parsePageParameter(pageStr string) int {
	if n, err := strconv.Atoi(pageStr); err == nil && n >= 1 {
		return n
	}
	return 1
}
|
||||
|
||||
func runServer() {
|
||||
http.Handle("/static/", http.StripPrefix("/static/", http.FileServer(http.Dir("static"))))
|
||||
http.HandleFunc("/", handleSearch)
|
||||
http.HandleFunc("/search", handleSearch)
|
||||
http.HandleFunc("/img_proxy", handleImageProxy)
|
||||
http.HandleFunc("/node", handleNodeRequest)
|
||||
http.HandleFunc("/settings", func(w http.ResponseWriter, r *http.Request) {
|
||||
http.ServeFile(w, r, "templates/settings.html")
|
||||
})
|
||||
http.HandleFunc("/opensearch.xml", func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/opensearchdescription+xml")
|
||||
http.ServeFile(w, r, "static/opensearch.xml")
|
||||
})
|
||||
initializeTorrentSites()
|
||||
|
||||
config := loadConfig()
|
||||
generateOpenSearchXML(config)
|
||||
|
||||
printMessage("Server is listening on http://localhost:%d", config.Port)
|
||||
log.Fatal(http.ListenAndServe(fmt.Sprintf(":%d", config.Port), nil))
|
||||
|
||||
// Start automatic update checker
|
||||
go checkForUpdates()
|
||||
}
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"html/template"
|
||||
"log"
|
||||
"net/http"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// LanguageOption represents a language option for search: a "lang_xx"
// restrict code plus a human-readable display name.
type LanguageOption struct {
	Code string // restrict code, e.g. "lang_en"; empty means no restriction
	Name string // display name shown in the language picker
}

// settings holds the current user's settings.
// NOTE(review): package-level shared state, written per request in
// handleSearch — confirm whether concurrent requests can race on it.
var settings UserSettings

// languageOptions lists every language the search UI offers, in display
// order. The first entry (empty code) applies no language restriction.
var languageOptions = []LanguageOption{
	{Code: "", Name: "Any Language"},
	{Code: "lang_en", Name: "English"},
	{Code: "lang_af", Name: "Afrikaans"},
	{Code: "lang_ar", Name: "العربية (Arabic)"},
	{Code: "lang_hy", Name: "Հայերեն (Armenian)"},
	{Code: "lang_be", Name: "Беларуская (Belarusian)"},
	{Code: "lang_bg", Name: "български (Bulgarian)"},
	{Code: "lang_ca", Name: "Català (Catalan)"},
	{Code: "lang_zh-CN", Name: "中文 (简体) (Chinese Simplified)"},
	{Code: "lang_zh-TW", Name: "中文 (繁體) (Chinese Traditional)"},
	{Code: "lang_hr", Name: "Hrvatski (Croatian)"},
	{Code: "lang_cs", Name: "Čeština (Czech)"},
	{Code: "lang_da", Name: "Dansk (Danish)"},
	{Code: "lang_nl", Name: "Nederlands (Dutch)"},
	{Code: "lang_eo", Name: "Esperanto"},
	{Code: "lang_et", Name: "Eesti (Estonian)"},
	{Code: "lang_tl", Name: "Filipino (Tagalog)"},
	{Code: "lang_fi", Name: "Suomi (Finnish)"},
	{Code: "lang_fr", Name: "Français (French)"},
	{Code: "lang_de", Name: "Deutsch (German)"},
	{Code: "lang_el", Name: "Ελληνικά (Greek)"},
	{Code: "lang_iw", Name: "עברית (Hebrew)"},
	{Code: "lang_hi", Name: "हिन्दी (Hindi)"},
	{Code: "lang_hu", Name: "magyar (Hungarian)"},
	{Code: "lang_is", Name: "íslenska (Icelandic)"},
	{Code: "lang_id", Name: "Bahasa Indonesia (Indonesian)"},
	{Code: "lang_it", Name: "italiano (Italian)"},
	{Code: "lang_ja", Name: "日本語 (Japanese)"},
	{Code: "lang_ko", Name: "한국어 (Korean)"},
	{Code: "lang_lv", Name: "latviešu (Latvian)"},
	{Code: "lang_lt", Name: "lietuvių (Lithuanian)"},
	{Code: "lang_no", Name: "norsk (Norwegian)"},
	{Code: "lang_fa", Name: "فارسی (Persian)"},
	{Code: "lang_pl", Name: "polski (Polish)"},
	{Code: "lang_pt", Name: "português (Portuguese)"},
	{Code: "lang_ro", Name: "română (Romanian)"},
	{Code: "lang_ru", Name: "русский (Russian)"},
	{Code: "lang_sr", Name: "српски (Serbian)"},
	{Code: "lang_sk", Name: "slovenčina (Slovak)"},
	{Code: "lang_sl", Name: "slovenščina (Slovenian)"},
	{Code: "lang_es", Name: "español (Spanish)"},
	{Code: "lang_sw", Name: "Kiswahili (Swahili)"},
	{Code: "lang_sv", Name: "svenska (Swedish)"},
	{Code: "lang_th", Name: "ไทย (Thai)"},
	{Code: "lang_tr", Name: "Türkçe (Turkish)"},
	{Code: "lang_uk", Name: "українська (Ukrainian)"},
	{Code: "lang_vi", Name: "Tiếng Việt (Vietnamese)"},
}
|
||||
|
||||
func handleSearch(w http.ResponseWriter, r *http.Request) {
|
||||
query, safe, lang, searchType, page := parseSearchParams(r)
|
||||
|
||||
// Load user settings
|
||||
settings = loadUserSettings(r)
|
||||
|
||||
// Update the theme, safe search, and language based on query parameters or use existing settings
|
||||
theme := r.URL.Query().Get("theme")
|
||||
if theme != "" {
|
||||
settings.Theme = theme
|
||||
saveUserSettings(w, settings)
|
||||
} else if settings.Theme == "" {
|
||||
settings.Theme = "dark" // Default theme
|
||||
}
|
||||
|
||||
if safe != "" {
|
||||
settings.SafeSearch = safe
|
||||
saveUserSettings(w, settings)
|
||||
}
|
||||
|
||||
if lang != "" {
|
||||
settings.Language = lang
|
||||
saveUserSettings(w, settings)
|
||||
}
|
||||
|
||||
// Render the search page template if no query
|
||||
if query == "" {
|
||||
tmpl := template.Must(template.ParseFiles("templates/search.html"))
|
||||
tmpl.Execute(w, settings)
|
||||
return
|
||||
}
|
||||
|
||||
settings := loadUserSettings(r)
|
||||
|
||||
// Handle search based on the type
|
||||
switch searchType {
|
||||
case "image":
|
||||
handleImageSearch(w, settings, query, page)
|
||||
case "video":
|
||||
handleVideoSearch(w, settings, query, page)
|
||||
case "map":
|
||||
handleMapSearch(w, settings, query)
|
||||
case "forum":
|
||||
handleForumsSearch(w, settings, query, page)
|
||||
case "file":
|
||||
handleFileSearch(w, settings, query, page)
|
||||
case "text":
|
||||
fallthrough
|
||||
default:
|
||||
HandleTextSearch(w, settings, query, page)
|
||||
}
|
||||
}
|
||||
|
||||
func parseSearchParams(r *http.Request) (query, safe, lang, searchType string, page int) {
|
||||
if r.Method == "GET" {
|
||||
query = r.URL.Query().Get("q")
|
||||
safe = r.URL.Query().Get("safe")
|
||||
lang = r.URL.Query().Get("lang")
|
||||
searchType = r.URL.Query().Get("t")
|
||||
pageStr := r.URL.Query().Get("p")
|
||||
page = parsePageParameter(pageStr)
|
||||
} else if r.Method == "POST" {
|
||||
query = r.FormValue("q")
|
||||
safe = r.FormValue("safe")
|
||||
lang = r.FormValue("lang")
|
||||
searchType = r.FormValue("t")
|
||||
pageStr := r.FormValue("p")
|
||||
page = parsePageParameter(pageStr)
|
||||
}
|
||||
|
||||
if searchType == "" {
|
||||
searchType = "text"
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// parsePageParameter converts the page query parameter to an int, clamping
// anything invalid (non-numeric, empty, zero, or negative) to page 1.
func parsePageParameter(pageStr string) int {
	if n, err := strconv.Atoi(pageStr); err == nil && n >= 1 {
		return n
	}
	return 1
}
|
||||
|
||||
func runServer() {
|
||||
http.Handle("/static/", http.StripPrefix("/static/", http.FileServer(http.Dir("static"))))
|
||||
http.HandleFunc("/", handleSearch)
|
||||
http.HandleFunc("/search", handleSearch)
|
||||
http.HandleFunc("/img_proxy", handleImageProxy)
|
||||
http.HandleFunc("/node", handleNodeRequest)
|
||||
http.HandleFunc("/settings", func(w http.ResponseWriter, r *http.Request) {
|
||||
http.ServeFile(w, r, "templates/settings.html")
|
||||
})
|
||||
http.HandleFunc("/opensearch.xml", func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/opensearchdescription+xml")
|
||||
http.ServeFile(w, r, "static/opensearch.xml")
|
||||
})
|
||||
initializeTorrentSites()
|
||||
|
||||
config := loadConfig()
|
||||
generateOpenSearchXML(config)
|
||||
|
||||
printMessage("Server is listening on http://localhost:%d", config.Port)
|
||||
log.Fatal(http.ListenAndServe(fmt.Sprintf(":%d", config.Port), nil))
|
||||
|
||||
// Start automatic update checker
|
||||
go checkForUpdates()
|
||||
}
|
||||
|
|
144
map.go
Normal file → Executable file
144
map.go
Normal file → Executable file
|
@ -1,72 +1,72 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"html/template"
|
||||
"net/http"
|
||||
"net/url"
|
||||
)
|
||||
|
||||
// NominatimResponse models the subset of a Nominatim search result this
// program uses: the coordinates of a match, returned by the API as
// decimal-degree strings.
type NominatimResponse struct {
	Lat string `json:"lat"` // latitude as a decimal-degree string
	Lon string `json:"lon"` // longitude as a decimal-degree string
}
|
||||
|
||||
func geocodeQuery(query string) (latitude, longitude string, found bool, err error) {
|
||||
// URL encode the query
|
||||
query = url.QueryEscape(query)
|
||||
|
||||
// Construct the request URL
|
||||
urlString := fmt.Sprintf("https://nominatim.openstreetmap.org/search?format=json&q=%s", query)
|
||||
|
||||
// Make the HTTP GET request
|
||||
resp, err := http.Get(urlString)
|
||||
if err != nil {
|
||||
return "", "", false, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
// Read the response
|
||||
var result []NominatimResponse
|
||||
if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
|
||||
return "", "", false, err
|
||||
}
|
||||
|
||||
// Check if there are any results
|
||||
if len(result) > 0 {
|
||||
latitude = result[0].Lat
|
||||
longitude = result[0].Lon
|
||||
return latitude, longitude, true, nil
|
||||
}
|
||||
|
||||
return "", "", false, nil
|
||||
}
|
||||
|
||||
func handleMapSearch(w http.ResponseWriter, settings UserSettings, query string, lang string) {
|
||||
// Geocode the query to get coordinates
|
||||
latitude, longitude, found, err := geocodeQuery(query)
|
||||
if err != nil {
|
||||
printDebug("Error geocoding query: %s, error: %v", query, err)
|
||||
http.Error(w, "Failed to find location", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
// Use a template to serve the map page
|
||||
data := map[string]interface{}{
|
||||
"Query": query,
|
||||
"Latitude": latitude,
|
||||
"Longitude": longitude,
|
||||
"Found": found,
|
||||
"Theme": settings.Theme,
|
||||
}
|
||||
|
||||
tmpl, err := template.ParseFiles("templates/map.html")
|
||||
if err != nil {
|
||||
printErr("Error loading map template: %v", err)
|
||||
http.Error(w, "Internal Server Error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
tmpl.Execute(w, data)
|
||||
}
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"html/template"
|
||||
"net/http"
|
||||
"net/url"
|
||||
)
|
||||
|
||||
// NominatimResponse models the subset of a Nominatim search result this
// program uses: the coordinates of a match, returned by the API as
// decimal-degree strings.
type NominatimResponse struct {
	Lat string `json:"lat"` // latitude as a decimal-degree string
	Lon string `json:"lon"` // longitude as a decimal-degree string
}
|
||||
|
||||
func geocodeQuery(query string) (latitude, longitude string, found bool, err error) {
|
||||
// URL encode the query
|
||||
query = url.QueryEscape(query)
|
||||
|
||||
// Construct the request URL
|
||||
urlString := fmt.Sprintf("https://nominatim.openstreetmap.org/search?format=json&q=%s", query)
|
||||
|
||||
// Make the HTTP GET request
|
||||
resp, err := http.Get(urlString)
|
||||
if err != nil {
|
||||
return "", "", false, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
// Read the response
|
||||
var result []NominatimResponse
|
||||
if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
|
||||
return "", "", false, err
|
||||
}
|
||||
|
||||
// Check if there are any results
|
||||
if len(result) > 0 {
|
||||
latitude = result[0].Lat
|
||||
longitude = result[0].Lon
|
||||
return latitude, longitude, true, nil
|
||||
}
|
||||
|
||||
return "", "", false, nil
|
||||
}
|
||||
|
||||
func handleMapSearch(w http.ResponseWriter, settings UserSettings, query string) {
|
||||
// Geocode the query to get coordinates
|
||||
latitude, longitude, found, err := geocodeQuery(query)
|
||||
if err != nil {
|
||||
printDebug("Error geocoding query: %s, error: %v", query, err)
|
||||
http.Error(w, "Failed to find location", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
// Use a template to serve the map page
|
||||
data := map[string]interface{}{
|
||||
"Query": query,
|
||||
"Latitude": latitude,
|
||||
"Longitude": longitude,
|
||||
"Found": found,
|
||||
"Theme": settings.Theme,
|
||||
}
|
||||
|
||||
tmpl, err := template.ParseFiles("templates/map.html")
|
||||
if err != nil {
|
||||
printErr("Error loading map template: %v", err)
|
||||
http.Error(w, "Internal Server Error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
tmpl.Execute(w, data)
|
||||
}
|
||||
|
|
438
node-handle-search.go
Normal file → Executable file
438
node-handle-search.go
Normal file → Executable file
|
@ -1,219 +1,219 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"log"
|
||||
)
|
||||
|
||||
func handleSearchTextMessage(msg Message) {
|
||||
var searchParams struct {
|
||||
Query string `json:"query"`
|
||||
Safe string `json:"safe"`
|
||||
Lang string `json:"lang"`
|
||||
Page int `json:"page"`
|
||||
ResponseAddr string `json:"responseAddr"`
|
||||
}
|
||||
err := json.Unmarshal([]byte(msg.Content), &searchParams)
|
||||
if err != nil {
|
||||
log.Printf("Error parsing search parameters: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
log.Printf("Received search-text request. ResponseAddr: %s", searchParams.ResponseAddr)
|
||||
|
||||
results := fetchTextResults(searchParams.Query, searchParams.Safe, searchParams.Lang, searchParams.Page)
|
||||
resultsJSON, err := json.Marshal(results)
|
||||
if err != nil {
|
||||
log.Printf("Error marshalling search results: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
responseMsg := Message{
|
||||
ID: hostID,
|
||||
Type: "text-results",
|
||||
Content: string(resultsJSON),
|
||||
}
|
||||
|
||||
// Log the address to be used for sending the response
|
||||
log.Printf("Sending text search results to %s", searchParams.ResponseAddr)
|
||||
|
||||
if searchParams.ResponseAddr == "" {
|
||||
log.Printf("Error: Response address is empty")
|
||||
return
|
||||
}
|
||||
|
||||
err = sendMessage(searchParams.ResponseAddr, responseMsg)
|
||||
if err != nil {
|
||||
log.Printf("Error sending text search results to %s: %v", searchParams.ResponseAddr, err)
|
||||
}
|
||||
}
|
||||
|
||||
func handleSearchImageMessage(msg Message) {
|
||||
var searchParams struct {
|
||||
Query string `json:"query"`
|
||||
Safe string `json:"safe"`
|
||||
Lang string `json:"lang"`
|
||||
Page int `json:"page"`
|
||||
ResponseAddr string `json:"responseAddr"`
|
||||
}
|
||||
err := json.Unmarshal([]byte(msg.Content), &searchParams)
|
||||
if err != nil {
|
||||
log.Printf("Error parsing search parameters: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
log.Printf("Received search-image request. ResponseAddr: %s", searchParams.ResponseAddr)
|
||||
|
||||
results := fetchImageResults(searchParams.Query, searchParams.Safe, searchParams.Lang, searchParams.Page)
|
||||
resultsJSON, err := json.Marshal(results)
|
||||
if err != nil {
|
||||
log.Printf("Error marshalling search results: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
responseMsg := Message{
|
||||
ID: hostID,
|
||||
Type: "image-results",
|
||||
Content: string(resultsJSON),
|
||||
}
|
||||
|
||||
// Log the address to be used for sending the response
|
||||
log.Printf("Sending image search results to %s", searchParams.ResponseAddr)
|
||||
|
||||
if searchParams.ResponseAddr == "" {
|
||||
log.Printf("Error: Response address is empty")
|
||||
return
|
||||
}
|
||||
|
||||
err = sendMessage(searchParams.ResponseAddr, responseMsg)
|
||||
if err != nil {
|
||||
log.Printf("Error sending image search results to %s: %v", searchParams.ResponseAddr, err)
|
||||
}
|
||||
}
|
||||
|
||||
func handleSearchVideoMessage(msg Message) {
|
||||
var searchParams struct {
|
||||
Query string `json:"query"`
|
||||
Safe string `json:"safe"`
|
||||
Lang string `json:"lang"`
|
||||
Page int `json:"page"`
|
||||
ResponseAddr string `json:"responseAddr"`
|
||||
}
|
||||
err := json.Unmarshal([]byte(msg.Content), &searchParams)
|
||||
if err != nil {
|
||||
log.Printf("Error parsing search parameters: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
log.Printf("Received search-video request. ResponseAddr: %s", searchParams.ResponseAddr)
|
||||
|
||||
results := fetchVideoResults(searchParams.Query, searchParams.Safe, searchParams.Lang, searchParams.Page)
|
||||
resultsJSON, err := json.Marshal(results)
|
||||
if err != nil {
|
||||
log.Printf("Error marshalling search results: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
responseMsg := Message{
|
||||
ID: hostID,
|
||||
Type: "video-results",
|
||||
Content: string(resultsJSON),
|
||||
}
|
||||
|
||||
log.Printf("Sending video search results to %s", searchParams.ResponseAddr)
|
||||
|
||||
if searchParams.ResponseAddr == "" {
|
||||
log.Printf("Error: Response address is empty")
|
||||
return
|
||||
}
|
||||
|
||||
err = sendMessage(searchParams.ResponseAddr, responseMsg)
|
||||
if err != nil {
|
||||
log.Printf("Error sending video search results to %s: %v", searchParams.ResponseAddr, err)
|
||||
}
|
||||
}
|
||||
|
||||
func handleSearchFileMessage(msg Message) {
|
||||
var searchParams struct {
|
||||
Query string `json:"query"`
|
||||
Safe string `json:"safe"`
|
||||
Lang string `json:"lang"`
|
||||
Page int `json:"page"`
|
||||
ResponseAddr string `json:"responseAddr"`
|
||||
}
|
||||
err := json.Unmarshal([]byte(msg.Content), &searchParams)
|
||||
if err != nil {
|
||||
log.Printf("Error parsing search parameters: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
log.Printf("Received search-file request. ResponseAddr: %s", searchParams.ResponseAddr)
|
||||
|
||||
results := fetchFileResults(searchParams.Query, searchParams.Safe, searchParams.Lang, searchParams.Page)
|
||||
resultsJSON, err := json.Marshal(results)
|
||||
if err != nil {
|
||||
log.Printf("Error marshalling search results: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
responseMsg := Message{
|
||||
ID: hostID,
|
||||
Type: "file-results",
|
||||
Content: string(resultsJSON),
|
||||
}
|
||||
|
||||
log.Printf("Sending file search results to %s", searchParams.ResponseAddr)
|
||||
|
||||
if searchParams.ResponseAddr == "" {
|
||||
log.Printf("Error: Response address is empty")
|
||||
return
|
||||
}
|
||||
|
||||
err = sendMessage(searchParams.ResponseAddr, responseMsg)
|
||||
if err != nil {
|
||||
log.Printf("Error sending file search results to %s: %v", searchParams.ResponseAddr, err)
|
||||
}
|
||||
}
|
||||
|
||||
func handleSearchForumMessage(msg Message) {
|
||||
var searchParams struct {
|
||||
Query string `json:"query"`
|
||||
Safe string `json:"safe"`
|
||||
Lang string `json:"lang"`
|
||||
Page int `json:"page"`
|
||||
ResponseAddr string `json:"responseAddr"`
|
||||
}
|
||||
err := json.Unmarshal([]byte(msg.Content), &searchParams)
|
||||
if err != nil {
|
||||
log.Printf("Error parsing search parameters: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
log.Printf("Received search-forum request. ResponseAddr: %s", searchParams.ResponseAddr)
|
||||
|
||||
results := fetchForumResults(searchParams.Query, searchParams.Safe, searchParams.Lang, searchParams.Page)
|
||||
resultsJSON, err := json.Marshal(results)
|
||||
if err != nil {
|
||||
log.Printf("Error marshalling search results: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
responseMsg := Message{
|
||||
ID: hostID,
|
||||
Type: "forum-results",
|
||||
Content: string(resultsJSON),
|
||||
}
|
||||
|
||||
// Log the address to be used for sending the response
|
||||
log.Printf("Sending forum search results to %s", searchParams.ResponseAddr)
|
||||
|
||||
if searchParams.ResponseAddr == "" {
|
||||
log.Printf("Error: Response address is empty")
|
||||
return
|
||||
}
|
||||
|
||||
err = sendMessage(searchParams.ResponseAddr, responseMsg)
|
||||
if err != nil {
|
||||
log.Printf("Error sending forum search results to %s: %v", searchParams.ResponseAddr, err)
|
||||
}
|
||||
}
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"log"
|
||||
)
|
||||
|
||||
func handleSearchTextMessage(msg Message) {
|
||||
var searchParams struct {
|
||||
Query string `json:"query"`
|
||||
Safe string `json:"safe"`
|
||||
Lang string `json:"lang"`
|
||||
Page int `json:"page"`
|
||||
ResponseAddr string `json:"responseAddr"`
|
||||
}
|
||||
err := json.Unmarshal([]byte(msg.Content), &searchParams)
|
||||
if err != nil {
|
||||
printWarn("Error parsing search parameters: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
printDebug("Received search-text request. ResponseAddr: %s", searchParams.ResponseAddr)
|
||||
|
||||
results := fetchTextResults(searchParams.Query, searchParams.Safe, searchParams.Lang, searchParams.Page)
|
||||
resultsJSON, err := json.Marshal(results)
|
||||
if err != nil {
|
||||
printWarn("Error marshalling search results: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
responseMsg := Message{
|
||||
ID: hostID,
|
||||
Type: "text-results",
|
||||
Content: string(resultsJSON),
|
||||
}
|
||||
|
||||
// Log the address to be used for sending the response
|
||||
printDebug("Sending text search results to %s", searchParams.ResponseAddr)
|
||||
|
||||
if searchParams.ResponseAddr == "" {
|
||||
printErr("Error: Response address is empty")
|
||||
return
|
||||
}
|
||||
|
||||
err = sendMessage(searchParams.ResponseAddr, responseMsg)
|
||||
if err != nil {
|
||||
printWarn("Error sending text search results to %s: %v", searchParams.ResponseAddr, err)
|
||||
}
|
||||
}
|
||||
|
||||
func handleSearchImageMessage(msg Message) {
|
||||
var searchParams struct {
|
||||
Query string `json:"query"`
|
||||
Safe string `json:"safe"`
|
||||
Lang string `json:"lang"`
|
||||
Page int `json:"page"`
|
||||
ResponseAddr string `json:"responseAddr"`
|
||||
}
|
||||
err := json.Unmarshal([]byte(msg.Content), &searchParams)
|
||||
if err != nil {
|
||||
log.Printf("Error parsing search parameters: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
log.Printf("Received search-image request. ResponseAddr: %s", searchParams.ResponseAddr)
|
||||
|
||||
results := fetchImageResults(searchParams.Query, searchParams.Safe, searchParams.Lang, searchParams.Page)
|
||||
resultsJSON, err := json.Marshal(results)
|
||||
if err != nil {
|
||||
log.Printf("Error marshalling search results: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
responseMsg := Message{
|
||||
ID: hostID,
|
||||
Type: "image-results",
|
||||
Content: string(resultsJSON),
|
||||
}
|
||||
|
||||
// Log the address to be used for sending the response
|
||||
log.Printf("Sending image search results to %s", searchParams.ResponseAddr)
|
||||
|
||||
if searchParams.ResponseAddr == "" {
|
||||
log.Printf("Error: Response address is empty")
|
||||
return
|
||||
}
|
||||
|
||||
err = sendMessage(searchParams.ResponseAddr, responseMsg)
|
||||
if err != nil {
|
||||
log.Printf("Error sending image search results to %s: %v", searchParams.ResponseAddr, err)
|
||||
}
|
||||
}
|
||||
|
||||
func handleSearchVideoMessage(msg Message) {
|
||||
var searchParams struct {
|
||||
Query string `json:"query"`
|
||||
Safe string `json:"safe"`
|
||||
Lang string `json:"lang"`
|
||||
Page int `json:"page"`
|
||||
ResponseAddr string `json:"responseAddr"`
|
||||
}
|
||||
err := json.Unmarshal([]byte(msg.Content), &searchParams)
|
||||
if err != nil {
|
||||
log.Printf("Error parsing search parameters: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
log.Printf("Received search-video request. ResponseAddr: %s", searchParams.ResponseAddr)
|
||||
|
||||
results := fetchVideoResults(searchParams.Query, searchParams.Safe, searchParams.Lang, searchParams.Page)
|
||||
resultsJSON, err := json.Marshal(results)
|
||||
if err != nil {
|
||||
log.Printf("Error marshalling search results: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
responseMsg := Message{
|
||||
ID: hostID,
|
||||
Type: "video-results",
|
||||
Content: string(resultsJSON),
|
||||
}
|
||||
|
||||
log.Printf("Sending video search results to %s", searchParams.ResponseAddr)
|
||||
|
||||
if searchParams.ResponseAddr == "" {
|
||||
log.Printf("Error: Response address is empty")
|
||||
return
|
||||
}
|
||||
|
||||
err = sendMessage(searchParams.ResponseAddr, responseMsg)
|
||||
if err != nil {
|
||||
log.Printf("Error sending video search results to %s: %v", searchParams.ResponseAddr, err)
|
||||
}
|
||||
}
|
||||
|
||||
func handleSearchFileMessage(msg Message) {
|
||||
var searchParams struct {
|
||||
Query string `json:"query"`
|
||||
Safe string `json:"safe"`
|
||||
Lang string `json:"lang"`
|
||||
Page int `json:"page"`
|
||||
ResponseAddr string `json:"responseAddr"`
|
||||
}
|
||||
err := json.Unmarshal([]byte(msg.Content), &searchParams)
|
||||
if err != nil {
|
||||
log.Printf("Error parsing search parameters: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
log.Printf("Received search-file request. ResponseAddr: %s", searchParams.ResponseAddr)
|
||||
|
||||
results := fetchFileResults(searchParams.Query, searchParams.Safe, searchParams.Lang, searchParams.Page)
|
||||
resultsJSON, err := json.Marshal(results)
|
||||
if err != nil {
|
||||
log.Printf("Error marshalling search results: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
responseMsg := Message{
|
||||
ID: hostID,
|
||||
Type: "file-results",
|
||||
Content: string(resultsJSON),
|
||||
}
|
||||
|
||||
log.Printf("Sending file search results to %s", searchParams.ResponseAddr)
|
||||
|
||||
if searchParams.ResponseAddr == "" {
|
||||
log.Printf("Error: Response address is empty")
|
||||
return
|
||||
}
|
||||
|
||||
err = sendMessage(searchParams.ResponseAddr, responseMsg)
|
||||
if err != nil {
|
||||
log.Printf("Error sending file search results to %s: %v", searchParams.ResponseAddr, err)
|
||||
}
|
||||
}
|
||||
|
||||
func handleSearchForumMessage(msg Message) {
|
||||
var searchParams struct {
|
||||
Query string `json:"query"`
|
||||
Safe string `json:"safe"`
|
||||
Lang string `json:"lang"`
|
||||
Page int `json:"page"`
|
||||
ResponseAddr string `json:"responseAddr"`
|
||||
}
|
||||
err := json.Unmarshal([]byte(msg.Content), &searchParams)
|
||||
if err != nil {
|
||||
log.Printf("Error parsing search parameters: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
log.Printf("Received search-forum request. ResponseAddr: %s", searchParams.ResponseAddr)
|
||||
|
||||
results := fetchForumResults(searchParams.Query, searchParams.Safe, searchParams.Lang, searchParams.Page)
|
||||
resultsJSON, err := json.Marshal(results)
|
||||
if err != nil {
|
||||
log.Printf("Error marshalling search results: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
responseMsg := Message{
|
||||
ID: hostID,
|
||||
Type: "forum-results",
|
||||
Content: string(resultsJSON),
|
||||
}
|
||||
|
||||
// Log the address to be used for sending the response
|
||||
log.Printf("Sending forum search results to %s", searchParams.ResponseAddr)
|
||||
|
||||
if searchParams.ResponseAddr == "" {
|
||||
log.Printf("Error: Response address is empty")
|
||||
return
|
||||
}
|
||||
|
||||
err = sendMessage(searchParams.ResponseAddr, responseMsg)
|
||||
if err != nil {
|
||||
log.Printf("Error sending forum search results to %s: %v", searchParams.ResponseAddr, err)
|
||||
}
|
||||
}
|
||||
|
|
165
node-request-files.go
Normal file → Executable file
165
node-request-files.go
Normal file → Executable file
|
@ -1,83 +1,82 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log"
|
||||
"time"
|
||||
)
|
||||
|
||||
func tryOtherNodesForFileSearch(query, safe, lang string, page int, visitedNodes []string) []TorrentResult {
|
||||
for _, nodeAddr := range peers {
|
||||
if contains(visitedNodes, nodeAddr) {
|
||||
continue // Skip nodes already visited
|
||||
}
|
||||
results, err := sendFileSearchRequestToNode(nodeAddr, query, safe, lang, page, visitedNodes)
|
||||
if err != nil {
|
||||
log.Printf("Error contacting node %s: %v", nodeAddr, err)
|
||||
continue
|
||||
}
|
||||
if len(results) > 0 {
|
||||
return results
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// sendFileSearchRequestToNode forwards a file-search request to a single peer
// node and blocks until a reply arrives on fileResultsChan or a 20-second
// timeout elapses. visitedNodes is extended with this node so the request
// cannot loop back.
func sendFileSearchRequestToNode(nodeAddr, query, safe, lang string, page int, visitedNodes []string) ([]TorrentResult, error) {
	// Mark this node as visited before sending, so the peer's own fan-out
	// skips us.
	visitedNodes = append(visitedNodes, nodeAddr)
	// Request payload; ResponseAddr tells the peer where to deliver results.
	searchParams := struct {
		Query        string   `json:"query"`
		Safe         string   `json:"safe"`
		Lang         string   `json:"lang"`
		Page         int      `json:"page"`
		ResponseAddr string   `json:"responseAddr"`
		VisitedNodes []string `json:"visitedNodes"`
	}{
		Query:        query,
		Safe:         safe,
		Lang:         lang,
		Page:         page,
		ResponseAddr: fmt.Sprintf("http://localhost:%d/node", config.Port),
		VisitedNodes: visitedNodes,
	}

	msgBytes, err := json.Marshal(searchParams)
	if err != nil {
		return nil, fmt.Errorf("failed to marshal search parameters: %v", err)
	}

	msg := Message{
		ID:      hostID,
		Type:    "search-file",
		Content: string(msgBytes),
	}

	err = sendMessage(nodeAddr, msg)
	if err != nil {
		return nil, fmt.Errorf("failed to send search request to node %s: %v", nodeAddr, err)
	}

	// Wait for results
	// NOTE(review): fileResultsChan is shared package state — a reply
	// belonging to a different in-flight request could be attributed to this
	// one; confirm whether concurrent file searches are possible.
	select {
	case res := <-fileResultsChan:
		return res, nil
	case <-time.After(20 * time.Second):
		return nil, fmt.Errorf("timeout waiting for results from node %s", nodeAddr)
	}
}
|
||||
|
||||
func handleFileResultsMessage(msg Message) {
|
||||
var results []TorrentResult
|
||||
err := json.Unmarshal([]byte(msg.Content), &results)
|
||||
if err != nil {
|
||||
log.Printf("Error unmarshalling file results: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
log.Printf("Received file results: %+v", results)
|
||||
// Send results to fileResultsChan
|
||||
go func() {
|
||||
fileResultsChan <- results
|
||||
}()
|
||||
}
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"time"
|
||||
)
|
||||
|
||||
func tryOtherNodesForFileSearch(query, safe, lang string, page int, visitedNodes []string) []TorrentResult {
|
||||
for _, nodeAddr := range peers {
|
||||
if contains(visitedNodes, nodeAddr) {
|
||||
continue // Skip nodes already visited
|
||||
}
|
||||
results, err := sendFileSearchRequestToNode(nodeAddr, query, safe, lang, page, visitedNodes)
|
||||
if err != nil {
|
||||
printWarn("Error contacting node %s: %v", nodeAddr, err)
|
||||
continue
|
||||
}
|
||||
if len(results) > 0 {
|
||||
return results
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func sendFileSearchRequestToNode(nodeAddr, query, safe, lang string, page int, visitedNodes []string) ([]TorrentResult, error) {
|
||||
visitedNodes = append(visitedNodes, nodeAddr)
|
||||
searchParams := struct {
|
||||
Query string `json:"query"`
|
||||
Safe string `json:"safe"`
|
||||
Lang string `json:"lang"`
|
||||
Page int `json:"page"`
|
||||
ResponseAddr string `json:"responseAddr"`
|
||||
VisitedNodes []string `json:"visitedNodes"`
|
||||
}{
|
||||
Query: query,
|
||||
Safe: safe,
|
||||
Lang: lang,
|
||||
Page: page,
|
||||
ResponseAddr: fmt.Sprintf("http://localhost:%d/node", config.Port),
|
||||
VisitedNodes: visitedNodes,
|
||||
}
|
||||
|
||||
msgBytes, err := json.Marshal(searchParams)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to marshal search parameters: %v", err)
|
||||
}
|
||||
|
||||
msg := Message{
|
||||
ID: hostID,
|
||||
Type: "search-file",
|
||||
Content: string(msgBytes),
|
||||
}
|
||||
|
||||
err = sendMessage(nodeAddr, msg)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to send search request to node %s: %v", nodeAddr, err)
|
||||
}
|
||||
|
||||
// Wait for results
|
||||
select {
|
||||
case res := <-fileResultsChan:
|
||||
return res, nil
|
||||
case <-time.After(20 * time.Second):
|
||||
return nil, fmt.Errorf("timeout waiting for results from node %s", nodeAddr)
|
||||
}
|
||||
}
|
||||
|
||||
func handleFileResultsMessage(msg Message) {
|
||||
var results []TorrentResult
|
||||
err := json.Unmarshal([]byte(msg.Content), &results)
|
||||
if err != nil {
|
||||
printWarn("Error unmarshalling file results: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
printDebug("Received file results: %+v", results)
|
||||
// Send results to fileResultsChan
|
||||
go func() {
|
||||
fileResultsChan <- results
|
||||
}()
|
||||
}
|
||||
|
|
201
node-request-forums.go
Normal file → Executable file
201
node-request-forums.go
Normal file → Executable file
|
@ -1,101 +1,100 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log"
|
||||
"time"
|
||||
)
|
||||
|
||||
var forumResultsChan = make(chan []ForumSearchResult)
|
||||
|
||||
func tryOtherNodesForForumSearch(query, safe, lang string, page int) []ForumSearchResult {
|
||||
for _, nodeAddr := range peers {
|
||||
results, err := sendForumSearchRequestToNode(nodeAddr, query, safe, lang, page, []string{})
|
||||
if err != nil {
|
||||
log.Printf("Error contacting node %s: %v", nodeAddr, err)
|
||||
continue
|
||||
}
|
||||
if len(results) > 0 {
|
||||
return results
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func sendForumSearchRequestToNode(nodeAddr, query, safe, lang string, page int, visitedNodes []string) ([]ForumSearchResult, error) {
|
||||
// Check if the current node has already been visited
|
||||
for _, node := range visitedNodes {
|
||||
if node == hostID {
|
||||
return nil, fmt.Errorf("loop detected: this node (%s) has already been visited", hostID)
|
||||
}
|
||||
}
|
||||
|
||||
// Add current node to the list of visited nodes
|
||||
visitedNodes = append(visitedNodes, hostID)
|
||||
|
||||
searchParams := struct {
|
||||
Query string `json:"query"`
|
||||
Safe string `json:"safe"`
|
||||
Lang string `json:"lang"`
|
||||
Page int `json:"page"`
|
||||
ResponseAddr string `json:"responseAddr"`
|
||||
VisitedNodes []string `json:"visitedNodes"`
|
||||
}{
|
||||
Query: query,
|
||||
Safe: safe,
|
||||
Lang: lang,
|
||||
Page: page,
|
||||
ResponseAddr: fmt.Sprintf("http://localhost:%d/node", config.Port),
|
||||
VisitedNodes: visitedNodes,
|
||||
}
|
||||
|
||||
msgBytes, err := json.Marshal(searchParams)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to marshal search parameters: %v", err)
|
||||
}
|
||||
|
||||
msg := Message{
|
||||
ID: hostID,
|
||||
Type: "search-forum",
|
||||
Content: string(msgBytes),
|
||||
}
|
||||
|
||||
err = sendMessage(nodeAddr, msg)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to send search request to node %s: %v", nodeAddr, err)
|
||||
}
|
||||
|
||||
// Wait for results
|
||||
select {
|
||||
case res := <-forumResultsChan:
|
||||
return res, nil
|
||||
case <-time.After(20 * time.Second):
|
||||
return nil, fmt.Errorf("timeout waiting for results from node %s", nodeAddr)
|
||||
}
|
||||
}
|
||||
|
||||
func handleForumResultsMessage(msg Message) {
|
||||
var results []ForumSearchResult
|
||||
err := json.Unmarshal([]byte(msg.Content), &results)
|
||||
if err != nil {
|
||||
log.Printf("Error unmarshalling forum results: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
log.Printf("Received forum results: %+v", results)
|
||||
// Send results to forumResultsChan
|
||||
go func() {
|
||||
forumResultsChan <- results
|
||||
}()
|
||||
}
|
||||
|
||||
// Used only to answer requests
|
||||
func fetchForumResults(query, safe, lang string, page int) []ForumSearchResult {
|
||||
results, err := PerformRedditSearch(query, safe, page)
|
||||
if err != nil {
|
||||
log.Printf("Error fetching forum results: %v", err)
|
||||
return nil
|
||||
}
|
||||
return results
|
||||
}
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"time"
|
||||
)
|
||||
|
||||
var forumResultsChan = make(chan []ForumSearchResult)
|
||||
|
||||
func tryOtherNodesForForumSearch(query, safe, lang string, page int) []ForumSearchResult {
|
||||
for _, nodeAddr := range peers {
|
||||
results, err := sendForumSearchRequestToNode(nodeAddr, query, safe, lang, page, []string{})
|
||||
if err != nil {
|
||||
printWarn("Error contacting node %s: %v", nodeAddr, err)
|
||||
continue
|
||||
}
|
||||
if len(results) > 0 {
|
||||
return results
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func sendForumSearchRequestToNode(nodeAddr, query, safe, lang string, page int, visitedNodes []string) ([]ForumSearchResult, error) {
|
||||
// Check if the current node has already been visited
|
||||
for _, node := range visitedNodes {
|
||||
if node == hostID {
|
||||
return nil, fmt.Errorf("loop detected: this node (%s) has already been visited", hostID)
|
||||
}
|
||||
}
|
||||
|
||||
// Add current node to the list of visited nodes
|
||||
visitedNodes = append(visitedNodes, hostID)
|
||||
|
||||
searchParams := struct {
|
||||
Query string `json:"query"`
|
||||
Safe string `json:"safe"`
|
||||
Lang string `json:"lang"`
|
||||
Page int `json:"page"`
|
||||
ResponseAddr string `json:"responseAddr"`
|
||||
VisitedNodes []string `json:"visitedNodes"`
|
||||
}{
|
||||
Query: query,
|
||||
Safe: safe,
|
||||
Lang: lang,
|
||||
Page: page,
|
||||
ResponseAddr: fmt.Sprintf("http://localhost:%d/node", config.Port),
|
||||
VisitedNodes: visitedNodes,
|
||||
}
|
||||
|
||||
msgBytes, err := json.Marshal(searchParams)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to marshal search parameters: %v", err)
|
||||
}
|
||||
|
||||
msg := Message{
|
||||
ID: hostID,
|
||||
Type: "search-forum",
|
||||
Content: string(msgBytes),
|
||||
}
|
||||
|
||||
err = sendMessage(nodeAddr, msg)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to send search request to node %s: %v", nodeAddr, err)
|
||||
}
|
||||
|
||||
// Wait for results
|
||||
select {
|
||||
case res := <-forumResultsChan:
|
||||
return res, nil
|
||||
case <-time.After(20 * time.Second):
|
||||
return nil, fmt.Errorf("timeout waiting for results from node %s", nodeAddr)
|
||||
}
|
||||
}
|
||||
|
||||
func handleForumResultsMessage(msg Message) {
|
||||
var results []ForumSearchResult
|
||||
err := json.Unmarshal([]byte(msg.Content), &results)
|
||||
if err != nil {
|
||||
printWarn("Error unmarshalling forum results: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
printDebug("Received forum results: %+v", results)
|
||||
// Send results to forumResultsChan
|
||||
go func() {
|
||||
forumResultsChan <- results
|
||||
}()
|
||||
}
|
||||
|
||||
// Used only to answer requests
|
||||
func fetchForumResults(query, safe, lang string, page int) []ForumSearchResult {
|
||||
results, err := PerformRedditSearch(query, safe, page)
|
||||
if err != nil {
|
||||
printWarn("Error fetching forum results: %v", err)
|
||||
return nil
|
||||
}
|
||||
return results
|
||||
}
|
||||
|
|
169
node-request-images.go
Normal file → Executable file
169
node-request-images.go
Normal file → Executable file
|
@ -1,85 +1,84 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log"
|
||||
"time"
|
||||
)
|
||||
|
||||
var imageResultsChan = make(chan []ImageSearchResult)
|
||||
|
||||
func handleImageResultsMessage(msg Message) {
|
||||
var results []ImageSearchResult
|
||||
err := json.Unmarshal([]byte(msg.Content), &results)
|
||||
if err != nil {
|
||||
log.Printf("Error unmarshalling image results: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
log.Printf("Received image results: %+v", results)
|
||||
// Send results to imageResultsChan
|
||||
go func() {
|
||||
imageResultsChan <- results
|
||||
}()
|
||||
}
|
||||
|
||||
func sendImageSearchRequestToNode(nodeAddr, query, safe, lang string, page int, visitedNodes []string) ([]ImageSearchResult, error) {
|
||||
visitedNodes = append(visitedNodes, nodeAddr)
|
||||
searchParams := struct {
|
||||
Query string `json:"query"`
|
||||
Safe string `json:"safe"`
|
||||
Lang string `json:"lang"`
|
||||
Page int `json:"page"`
|
||||
ResponseAddr string `json:"responseAddr"`
|
||||
VisitedNodes []string `json:"visitedNodes"`
|
||||
}{
|
||||
Query: query,
|
||||
Safe: safe,
|
||||
Lang: lang,
|
||||
Page: page,
|
||||
ResponseAddr: fmt.Sprintf("http://localhost:%d/node", config.Port),
|
||||
VisitedNodes: visitedNodes,
|
||||
}
|
||||
|
||||
msgBytes, err := json.Marshal(searchParams)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to marshal search parameters: %v", err)
|
||||
}
|
||||
|
||||
msg := Message{
|
||||
ID: hostID,
|
||||
Type: "search-image",
|
||||
Content: string(msgBytes),
|
||||
}
|
||||
|
||||
err = sendMessage(nodeAddr, msg)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to send search request to node %s: %v", nodeAddr, err)
|
||||
}
|
||||
|
||||
// Wait for results
|
||||
select {
|
||||
case res := <-imageResultsChan:
|
||||
return res, nil
|
||||
case <-time.After(30 * time.Second):
|
||||
return nil, fmt.Errorf("timeout waiting for results from node %s", nodeAddr)
|
||||
}
|
||||
}
|
||||
|
||||
func tryOtherNodesForImageSearch(query, safe, lang string, page int, visitedNodes []string) []ImageSearchResult {
|
||||
for _, nodeAddr := range peers {
|
||||
if contains(visitedNodes, nodeAddr) {
|
||||
continue // Skip nodes already visited
|
||||
}
|
||||
results, err := sendImageSearchRequestToNode(nodeAddr, query, safe, lang, page, visitedNodes)
|
||||
if err != nil {
|
||||
log.Printf("Error contacting node %s: %v", nodeAddr, err)
|
||||
continue
|
||||
}
|
||||
if len(results) > 0 {
|
||||
return results
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"time"
|
||||
)
|
||||
|
||||
var imageResultsChan = make(chan []ImageSearchResult)
|
||||
|
||||
func handleImageResultsMessage(msg Message) {
|
||||
var results []ImageSearchResult
|
||||
err := json.Unmarshal([]byte(msg.Content), &results)
|
||||
if err != nil {
|
||||
printWarn("Error unmarshalling image results: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
printDebug("Received image results: %+v", results)
|
||||
// Send results to imageResultsChan
|
||||
go func() {
|
||||
imageResultsChan <- results
|
||||
}()
|
||||
}
|
||||
|
||||
func sendImageSearchRequestToNode(nodeAddr, query, safe, lang string, page int, visitedNodes []string) ([]ImageSearchResult, error) {
|
||||
visitedNodes = append(visitedNodes, nodeAddr)
|
||||
searchParams := struct {
|
||||
Query string `json:"query"`
|
||||
Safe string `json:"safe"`
|
||||
Lang string `json:"lang"`
|
||||
Page int `json:"page"`
|
||||
ResponseAddr string `json:"responseAddr"`
|
||||
VisitedNodes []string `json:"visitedNodes"`
|
||||
}{
|
||||
Query: query,
|
||||
Safe: safe,
|
||||
Lang: lang,
|
||||
Page: page,
|
||||
ResponseAddr: fmt.Sprintf("http://localhost:%d/node", config.Port),
|
||||
VisitedNodes: visitedNodes,
|
||||
}
|
||||
|
||||
msgBytes, err := json.Marshal(searchParams)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to marshal search parameters: %v", err)
|
||||
}
|
||||
|
||||
msg := Message{
|
||||
ID: hostID,
|
||||
Type: "search-image",
|
||||
Content: string(msgBytes),
|
||||
}
|
||||
|
||||
err = sendMessage(nodeAddr, msg)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to send search request to node %s: %v", nodeAddr, err)
|
||||
}
|
||||
|
||||
// Wait for results
|
||||
select {
|
||||
case res := <-imageResultsChan:
|
||||
return res, nil
|
||||
case <-time.After(30 * time.Second):
|
||||
return nil, fmt.Errorf("timeout waiting for results from node %s", nodeAddr)
|
||||
}
|
||||
}
|
||||
|
||||
func tryOtherNodesForImageSearch(query, safe, lang string, page int, visitedNodes []string) []ImageSearchResult {
|
||||
for _, nodeAddr := range peers {
|
||||
if contains(visitedNodes, nodeAddr) {
|
||||
continue // Skip nodes already visited
|
||||
}
|
||||
results, err := sendImageSearchRequestToNode(nodeAddr, query, safe, lang, page, visitedNodes)
|
||||
if err != nil {
|
||||
printWarn("Error contacting node %s: %v", nodeAddr, err)
|
||||
continue
|
||||
}
|
||||
if len(results) > 0 {
|
||||
return results
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
|
169
node-request-text.go
Normal file → Executable file
169
node-request-text.go
Normal file → Executable file
|
@ -1,85 +1,84 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log"
|
||||
"time"
|
||||
)
|
||||
|
||||
var textResultsChan = make(chan []TextSearchResult)
|
||||
|
||||
func tryOtherNodesForTextSearch(query, safe, lang string, page int, visitedNodes []string) []TextSearchResult {
|
||||
for _, nodeAddr := range peers {
|
||||
if contains(visitedNodes, nodeAddr) {
|
||||
continue // Skip nodes already visited
|
||||
}
|
||||
results, err := sendTextSearchRequestToNode(nodeAddr, query, safe, lang, page, visitedNodes)
|
||||
if err != nil {
|
||||
log.Printf("Error contacting node %s: %v", nodeAddr, err)
|
||||
continue
|
||||
}
|
||||
if len(results) > 0 {
|
||||
return results
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func sendTextSearchRequestToNode(nodeAddr, query, safe, lang string, page int, visitedNodes []string) ([]TextSearchResult, error) {
|
||||
visitedNodes = append(visitedNodes, nodeAddr)
|
||||
searchParams := struct {
|
||||
Query string `json:"query"`
|
||||
Safe string `json:"safe"`
|
||||
Lang string `json:"lang"`
|
||||
Page int `json:"page"`
|
||||
ResponseAddr string `json:"responseAddr"`
|
||||
VisitedNodes []string `json:"visitedNodes"`
|
||||
}{
|
||||
Query: query,
|
||||
Safe: safe,
|
||||
Lang: lang,
|
||||
Page: page,
|
||||
ResponseAddr: fmt.Sprintf("http://localhost:%d/node", config.Port),
|
||||
VisitedNodes: visitedNodes,
|
||||
}
|
||||
|
||||
msgBytes, err := json.Marshal(searchParams)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to marshal search parameters: %v", err)
|
||||
}
|
||||
|
||||
msg := Message{
|
||||
ID: hostID,
|
||||
Type: "search-text",
|
||||
Content: string(msgBytes),
|
||||
}
|
||||
|
||||
err = sendMessage(nodeAddr, msg)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to send search request to node %s: %v", nodeAddr, err)
|
||||
}
|
||||
|
||||
// Wait for results
|
||||
select {
|
||||
case res := <-textResultsChan:
|
||||
return res, nil
|
||||
case <-time.After(20 * time.Second):
|
||||
return nil, fmt.Errorf("timeout waiting for results from node %s", nodeAddr)
|
||||
}
|
||||
}
|
||||
|
||||
func handleTextResultsMessage(msg Message) {
|
||||
var results []TextSearchResult
|
||||
err := json.Unmarshal([]byte(msg.Content), &results)
|
||||
if err != nil {
|
||||
log.Printf("Error unmarshalling text results: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
log.Printf("Received text results: %+v", results)
|
||||
// Send results to textResultsChan
|
||||
go func() {
|
||||
textResultsChan <- results
|
||||
}()
|
||||
}
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"time"
|
||||
)
|
||||
|
||||
var textResultsChan = make(chan []TextSearchResult)
|
||||
|
||||
func tryOtherNodesForTextSearch(query, safe, lang string, page int, visitedNodes []string) []TextSearchResult {
|
||||
for _, nodeAddr := range peers {
|
||||
if contains(visitedNodes, nodeAddr) {
|
||||
continue // Skip nodes already visited
|
||||
}
|
||||
results, err := sendTextSearchRequestToNode(nodeAddr, query, safe, lang, page, visitedNodes)
|
||||
if err != nil {
|
||||
printWarn("Error contacting node %s: %v", nodeAddr, err)
|
||||
continue
|
||||
}
|
||||
if len(results) > 0 {
|
||||
return results
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func sendTextSearchRequestToNode(nodeAddr, query, safe, lang string, page int, visitedNodes []string) ([]TextSearchResult, error) {
|
||||
visitedNodes = append(visitedNodes, nodeAddr)
|
||||
searchParams := struct {
|
||||
Query string `json:"query"`
|
||||
Safe string `json:"safe"`
|
||||
Lang string `json:"lang"`
|
||||
Page int `json:"page"`
|
||||
ResponseAddr string `json:"responseAddr"`
|
||||
VisitedNodes []string `json:"visitedNodes"`
|
||||
}{
|
||||
Query: query,
|
||||
Safe: safe,
|
||||
Lang: lang,
|
||||
Page: page,
|
||||
ResponseAddr: fmt.Sprintf("http://localhost:%d/node", config.Port),
|
||||
VisitedNodes: visitedNodes,
|
||||
}
|
||||
|
||||
msgBytes, err := json.Marshal(searchParams)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to marshal search parameters: %v", err)
|
||||
}
|
||||
|
||||
msg := Message{
|
||||
ID: hostID,
|
||||
Type: "search-text",
|
||||
Content: string(msgBytes),
|
||||
}
|
||||
|
||||
err = sendMessage(nodeAddr, msg)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to send search request to node %s: %v", nodeAddr, err)
|
||||
}
|
||||
|
||||
// Wait for results
|
||||
select {
|
||||
case res := <-textResultsChan:
|
||||
return res, nil
|
||||
case <-time.After(20 * time.Second):
|
||||
return nil, fmt.Errorf("timeout waiting for results from node %s", nodeAddr)
|
||||
}
|
||||
}
|
||||
|
||||
func handleTextResultsMessage(msg Message) {
|
||||
var results []TextSearchResult
|
||||
err := json.Unmarshal([]byte(msg.Content), &results)
|
||||
if err != nil {
|
||||
printWarn("Error unmarshalling text results: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
printDebug("Received text results: %+v", results)
|
||||
// Send results to textResultsChan
|
||||
go func() {
|
||||
textResultsChan <- results
|
||||
}()
|
||||
}
|
||||
|
|
165
node-request-video.go
Normal file → Executable file
165
node-request-video.go
Normal file → Executable file
|
@ -1,83 +1,82 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log"
|
||||
"time"
|
||||
)
|
||||
|
||||
func tryOtherNodesForVideoSearch(query, safe, lang string, page int, visitedNodes []string) []VideoResult {
|
||||
for _, nodeAddr := range peers {
|
||||
if contains(visitedNodes, nodeAddr) {
|
||||
continue // Skip nodes already visited
|
||||
}
|
||||
results, err := sendVideoSearchRequestToNode(nodeAddr, query, safe, lang, page, visitedNodes)
|
||||
if err != nil {
|
||||
log.Printf("Error contacting node %s: %v", nodeAddr, err)
|
||||
continue
|
||||
}
|
||||
if len(results) > 0 {
|
||||
return results
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func sendVideoSearchRequestToNode(nodeAddr, query, safe, lang string, page int, visitedNodes []string) ([]VideoResult, error) {
|
||||
visitedNodes = append(visitedNodes, nodeAddr)
|
||||
searchParams := struct {
|
||||
Query string `json:"query"`
|
||||
Safe string `json:"safe"`
|
||||
Lang string `json:"lang"`
|
||||
Page int `json:"page"`
|
||||
ResponseAddr string `json:"responseAddr"`
|
||||
VisitedNodes []string `json:"visitedNodes"`
|
||||
}{
|
||||
Query: query,
|
||||
Safe: safe,
|
||||
Lang: lang,
|
||||
Page: page,
|
||||
ResponseAddr: fmt.Sprintf("http://localhost:%d/node", config.Port),
|
||||
VisitedNodes: visitedNodes,
|
||||
}
|
||||
|
||||
msgBytes, err := json.Marshal(searchParams)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to marshal search parameters: %v", err)
|
||||
}
|
||||
|
||||
msg := Message{
|
||||
ID: hostID,
|
||||
Type: "search-video",
|
||||
Content: string(msgBytes),
|
||||
}
|
||||
|
||||
err = sendMessage(nodeAddr, msg)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to send search request to node %s: %v", nodeAddr, err)
|
||||
}
|
||||
|
||||
// Wait for results
|
||||
select {
|
||||
case res := <-videoResultsChan:
|
||||
return res, nil
|
||||
case <-time.After(20 * time.Second):
|
||||
return nil, fmt.Errorf("timeout waiting for results from node %s", nodeAddr)
|
||||
}
|
||||
}
|
||||
|
||||
func handleVideoResultsMessage(msg Message) {
|
||||
var results []VideoResult
|
||||
err := json.Unmarshal([]byte(msg.Content), &results)
|
||||
if err != nil {
|
||||
log.Printf("Error unmarshalling video results: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
log.Printf("Received video results: %+v", results)
|
||||
// Send results to videoResultsChan
|
||||
go func() {
|
||||
videoResultsChan <- results
|
||||
}()
|
||||
}
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"time"
|
||||
)
|
||||
|
||||
func tryOtherNodesForVideoSearch(query, safe, lang string, page int, visitedNodes []string) []VideoResult {
|
||||
for _, nodeAddr := range peers {
|
||||
if contains(visitedNodes, nodeAddr) {
|
||||
continue // Skip nodes already visited
|
||||
}
|
||||
results, err := sendVideoSearchRequestToNode(nodeAddr, query, safe, lang, page, visitedNodes)
|
||||
if err != nil {
|
||||
printWarn("Error contacting node %s: %v", nodeAddr, err)
|
||||
continue
|
||||
}
|
||||
if len(results) > 0 {
|
||||
return results
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func sendVideoSearchRequestToNode(nodeAddr, query, safe, lang string, page int, visitedNodes []string) ([]VideoResult, error) {
|
||||
visitedNodes = append(visitedNodes, nodeAddr)
|
||||
searchParams := struct {
|
||||
Query string `json:"query"`
|
||||
Safe string `json:"safe"`
|
||||
Lang string `json:"lang"`
|
||||
Page int `json:"page"`
|
||||
ResponseAddr string `json:"responseAddr"`
|
||||
VisitedNodes []string `json:"visitedNodes"`
|
||||
}{
|
||||
Query: query,
|
||||
Safe: safe,
|
||||
Lang: lang,
|
||||
Page: page,
|
||||
ResponseAddr: fmt.Sprintf("http://localhost:%d/node", config.Port),
|
||||
VisitedNodes: visitedNodes,
|
||||
}
|
||||
|
||||
msgBytes, err := json.Marshal(searchParams)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to marshal search parameters: %v", err)
|
||||
}
|
||||
|
||||
msg := Message{
|
||||
ID: hostID,
|
||||
Type: "search-video",
|
||||
Content: string(msgBytes),
|
||||
}
|
||||
|
||||
err = sendMessage(nodeAddr, msg)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to send search request to node %s: %v", nodeAddr, err)
|
||||
}
|
||||
|
||||
// Wait for results
|
||||
select {
|
||||
case res := <-videoResultsChan:
|
||||
return res, nil
|
||||
case <-time.After(20 * time.Second):
|
||||
return nil, fmt.Errorf("timeout waiting for results from node %s", nodeAddr)
|
||||
}
|
||||
}
|
||||
|
||||
func handleVideoResultsMessage(msg Message) {
|
||||
var results []VideoResult
|
||||
err := json.Unmarshal([]byte(msg.Content), &results)
|
||||
if err != nil {
|
||||
printWarn("Error unmarshalling video results: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
printDebug("Received video results: %+v", results)
|
||||
// Send results to videoResultsChan
|
||||
go func() {
|
||||
videoResultsChan <- results
|
||||
}()
|
||||
}
|
||||
|
|
31
run.bat
Executable file
31
run.bat
Executable file
|
@ -0,0 +1,31 @@
|
|||
@echo off
setlocal enabledelayedexpansion

rem Folder that holds the project's Go sources.
set GO_DIR=C:\path\to\your\go\files

rem Main source files, in the order the build expects them.
set FILES=main.go init.go search-engine.go text.go text-google.go text-librex.go text-brave.go text-duckduckgo.go common.go cache.go agent.go files.go files-thepiratebay.go files-torrentgalaxy.go forums.go get-searchxng.go imageproxy.go images.go images-imgur.go images-quant.go map.go node.go open-search.go video.go

rem Work from the source directory.
pushd %GO_DIR%

rem Collect every .go file that is not already listed in FILES.
set OTHER_GO_FILES=

for %%f in (*.go) do (
    set file=%%~nxf
    set found=0
    for %%i in (%FILES%) do (
        if /i "%%i"=="!file!" set found=1
    )
    if !found!==0 (
        set OTHER_GO_FILES=!OTHER_GO_FILES! "%%f"
    )
)

rem Run with the explicitly ordered files first, then the discovered extras.
go run %FILES% %OTHER_GO_FILES%

rem Restore the caller's working directory.
popd
|
366
text.go
Normal file → Executable file
366
text.go
Normal file → Executable file
|
@ -1,183 +1,183 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"html/template"
|
||||
"net/http"
|
||||
"time"
|
||||
)
|
||||
|
||||
var textSearchEngines []SearchEngine
|
||||
|
||||
func init() {
|
||||
textSearchEngines = []SearchEngine{
|
||||
{Name: "Google", Func: wrapTextSearchFunc(PerformGoogleTextSearch), Weight: 1},
|
||||
{Name: "LibreX", Func: wrapTextSearchFunc(PerformLibreXTextSearch), Weight: 2},
|
||||
{Name: "Brave", Func: wrapTextSearchFunc(PerformBraveTextSearch), Weight: 2},
|
||||
{Name: "DuckDuckGo", Func: wrapTextSearchFunc(PerformDuckDuckGoTextSearch), Weight: 5},
|
||||
// {Name: "SearXNG", Func: wrapTextSearchFunc(PerformSearXNGTextSearch), Weight: 2}, // Uncomment when implemented
|
||||
}
|
||||
}
|
||||
|
||||
func HandleTextSearch(w http.ResponseWriter, settings UserSettings, query, safe, lang string, page int) {
|
||||
startTime := time.Now()
|
||||
|
||||
cacheKey := CacheKey{Query: query, Page: page, Safe: safe == "true", Lang: lang, Type: "text"}
|
||||
combinedResults := getTextResultsFromCacheOrFetch(cacheKey, query, safe, lang, page)
|
||||
|
||||
hasPrevPage := page > 1 // dupe
|
||||
|
||||
//displayResults(w, combinedResults, query, lang, time.Since(startTime).Seconds(), page, hasPrevPage, hasNextPage)
|
||||
|
||||
// Prefetch next and previous pages
|
||||
go prefetchPage(query, safe, lang, page+1)
|
||||
if hasPrevPage {
|
||||
go prefetchPage(query, safe, lang, page-1)
|
||||
}
|
||||
|
||||
elapsedTime := time.Since(startTime)
|
||||
tmpl, err := template.New("text.html").Funcs(funcs).ParseFiles("templates/text.html")
|
||||
if err != nil {
|
||||
printErr("Error parsing template: %v", err)
|
||||
http.Error(w, "Internal Server Error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
data := struct {
|
||||
Results []TextSearchResult
|
||||
Query string
|
||||
Page int
|
||||
Fetched string
|
||||
LanguageOptions []LanguageOption
|
||||
CurrentLang string
|
||||
HasPrevPage bool
|
||||
HasNextPage bool
|
||||
NoResults bool
|
||||
Theme string
|
||||
}{
|
||||
Results: combinedResults,
|
||||
Query: query,
|
||||
Page: page,
|
||||
Fetched: fmt.Sprintf("%.2f seconds", elapsedTime.Seconds()),
|
||||
LanguageOptions: languageOptions,
|
||||
CurrentLang: lang,
|
||||
HasPrevPage: page > 1,
|
||||
HasNextPage: len(combinedResults) >= 50,
|
||||
NoResults: len(combinedResults) == 0,
|
||||
Theme: settings.Theme,
|
||||
}
|
||||
|
||||
err = tmpl.Execute(w, data)
|
||||
if err != nil {
|
||||
printErr("Error executing template: %v", err)
|
||||
http.Error(w, "Internal Server Error", http.StatusInternalServerError)
|
||||
}
|
||||
}
|
||||
|
||||
func getTextResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page int) []TextSearchResult {
|
||||
cacheChan := make(chan []SearchResult)
|
||||
var combinedResults []TextSearchResult
|
||||
|
||||
go func() {
|
||||
results, exists := resultsCache.Get(cacheKey)
|
||||
if exists {
|
||||
printInfo("Cache hit")
|
||||
cacheChan <- results
|
||||
} else {
|
||||
printInfo("Cache miss")
|
||||
cacheChan <- nil
|
||||
}
|
||||
}()
|
||||
|
||||
select {
|
||||
case results := <-cacheChan:
|
||||
if results == nil {
|
||||
combinedResults = fetchTextResults(query, safe, lang, page)
|
||||
if len(combinedResults) > 0 {
|
||||
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
|
||||
}
|
||||
} else {
|
||||
textResults, _, _ := convertToSpecificResults(results)
|
||||
combinedResults = textResults
|
||||
}
|
||||
case <-time.After(2 * time.Second):
|
||||
printInfo("Cache check timeout")
|
||||
combinedResults = fetchTextResults(query, safe, lang, page)
|
||||
if len(combinedResults) > 0 {
|
||||
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
|
||||
}
|
||||
}
|
||||
|
||||
return combinedResults
|
||||
}
|
||||
|
||||
func prefetchPage(query, safe, lang string, page int) {
|
||||
cacheKey := CacheKey{Query: query, Page: page, Safe: safe == "true", Lang: lang, Type: "text"}
|
||||
if _, exists := resultsCache.Get(cacheKey); !exists {
|
||||
printInfo("Page %d not cached, caching now...", page)
|
||||
pageResults := fetchTextResults(query, safe, lang, page)
|
||||
if len(pageResults) > 0 {
|
||||
resultsCache.Set(cacheKey, convertToSearchResults(pageResults))
|
||||
}
|
||||
} else {
|
||||
printInfo("Page %d already cached", page)
|
||||
}
|
||||
}
|
||||
|
||||
func fetchTextResults(query, safe, lang string, page int) []TextSearchResult {
|
||||
var results []TextSearchResult
|
||||
|
||||
for _, engine := range textSearchEngines {
|
||||
printInfo("Using search engine: %s", engine.Name)
|
||||
|
||||
searchResults, duration, err := engine.Func(query, safe, lang, page)
|
||||
updateEngineMetrics(&engine, duration, err == nil)
|
||||
if err != nil {
|
||||
printWarn("Error performing search with %s: %v", engine.Name, err)
|
||||
continue
|
||||
}
|
||||
|
||||
results = append(results, validateResults(searchResults)...)
|
||||
|
||||
// If results are found, break out of the loop
|
||||
if len(results) > 0 {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// If no results found after trying all engines
|
||||
if len(results) == 0 {
|
||||
printWarn("No text results found for query: %s, trying other nodes", query)
|
||||
results = tryOtherNodesForTextSearch(query, safe, lang, page, []string{hostID})
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
func validateResults(searchResults []SearchResult) []TextSearchResult {
|
||||
var validResults []TextSearchResult
|
||||
|
||||
// Remove anything that is missing a URL or Header
|
||||
for _, result := range searchResults {
|
||||
textResult := result.(TextSearchResult)
|
||||
if textResult.URL != "" || textResult.Header != "" {
|
||||
validResults = append(validResults, textResult)
|
||||
}
|
||||
}
|
||||
|
||||
return validResults
|
||||
}
|
||||
|
||||
func wrapTextSearchFunc(f func(string, string, string, int) ([]TextSearchResult, time.Duration, error)) func(string, string, string, int) ([]SearchResult, time.Duration, error) {
|
||||
return func(query, safe, lang string, page int) ([]SearchResult, time.Duration, error) {
|
||||
textResults, duration, err := f(query, safe, lang, page)
|
||||
if err != nil {
|
||||
return nil, duration, err
|
||||
}
|
||||
searchResults := make([]SearchResult, len(textResults))
|
||||
for i, result := range textResults {
|
||||
searchResults[i] = result
|
||||
}
|
||||
return searchResults, duration, nil
|
||||
}
|
||||
}
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"html/template"
|
||||
"net/http"
|
||||
"time"
|
||||
)
|
||||
|
||||
// textSearchEngines lists the available text search backends, in the order
// they are tried by fetchTextResults.
var textSearchEngines []SearchEngine
|
||||
|
||||
// init registers the text search engines with their weights.
// NOTE(review): Weight semantics (priority vs. selection probability) are
// defined elsewhere — confirm before reordering entries.
func init() {
	textSearchEngines = []SearchEngine{
		{Name: "Google", Func: wrapTextSearchFunc(PerformGoogleTextSearch), Weight: 1},
		{Name: "LibreX", Func: wrapTextSearchFunc(PerformLibreXTextSearch), Weight: 2},
		{Name: "Brave", Func: wrapTextSearchFunc(PerformBraveTextSearch), Weight: 2},
		{Name: "DuckDuckGo", Func: wrapTextSearchFunc(PerformDuckDuckGoTextSearch), Weight: 5},
		// {Name: "SearXNG", Func: wrapTextSearchFunc(PerformSearXNGTextSearch), Weight: 2}, // Uncomment when implemented
	}
}
|
||||
|
||||
func HandleTextSearch(w http.ResponseWriter, settings UserSettings, query string, page int) {
|
||||
startTime := time.Now()
|
||||
|
||||
cacheKey := CacheKey{Query: query, Page: page, Safe: settings.SafeSearch == "true", Lang: settings.Language, Type: "text"}
|
||||
combinedResults := getTextResultsFromCacheOrFetch(cacheKey, query, settings.SafeSearch, settings.Language, page)
|
||||
|
||||
hasPrevPage := page > 1 // dupe
|
||||
|
||||
//displayResults(w, combinedResults, query, lang, time.Since(startTime).Seconds(), page, hasPrevPage, hasNextPage)
|
||||
|
||||
// Prefetch next and previous pages
|
||||
go prefetchPage(query, settings.SafeSearch, settings.Language, page+1)
|
||||
if hasPrevPage {
|
||||
go prefetchPage(query, settings.SafeSearch, settings.Language, page-1)
|
||||
}
|
||||
|
||||
elapsedTime := time.Since(startTime)
|
||||
tmpl, err := template.New("text.html").Funcs(funcs).ParseFiles("templates/text.html")
|
||||
if err != nil {
|
||||
printErr("Error parsing template: %v", err)
|
||||
http.Error(w, "Internal Server Error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
data := struct {
|
||||
Results []TextSearchResult
|
||||
Query string
|
||||
Page int
|
||||
Fetched string
|
||||
LanguageOptions []LanguageOption
|
||||
CurrentLang string
|
||||
HasPrevPage bool
|
||||
HasNextPage bool
|
||||
NoResults bool
|
||||
Theme string
|
||||
}{
|
||||
Results: combinedResults,
|
||||
Query: query,
|
||||
Page: page,
|
||||
Fetched: fmt.Sprintf("%.2f seconds", elapsedTime.Seconds()),
|
||||
LanguageOptions: languageOptions,
|
||||
CurrentLang: settings.Language,
|
||||
HasPrevPage: page > 1,
|
||||
HasNextPage: len(combinedResults) >= 50,
|
||||
NoResults: len(combinedResults) == 0,
|
||||
Theme: settings.Theme,
|
||||
}
|
||||
|
||||
err = tmpl.Execute(w, data)
|
||||
if err != nil {
|
||||
printErr("Error executing template: %v", err)
|
||||
http.Error(w, "Internal Server Error", http.StatusInternalServerError)
|
||||
}
|
||||
}
|
||||
|
||||
func getTextResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page int) []TextSearchResult {
|
||||
cacheChan := make(chan []SearchResult)
|
||||
var combinedResults []TextSearchResult
|
||||
|
||||
go func() {
|
||||
results, exists := resultsCache.Get(cacheKey)
|
||||
if exists {
|
||||
printInfo("Cache hit")
|
||||
cacheChan <- results
|
||||
} else {
|
||||
printInfo("Cache miss")
|
||||
cacheChan <- nil
|
||||
}
|
||||
}()
|
||||
|
||||
select {
|
||||
case results := <-cacheChan:
|
||||
if results == nil {
|
||||
combinedResults = fetchTextResults(query, safe, lang, page)
|
||||
if len(combinedResults) > 0 {
|
||||
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
|
||||
}
|
||||
} else {
|
||||
textResults, _, _ := convertToSpecificResults(results)
|
||||
combinedResults = textResults
|
||||
}
|
||||
case <-time.After(2 * time.Second):
|
||||
printInfo("Cache check timeout")
|
||||
combinedResults = fetchTextResults(query, safe, lang, page)
|
||||
if len(combinedResults) > 0 {
|
||||
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
|
||||
}
|
||||
}
|
||||
|
||||
return combinedResults
|
||||
}
|
||||
|
||||
// prefetchPage warms the text-results cache for the given query/page so a
// later request for that page can be served from cache.
func prefetchPage(query, safe, lang string, page int) {
	cacheKey := CacheKey{Query: query, Page: page, Safe: safe == "true", Lang: lang, Type: "text"}
	if _, exists := resultsCache.Get(cacheKey); !exists {
		printInfo("Page %d not cached, caching now...", page)
		pageResults := fetchTextResults(query, safe, lang, page)
		// Only store non-empty result sets.
		if len(pageResults) > 0 {
			resultsCache.Set(cacheKey, convertToSearchResults(pageResults))
		}
	} else {
		printInfo("Page %d already cached", page)
	}
}
|
||||
|
||||
// fetchTextResults queries the configured text search engines in order and
// returns the first non-empty, validated result set; if all engines fail or
// return nothing, the query is forwarded to other nodes.
func fetchTextResults(query, safe, lang string, page int) []TextSearchResult {
	var results []TextSearchResult

	for _, engine := range textSearchEngines {
		printInfo("Using search engine: %s", engine.Name)

		searchResults, duration, err := engine.Func(query, safe, lang, page)
		// NOTE(review): engine is a per-iteration copy of the slice element, so
		// this metric update mutates the copy and is discarded; it should
		// address textSearchEngines[i] instead.
		updateEngineMetrics(&engine, duration, err == nil)
		if err != nil {
			printWarn("Error performing search with %s: %v", engine.Name, err)
			continue
		}

		results = append(results, validateResults(searchResults)...)

		// If results are found, break out of the loop
		if len(results) > 0 {
			break
		}
	}

	// If no results found after trying all engines
	if len(results) == 0 {
		printWarn("No text results found for query: %s, trying other nodes", query)
		results = tryOtherNodesForTextSearch(query, safe, lang, page, []string{hostID})
	}

	return results
}
|
||||
|
||||
// validateResults narrows raw SearchResult values to TextSearchResult and
// keeps only entries that have at least a URL or a Header.
// NOTE(review): the unchecked type assertion panics if a non-text result is
// ever passed in; also, the || keeps entries that have only one of URL/Header
// despite the comment below reading as if both were required — confirm intent.
func validateResults(searchResults []SearchResult) []TextSearchResult {
	var validResults []TextSearchResult

	// Remove anything that is missing a URL or Header
	for _, result := range searchResults {
		textResult := result.(TextSearchResult)
		if textResult.URL != "" || textResult.Header != "" {
			validResults = append(validResults, textResult)
		}
	}

	return validResults
}
|
||||
|
||||
// wrapTextSearchFunc adapts a text-search function to the generic SearchEngine
// signature by converting its []TextSearchResult into []SearchResult.
func wrapTextSearchFunc(f func(string, string, string, int) ([]TextSearchResult, time.Duration, error)) func(string, string, string, int) ([]SearchResult, time.Duration, error) {
	return func(query, safe, lang string, page int) ([]SearchResult, time.Duration, error) {
		textResults, duration, err := f(query, safe, lang, page)
		if err != nil {
			return nil, duration, err
		}
		// Box each concrete result into the interface slice.
		searchResults := make([]SearchResult, len(textResults))
		for i, result := range textResults {
			searchResults[i] = result
		}
		return searchResults, duration, nil
	}
}
|
||||
|
|
422
video.go
Normal file → Executable file
422
video.go
Normal file → Executable file
|
@ -1,211 +1,211 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"html/template"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"sync"
|
||||
"time"
|
||||
)
|
||||
|
||||
// retryDuration is how long an unresponsive piped instance stays disabled
// before the periodic check probes it again.
const retryDuration = 12 * time.Hour // Retry duration for unresponding piped instances

var (
	// pipedInstances lists the Piped API hosts tried, in order, for video search.
	pipedInstances = []string{
		"api.piped.yt",
		"pipedapi.moomoo.me",
		"pipedapi.darkness.services",
		"pipedapi.kavin.rocks",
		"piped-api.hostux.net",
		"pipedapi.syncpundit.io",
		"piped-api.cfe.re",
		"pipedapi.in.projectsegfau.lt",
		"piapi.ggtyler.dev",
		"piped-api.codespace.cz",
		"pipedapi.coldforge.xyz",
		"pipedapi.osphost.fi",
	}
	// disabledInstances marks hosts that recently failed; guarded by mu.
	disabledInstances = make(map[string]bool)
	// mu guards disabledInstances (and is held across makeHTMLRequest).
	mu sync.Mutex
	videoResultsChan = make(chan []VideoResult) // Channel to receive video results from other nodes
)

// VideoAPIResponse matches the structure of the JSON response from the Piped API
type VideoAPIResponse struct {
	Items []struct {
		URL          string `json:"url"`
		Title        string `json:"title"`
		UploaderName string `json:"uploaderName"`
		Views        int    `json:"views"`
		Thumbnail    string `json:"thumbnail"`
		Duration     int    `json:"duration"`
		UploadedDate string `json:"uploadedDate"`
		Type         string `json:"type"`
	} `json:"items"`
}
|
||||
|
||||
// formatViews renders a raw view count as a human-readable string, mirroring
// the Python implementation this was ported from: billions/millions get one
// decimal place, counts of 10,000 or more get a "K" suffix, exactly 1 is the
// singular "1 view", and everything else is the plain number.
func formatViews(views int) string {
	if views >= 1_000_000_000 {
		return fmt.Sprintf("%.1fB views", float64(views)/1_000_000_000)
	}
	if views >= 1_000_000 {
		return fmt.Sprintf("%.1fM views", float64(views)/1_000_000)
	}
	if views >= 10_000 {
		return fmt.Sprintf("%.1fK views", float64(views)/1_000)
	}
	if views == 1 {
		return fmt.Sprintf("%d view", views)
	}
	return fmt.Sprintf("%d views", views)
}
|
||||
|
||||
// formatDuration renders a duration in seconds as MM:SS, or HH:MM:SS when it
// is at least an hour; a negative value indicates a live stream.
func formatDuration(seconds int) string {
	if seconds < 0 {
		return "Live"
	}

	h := seconds / 3600
	m := (seconds % 3600) / 60
	s := seconds % 60

	if h > 0 {
		return fmt.Sprintf("%02d:%02d:%02d", h, m, s)
	}
	return fmt.Sprintf("%02d:%02d", m, s)
}
|
||||
|
||||
// init starts the background loop that periodically re-checks and reactivates
// disabled piped instances.
func init() {
	go checkDisabledInstancesPeriodically()
}
|
||||
|
||||
// checkDisabledInstancesPeriodically probes disabled piped instances once
// immediately and then on every retryDuration tick. It never returns; it is
// intended to run as a goroutine for the lifetime of the process.
func checkDisabledInstancesPeriodically() {
	checkAndReactivateInstances() // Initial immediate check
	ticker := time.NewTicker(retryDuration)
	defer ticker.Stop()

	for range ticker.C {
		checkAndReactivateInstances()
	}
}
|
||||
|
||||
// checkAndReactivateInstances probes every currently-disabled piped instance
// and re-enables the ones that respond again.
// NOTE(review): mu is held across the network probes in
// testInstanceAvailability, blocking makeHTMLRequest for the whole sweep —
// consider snapshotting the disabled set before probing.
func checkAndReactivateInstances() {
	mu.Lock()
	defer mu.Unlock()

	for instance, isDisabled := range disabledInstances {
		if isDisabled {
			// Check if the instance is available again
			if testInstanceAvailability(instance) {
				printInfo("Instance %s is now available and reactivated.", instance)
				// Deleting while ranging is safe for Go maps.
				delete(disabledInstances, instance)
			} else {
				printInfo("Instance %s is still not available.", instance)
			}
		}
	}
}
|
||||
|
||||
// testInstanceAvailability reports whether the given piped instance answers a
// trivial search request with HTTP 200.
func testInstanceAvailability(instance string) bool {
	resp, err := http.Get(fmt.Sprintf("https://%s/search?q=%s&filter=all", instance, url.QueryEscape("test")))
	if err != nil {
		return false
	}
	// Close the body so the underlying connection can be reused; the original
	// returned without ever closing it, leaking the connection.
	defer resp.Body.Close()
	return resp.StatusCode == http.StatusOK
}
|
||||
|
||||
func makeHTMLRequest(query, safe, lang string, page int) (*VideoAPIResponse, error) {
|
||||
var lastError error
|
||||
mu.Lock()
|
||||
defer mu.Unlock()
|
||||
|
||||
for _, instance := range pipedInstances {
|
||||
if disabledInstances[instance] {
|
||||
continue // Skip this instance because it's still disabled
|
||||
}
|
||||
|
||||
url := fmt.Sprintf("https://%s/search?q=%s&filter=all&safe=%s&lang=%s&page=%d", instance, url.QueryEscape(query), safe, lang, page)
|
||||
resp, err := http.Get(url)
|
||||
if err != nil || resp.StatusCode != http.StatusOK {
|
||||
printInfo("Disabling instance %s due to error or status code: %v", instance, err)
|
||||
disabledInstances[instance] = true
|
||||
lastError = fmt.Errorf("error making request to %s: %w", instance, err)
|
||||
continue
|
||||
}
|
||||
|
||||
defer resp.Body.Close()
|
||||
var apiResp VideoAPIResponse
|
||||
if err := json.NewDecoder(resp.Body).Decode(&apiResp); err != nil {
|
||||
lastError = fmt.Errorf("error decoding response from %s: %w", instance, err)
|
||||
continue
|
||||
}
|
||||
return &apiResp, nil
|
||||
}
|
||||
return nil, fmt.Errorf("all instances failed, last error: %v", lastError)
|
||||
}
|
||||
|
||||
// handleVideoSearch adapted from the Python `videoResults`, handles video search requests.
// It fetches video results (falling back to other nodes when the primary
// search returns nothing) and renders templates/videos.html.
// NOTE(review): the safe and lang parameters are ignored — settings.SafeSearch
// and settings.Language are used instead; confirm callers and drop the dead
// parameters.
func handleVideoSearch(w http.ResponseWriter, settings UserSettings, query, safe, lang string, page int) {
	start := time.Now()

	results := fetchVideoResults(query, settings.SafeSearch, settings.Language, page)
	if len(results) == 0 {
		printWarn("No results from primary search, trying other nodes")
		results = tryOtherNodesForVideoSearch(query, settings.SafeSearch, settings.Language, page, []string{hostID})
	}

	elapsed := time.Since(start)
	tmpl, err := template.New("videos.html").Funcs(funcs).ParseFiles("templates/videos.html")
	if err != nil {
		printErr("Error parsing template: %v", err)
		http.Error(w, "Internal Server Error", http.StatusInternalServerError)
		return
	}

	err = tmpl.Execute(w, map[string]interface{}{
		"Results":     results,
		"Query":       query,
		"Fetched":     fmt.Sprintf("%.2f seconds", elapsed.Seconds()),
		"Page":        page,
		"HasPrevPage": page > 1,
		"HasNextPage": len(results) > 0, // NOTE(review): any non-empty page claims a next page — wrong on the last page
		"Theme":       settings.Theme,
	})
	if err != nil {
		printErr("Error executing template: %v", err)
		http.Error(w, "Internal Server Error", http.StatusInternalServerError)
	}
}
|
||||
|
||||
func fetchVideoResults(query, safe, lang string, page int) []VideoResult {
|
||||
apiResp, err := makeHTMLRequest(query, safe, lang, page)
|
||||
if err != nil {
|
||||
printWarn("Error fetching video results: %v", err)
|
||||
return nil
|
||||
}
|
||||
|
||||
var results []VideoResult
|
||||
for _, item := range apiResp.Items {
|
||||
if item.Type == "channel" || item.Type == "playlist" {
|
||||
continue
|
||||
}
|
||||
if item.UploadedDate == "" {
|
||||
item.UploadedDate = "Now"
|
||||
}
|
||||
|
||||
results = append(results, VideoResult{
|
||||
Href: fmt.Sprintf("https://youtube.com%s", item.URL),
|
||||
Title: item.Title,
|
||||
Date: item.UploadedDate,
|
||||
Views: formatViews(item.Views),
|
||||
Creator: item.UploaderName,
|
||||
Publisher: "Piped",
|
||||
Image: fmt.Sprintf("/img_proxy?url=%s", url.QueryEscape(item.Thumbnail)),
|
||||
Duration: formatDuration(item.Duration),
|
||||
})
|
||||
}
|
||||
return results
|
||||
}
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"html/template"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"sync"
|
||||
"time"
|
||||
)
|
||||
|
||||
// retryDuration is how long an unresponsive piped instance stays disabled
// before the periodic check probes it again.
const retryDuration = 12 * time.Hour // Retry duration for unresponding piped instances

var (
	// pipedInstances lists the Piped API hosts tried, in order, for video search.
	pipedInstances = []string{
		"api.piped.yt",
		"pipedapi.moomoo.me",
		"pipedapi.darkness.services",
		"pipedapi.kavin.rocks",
		"piped-api.hostux.net",
		"pipedapi.syncpundit.io",
		"piped-api.cfe.re",
		"pipedapi.in.projectsegfau.lt",
		"piapi.ggtyler.dev",
		"piped-api.codespace.cz",
		"pipedapi.coldforge.xyz",
		"pipedapi.osphost.fi",
	}
	// disabledInstances marks hosts that recently failed; guarded by mu.
	disabledInstances = make(map[string]bool)
	// mu guards disabledInstances (and is held across makeHTMLRequest).
	mu sync.Mutex
	videoResultsChan = make(chan []VideoResult) // Channel to receive video results from other nodes
)

// VideoAPIResponse matches the structure of the JSON response from the Piped API
type VideoAPIResponse struct {
	Items []struct {
		URL          string `json:"url"`
		Title        string `json:"title"`
		UploaderName string `json:"uploaderName"`
		Views        int    `json:"views"`
		Thumbnail    string `json:"thumbnail"`
		Duration     int    `json:"duration"`
		UploadedDate string `json:"uploadedDate"`
		Type         string `json:"type"`
	} `json:"items"`
}
|
||||
|
||||
// Function to format views similarly to the Python code.
// formatViews renders a raw view count as a human-readable string: billions
// and millions get one decimal place, counts of 10,000 or more get a "K"
// suffix, exactly 1 is the singular "1 view", everything else the plain number.
func formatViews(views int) string {
	switch {
	case views >= 1_000_000_000:
		return fmt.Sprintf("%.1fB views", float64(views)/1_000_000_000)
	case views >= 1_000_000:
		return fmt.Sprintf("%.1fM views", float64(views)/1_000_000)
	case views >= 10_000:
		return fmt.Sprintf("%.1fK views", float64(views)/1_000)
	case views == 1:
		return fmt.Sprintf("%d view", views)
	default:
		return fmt.Sprintf("%d views", views)
	}
}
|
||||
|
||||
// formatDuration formats video duration as done in the Python code:
// MM:SS, or HH:MM:SS when at least an hour; a negative value means live.
func formatDuration(seconds int) string {
	if 0 > seconds {
		return "Live"
	}

	hours := seconds / 3600
	minutes := (seconds % 3600) / 60
	seconds = seconds % 60

	if hours > 0 {
		return fmt.Sprintf("%02d:%02d:%02d", hours, minutes, seconds)
	}
	return fmt.Sprintf("%02d:%02d", minutes, seconds)
}
|
||||
|
||||
// init starts the background loop that periodically re-checks and reactivates
// disabled piped instances.
func init() {
	go checkDisabledInstancesPeriodically()
}
|
||||
|
||||
// checkDisabledInstancesPeriodically probes disabled piped instances once
// immediately and then on every retryDuration tick. It never returns; it is
// intended to run as a goroutine for the lifetime of the process.
func checkDisabledInstancesPeriodically() {
	checkAndReactivateInstances() // Initial immediate check
	ticker := time.NewTicker(retryDuration)
	defer ticker.Stop()

	for range ticker.C {
		checkAndReactivateInstances()
	}
}
|
||||
|
||||
// checkAndReactivateInstances probes every currently-disabled piped instance
// and re-enables the ones that respond again.
// NOTE(review): mu is held across the network probes, blocking
// makeHTMLRequest for the whole sweep — consider snapshotting the set first.
func checkAndReactivateInstances() {
	mu.Lock()
	defer mu.Unlock()

	for instance, isDisabled := range disabledInstances {
		if isDisabled {
			// Check if the instance is available again
			if testInstanceAvailability(instance) {
				printInfo("Instance %s is now available and reactivated.", instance)
				// Deleting while ranging is safe for Go maps.
				delete(disabledInstances, instance)
			} else {
				printInfo("Instance %s is still not available.", instance)
			}
		}
	}
}
|
||||
|
||||
// testInstanceAvailability reports whether the given piped instance answers a
// trivial search request with HTTP 200.
// NOTE(review): the response body is never closed, leaking the connection —
// add a resp.Body.Close() after the error check.
func testInstanceAvailability(instance string) bool {
	resp, err := http.Get(fmt.Sprintf("https://%s/search?q=%s&filter=all", instance, url.QueryEscape("test")))
	if err != nil || resp.StatusCode != http.StatusOK {
		return false
	}
	return true
}
|
||||
|
||||
// makeHTMLRequest performs a video search against the first healthy piped
// instance and decodes its JSON response. Instances that error out or return a
// non-200 status are disabled until the periodic reactivation check; decode
// failures move on to the next instance without disabling it.
// NOTE(review): mu is held for the duration of the HTTP calls, serializing all
// video searches; the body is leaked on the non-200 path; and the deferred
// Close calls accumulate until the function returns.
func makeHTMLRequest(query, safe, lang string, page int) (*VideoAPIResponse, error) {
	var lastError error
	mu.Lock()
	defer mu.Unlock()

	for _, instance := range pipedInstances {
		if disabledInstances[instance] {
			continue // Skip this instance because it's still disabled
		}

		// NOTE(review): this local shadows the net/url package (legal because
		// the RHS is evaluated in the outer scope, but confusing).
		url := fmt.Sprintf("https://%s/search?q=%s&filter=all&safe=%s&lang=%s&page=%d", instance, url.QueryEscape(query), safe, lang, page)
		resp, err := http.Get(url)
		if err != nil || resp.StatusCode != http.StatusOK {
			printInfo("Disabling instance %s due to error or status code: %v", instance, err)
			disabledInstances[instance] = true
			lastError = fmt.Errorf("error making request to %s: %w", instance, err)
			continue
		}

		defer resp.Body.Close()
		var apiResp VideoAPIResponse
		if err := json.NewDecoder(resp.Body).Decode(&apiResp); err != nil {
			lastError = fmt.Errorf("error decoding response from %s: %w", instance, err)
			continue
		}
		return &apiResp, nil
	}
	return nil, fmt.Errorf("all instances failed, last error: %v", lastError)
}
|
||||
|
||||
// handleVideoSearch adapted from the Python `videoResults`, handles video search requests.
// It fetches video results (falling back to other nodes when the primary
// search returns nothing) and renders templates/videos.html with timing info.
func handleVideoSearch(w http.ResponseWriter, settings UserSettings, query string, page int) {
	start := time.Now()

	results := fetchVideoResults(query, settings.SafeSearch, settings.Language, page)
	if len(results) == 0 {
		printWarn("No results from primary search, trying other nodes")
		results = tryOtherNodesForVideoSearch(query, settings.SafeSearch, settings.Language, page, []string{hostID})
	}

	elapsed := time.Since(start)
	tmpl, err := template.New("videos.html").Funcs(funcs).ParseFiles("templates/videos.html")
	if err != nil {
		printErr("Error parsing template: %v", err)
		http.Error(w, "Internal Server Error", http.StatusInternalServerError)
		return
	}

	err = tmpl.Execute(w, map[string]interface{}{
		"Results":     results,
		"Query":       query,
		"Fetched":     fmt.Sprintf("%.2f seconds", elapsed.Seconds()),
		"Page":        page,
		"HasPrevPage": page > 1,
		"HasNextPage": len(results) > 0, // NOTE(review): any non-empty page claims a next page — wrong on the last page
		"Theme":       settings.Theme,
	})
	if err != nil {
		printErr("Error executing template: %v", err)
		http.Error(w, "Internal Server Error", http.StatusInternalServerError)
	}
}
|
||||
|
||||
// fetchVideoResults runs a video search through the piped API and maps the
// response items into VideoResult values. Channel and playlist entries are
// skipped; an empty upload date is shown as "Now". Returns nil when the
// upstream request fails.
func fetchVideoResults(query, safe, lang string, page int) []VideoResult {
	apiResp, err := makeHTMLRequest(query, safe, lang, page)
	if err != nil {
		printWarn("Error fetching video results: %v", err)
		return nil
	}

	var results []VideoResult
	for _, item := range apiResp.Items {
		// Only actual videos are rendered.
		if item.Type == "channel" || item.Type == "playlist" {
			continue
		}
		// item is a copy, so this default does not mutate apiResp.
		if item.UploadedDate == "" {
			item.UploadedDate = "Now"
		}

		results = append(results, VideoResult{
			Href:      fmt.Sprintf("https://youtube.com%s", item.URL),
			Title:     item.Title,
			Date:      item.UploadedDate,
			Views:     formatViews(item.Views),
			Creator:   item.UploaderName,
			Publisher: "Piped",
			Image:     fmt.Sprintf("/img_proxy?url=%s", url.QueryEscape(item.Thumbnail)),
			Duration:  formatDuration(item.Duration),
		})
	}
	return results
}
|
||||
|
|
Loading…
Add table
Reference in a new issue