Compare commits


No commits in common. "main" and "indexing" have entirely different histories.

101 changed files with 826 additions and 2673 deletions

README.md

@@ -47,11 +47,11 @@ A self-hosted private search engine designed to be scalable and more resource-efficient.
 ### For Self-Hosting
-- **[Easy to Set Up](https://weforge.xyz/Spitfire/Search/wiki/Setup-Other)** - Quick and straightforward setup process for anyone.
+- **Self-hosted option** - Run on your own server for even more privacy.
 - **Lightweight** - Low memory footprint (15-30MiB) even during searches.
 - **Decentralized** - No single point of failure.
 - **Results caching in RAM** - Faster response times through caching.
-- **[Configurable](https://weforge.xyz/Spitfire/Search/wiki/Config)** - Fully customizable via the `config.ini` file.
+- **Configurable** - Tweak features via `config.ini`.
 - **Flexible media support** - Images optionally stored on HDD/SSD for caching and improved response time.
 ### Results Sources
@@ -73,20 +73,30 @@ A self-hosted private search engine designed to be scalable and more resource-efficient.
 ### Running the QGato
+Linux:
 ```bash
 git clone https://weforge.xyz/Spitfire/Search.git
 cd Search
-go run .
+chmod +x ./run.sh
+./run.sh
+```
+Windows:
+```powershell
+git clone https://weforge.xyz/Spitfire/Search.git
+cd Search
+.\run.bat
 ```
 *It's that easy!*
 ### Configuring
-- Configuration is done via the `config.ini` file.
-- On first start, you will be guided through the basic setup.
-- For more advanced configuration options, visit the [Wiki Configuration Page](https://weforge.xyz/Spitfire/Search/wiki/Configuration).
+Configuration is done via the `config.ini` file.
+On first start, you will be guided through the basic setup.
+More advanced setup and all options will be listed here later, as this is still being updated.
 ## License

agent.go

@@ -11,13 +11,11 @@ import (
     "time"
 )
 
-// BrowserVersion represents the version & global usage from the caniuse data
 type BrowserVersion struct {
     Version string  `json:"version"`
     Global  float64 `json:"global"`
 }
 
-// BrowserData holds sets of versions for Firefox and Chromium
 type BrowserData struct {
     Firefox  []BrowserVersion `json:"firefox"`
     Chromium []BrowserVersion `json:"chrome"`
@@ -30,7 +28,6 @@ var (
     }{
         data: make(map[string]string),
     }
-
     browserCache = struct {
         sync.RWMutex
         data BrowserData
@@ -40,19 +37,26 @@ var (
     }
 )
 
-// fetchLatestBrowserVersions retrieves usage data from caniuse.coms fulldata JSON.
 func fetchLatestBrowserVersions() (BrowserData, error) {
-    const urlCaniuse = "https://raw.githubusercontent.com/Fyrd/caniuse/master/fulldata-json/data-2.0.json"
+    url := "https://raw.githubusercontent.com/Fyrd/caniuse/master/fulldata-json/data-2.0.json"
+
+    // // Optional: skip TLS verification to avoid certificate errors
+    // transport := &http.Transport{
+    //     TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
+    // }
+
+    // Increase the HTTP client timeout
     client := &http.Client{
         Timeout: 30 * time.Second,
+        // Transport: transport,
     }
 
-    req, err := http.NewRequest("GET", urlCaniuse, nil)
+    // Build the request manually to set headers
+    req, err := http.NewRequest("GET", url, nil)
     if err != nil {
         return BrowserData{}, err
     }
 
-    // Custom user agent and English language preference
+    // Set a simple custom User-Agent and language
     req.Header.Set("User-Agent", "MyCustomAgent/1.0 (compatible; +https://example.com)")
     req.Header.Set("Accept-Language", "en-US,en;q=0.9")
@@ -67,42 +71,36 @@ func fetchLatestBrowserVersions() (BrowserData, error) {
         return BrowserData{}, err
     }
 
-    var rawData map[string]any
+    var rawData map[string]interface{}
     if err := json.Unmarshal(body, &rawData); err != nil {
         return BrowserData{}, err
     }
 
-    stats, ok := rawData["agents"].(map[string]any)
-    if !ok {
-        return BrowserData{}, fmt.Errorf("unexpected JSON structure (no 'agents' field)")
-    }
+    stats := rawData["agents"].(map[string]interface{})
 
     var data BrowserData
 
-    // Extract Firefox data
-    if firefoxData, ok := stats["firefox"].(map[string]any); ok {
-        if usageMap, ok := firefoxData["usage_global"].(map[string]any); ok {
-            for version, usage := range usageMap {
-                val, _ := usage.(float64)
-                data.Firefox = append(data.Firefox, BrowserVersion{Version: version, Global: val})
-            }
-        }
-    }
+    if firefoxData, ok := stats["firefox"].(map[string]interface{}); ok {
+        for version, usage := range firefoxData["usage_global"].(map[string]interface{}) {
+            data.Firefox = append(data.Firefox, BrowserVersion{
+                Version: version,
+                Global:  usage.(float64),
+            })
+        }
+    }
 
-    // Extract Chrome data
-    if chromeData, ok := stats["chrome"].(map[string]any); ok {
-        if usageMap, ok := chromeData["usage_global"].(map[string]any); ok {
-            for version, usage := range usageMap {
-                val, _ := usage.(float64)
-                data.Chromium = append(data.Chromium, BrowserVersion{Version: version, Global: val})
-            }
-        }
-    }
+    if chromeData, ok := stats["chrome"].(map[string]interface{}); ok {
+        for version, usage := range chromeData["usage_global"].(map[string]interface{}) {
+            data.Chromium = append(data.Chromium, BrowserVersion{
+                Version: version,
+                Global:  usage.(float64),
+            })
+        }
+    }
 
     return data, nil
 }
 
-// getLatestBrowserVersions checks the cache and fetches new data if expired
 func getLatestBrowserVersions() (BrowserData, error) {
     browserCache.RLock()
     if time.Now().Before(browserCache.expires) {
@@ -119,36 +117,37 @@ func getLatestBrowserVersions() (BrowserData, error) {
     browserCache.Lock()
     browserCache.data = data
-    browserCache.expires = time.Now().Add(24 * time.Hour) // Refresh daily
+    browserCache.expires = time.Now().Add(24 * time.Hour)
     browserCache.Unlock()
 
     return data, nil
 }
 
-// randomUserAgent picks a random browser (Firefox/Chromium), selects a version based on usage,
-// picks an OS string, and composes a User-Agent header.
 func randomUserAgent() (string, error) {
     browsers, err := getLatestBrowserVersions()
     if err != nil {
         return "", err
     }
 
-    r := rand.New(rand.NewSource(time.Now().UnixNano()))
+    rand := rand.New(rand.NewSource(time.Now().UnixNano()))
 
-    // Overall usage: 80% chance for Chromium, 20% for Firefox
+    // Simulated browser usage statistics (in percentages)
     usageStats := map[string]float64{
-        "Firefox":  20.0,
-        "Chromium": 80.0,
+        "Firefox":  30.0,
+        "Chromium": 70.0,
     }
 
-    // Weighted random selection of the browser type
+    // Calculate the probabilities for the versions
+    probabilities := []float64{0.5, 0.25, 0.125, 0.0625, 0.03125, 0.015625, 0.0078125, 0.00390625}
+
+    // Select a browser based on usage statistics
     browserType := ""
-    randVal := r.Float64() * 100
+    randVal := rand.Float64() * 100
     cumulative := 0.0
-    for bType, usage := range usageStats {
+    for browser, usage := range usageStats {
         cumulative += usage
         if randVal < cumulative {
-            browserType = bType
+            browserType = browser
             break
         }
     }
@@ -165,16 +164,14 @@ func randomUserAgent() (string, error) {
         return "", fmt.Errorf("no versions found for browser: %s", browserType)
     }
 
-    // Sort by global usage descending
+    // Sort versions by usage (descending order)
     sort.Slice(versions, func(i, j int) bool {
         return versions[i].Global > versions[j].Global
     })
 
-    // Probability distribution for top few versions
-    probabilities := []float64{0.5, 0.25, 0.125, 0.0625, 0.03125, 0.015625, 0.0078125, 0.00390625}
-
+    // Select a version based on the probabilities
     version := ""
-    randVal = r.Float64()
+    randVal = rand.Float64()
     cumulative = 0.0
     for i, p := range probabilities {
         cumulative += p
@@ -184,72 +181,68 @@ func randomUserAgent() (string, error) {
         }
     }
 
-    // Fallback to the least used version if none matched
+    // Fallback to the last version if none matched
     if version == "" {
         version = versions[len(versions)-1].Version
     }
 
-    userAgent := generateUserAgent(browserType, version, r)
+    // Generate the user agent string
+    userAgent := generateUserAgent(browserType, version)
     return userAgent, nil
 }
 
-// generateUserAgent composes the final UA string given the browser, version, and OS.
-func generateUserAgent(browser, version string, r *rand.Rand) string {
+func generateUserAgent(browser, version string) string {
     oses := []struct {
         os          string
         probability float64
     }{
         {"Windows NT 10.0; Win64; x64", 44.0},
-        {"X11; Linux x86_64", 2.0},
-        {"X11; Ubuntu; Linux x86_64", 2.0},
+        {"Windows NT 11.0; Win64; x64", 44.0},
+        {"X11; Linux x86_64", 1.0},
+        {"X11; Ubuntu; Linux x86_64", 1.0},
         {"Macintosh; Intel Mac OS X 10_15_7", 10.0},
     }
 
-    // Weighted random selection for OS
-    randVal := r.Float64() * 100
+    // Select an OS based on probabilities
+    randVal := rand.Float64() * 100
     cumulative := 0.0
-    selectedOS := oses[0].os // Default in case distribution is off
-    for _, entry := range oses {
-        cumulative += entry.probability
+    selectedOS := ""
+    for _, os := range oses {
+        cumulative += os.probability
         if randVal < cumulative {
-            selectedOS = entry.os
+            selectedOS = os.os
             break
         }
     }
 
     switch browser {
     case "Firefox":
-        // Example: Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:117.0) Gecko/20100101 Firefox/117.0
         return fmt.Sprintf("Mozilla/5.0 (%s; rv:%s) Gecko/20100101 Firefox/%s", selectedOS, version, version)
     case "Chromium":
-        // Example: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.5938.132 Safari/537.36
         return fmt.Sprintf("Mozilla/5.0 (%s) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36", selectedOS, version)
-    default:
-        return ""
     }
+    return ""
 }
 
-// updateCachedUserAgents randomly updates half of the cached UAs to new versions
 func updateCachedUserAgents(newVersions BrowserData) {
     cache.Lock()
     defer cache.Unlock()
-
-    r := rand.New(rand.NewSource(time.Now().UnixNano()))
     for key, userAgent := range cache.data {
-        if r.Float64() < 0.5 {
-            updatedUserAgent := updateUserAgentVersion(userAgent, newVersions, r)
+        randVal := rand.Float64()
+        if randVal < 0.5 {
+            updatedUserAgent := updateUserAgentVersion(userAgent, newVersions)
             cache.data[key] = updatedUserAgent
         }
     }
 }
 
-// updateUserAgentVersion tries to parse the old UA, detect its browser, and update the version
-func updateUserAgentVersion(userAgent string, newVersions BrowserData, r *rand.Rand) string {
+func updateUserAgentVersion(userAgent string, newVersions BrowserData) string {
+    // Parse the current user agent to extract browser and version
     var browserType, version string
-
-    // Attempt to detect old UA patterns (Chromium or Firefox)
     if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36", &version); err == nil {
         browserType = "Chromium"
+    } else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (Windows NT 11.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36", &version); err == nil {
+        browserType = "Chromium"
     } else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36", &version); err == nil {
         browserType = "Chromium"
     } else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (X11; Ubuntu; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36", &version); err == nil {
@@ -258,6 +251,8 @@ func updateUserAgentVersion(userAgent string, newVersions BrowserData, r *rand.Rand) string {
         browserType = "Chromium"
     } else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:%s) Gecko/20100101 Firefox/%s", &version, &version); err == nil {
         browserType = "Firefox"
+    } else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (Windows NT 11.0; Win64; x64; rv:%s) Gecko/20100101 Firefox/%s", &version, &version); err == nil {
+        browserType = "Firefox"
     } else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (X11; Linux x86_64; rv:%s) Gecko/20100101 Firefox/%s", &version, &version); err == nil {
         browserType = "Firefox"
     } else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:%s) Gecko/20100101 Firefox/%s", &version, &version); err == nil {
@@ -266,37 +261,22 @@ func updateUserAgentVersion(userAgent string, newVersions BrowserData, r *rand.Rand) string {
         browserType = "Firefox"
     }
 
-    // Grab the newest version from the fetched data
+    // Get the latest version for that browser
     var latestVersion string
     if browserType == "Firefox" && len(newVersions.Firefox) > 0 {
-        // Sort by usage descending
-        sort.Slice(newVersions.Firefox, func(i, j int) bool {
-            return newVersions.Firefox[i].Global > newVersions.Firefox[j].Global
-        })
        latestVersion = newVersions.Firefox[0].Version
     } else if browserType == "Chromium" && len(newVersions.Chromium) > 0 {
-        // Sort by usage descending
-        sort.Slice(newVersions.Chromium, func(i, j int) bool {
-            return newVersions.Chromium[i].Global > newVersions.Chromium[j].Global
-        })
        latestVersion = newVersions.Chromium[0].Version
     }
 
-    // If we failed to detect the browser or have no data, just return the old UA
-    if browserType == "" || latestVersion == "" {
-        return userAgent
-    }
-
-    // Create a new random OS-based UA string with the latest version
-    return generateUserAgent(browserType, latestVersion, r)
+    // Update the user agent string with the new version
+    return generateUserAgent(browserType, latestVersion)
 }
 
-// periodicAgentUpdate periodically refreshes browser data and user agents
 func periodicAgentUpdate() {
     for {
-        // Sleep a random interval between 1 and 2 days
-        r := rand.New(rand.NewSource(time.Now().UnixNano()))
-        time.Sleep(time.Duration(24+r.Intn(24)) * time.Hour)
+        // Sleep for a random interval between 1 and 2 days
+        time.Sleep(time.Duration(24+rand.Intn(24)) * time.Hour)
 
         // Fetch the latest browser versions
         newVersions, err := fetchLatestBrowserVersions()
@@ -316,7 +296,6 @@ func periodicAgentUpdate() {
         }
     }
 }
 
-// GetUserAgent returns a cached UA for the given key or creates one if none exists.
 func GetUserAgent(cacheKey string) (string, error) {
     cache.RLock()
     userAgent, found := cache.data[cacheKey]
@@ -335,11 +314,9 @@ func GetUserAgent(cacheKey string) (string, error) {
     cache.data[cacheKey] = userAgent
     cache.Unlock()
 
-    printDebug("Generated (cached or new) user agent: %s", userAgent)
     return userAgent, nil
 }
 
-// GetNewUserAgent always returns a newly generated UA, overwriting the cache.
 func GetNewUserAgent(cacheKey string) (string, error) {
     userAgent, err := randomUserAgent()
     if err != nil {
@@ -350,7 +327,6 @@ func GetNewUserAgent(cacheKey string) (string, error) {
     cache.data[cacheKey] = userAgent
     cache.Unlock()
 
-    printDebug("Generated new user agent: %s", userAgent)
     return userAgent, nil
}

(unnamed file: shared result types and RAM caches)

@@ -62,18 +62,6 @@ type ForumSearchResult struct {
     ThumbnailSrc string `json:"thumbnailSrc,omitempty"`
 }
 
-type MusicResult struct {
-    URL           string
-    Title         string
-    Artist        string
-    Description   string
-    PublishedDate string
-    Thumbnail     string
-    // AudioURL   string
-    Source        string
-    Duration      string
-}
-
 // GeocodeCachedItem represents a geocoding result stored in the cache.
 type GeocodeCachedItem struct {
     Latitude string
@@ -135,11 +123,6 @@ func NewGeocodeCache() *GeocodeCache {
 // Get retrieves the results for a given key from the cache.
 func (rc *ResultsCache) Get(key CacheKey) ([]SearchResult, bool) {
-    // Skip if RAM caching is disabled
-    if !config.RamCacheEnabled {
-        return nil, false
-    }
-
     rc.mu.Lock()
     defer rc.mu.Unlock()
@@ -160,11 +143,6 @@ func (rc *ResultsCache) Get(key CacheKey) ([]SearchResult, bool) {
 // Set stores the results for a given key in the cache.
 func (rc *ResultsCache) Set(key CacheKey, results []SearchResult) {
-    // Skip if RAM caching is disabled
-    if !config.RamCacheEnabled {
-        return
-    }
-
     rc.mu.Lock()
     defer rc.mu.Unlock()
@@ -184,11 +162,6 @@ func (rc *ResultsCache) keyToString(key CacheKey) string {
 // checkAndCleanCache removes items if memory usage exceeds the limit.
 func (rc *ResultsCache) checkAndCleanCache() {
-    // Skip if RAM caching is disabled
-    if !config.RamCacheEnabled {
-        return
-    }
-
     if rc.currentMemoryUsage() > config.RamCache.MaxUsageBytes {
         rc.cleanOldestItems()
     }
@@ -206,11 +179,6 @@ func (rc *ResultsCache) currentMemoryUsage() uint64 {
 // Get retrieves the geocoding result for a given query from the cache.
 func (gc *GeocodeCache) Get(query string) (latitude, longitude string, found bool, exists bool) {
-    // Skip if RAM caching is disabled
-    if !config.RamCacheEnabled {
-        return "", "", false, false
-    }
-
     gc.mu.Lock()
     defer gc.mu.Unlock()
@@ -230,11 +198,6 @@ func (gc *GeocodeCache) Get(query string) (latitude, longitude string, found bool, exists bool) {
 }
 
 func (gc *GeocodeCache) Set(query, latitude, longitude string, found bool) {
-    // Skip if RAM caching is disabled
-    if !config.RamCacheEnabled {
-        return
-    }
-
     gc.mu.Lock()
     defer gc.mu.Unlock()
@@ -296,23 +259,15 @@ func convertToSearchResults(results interface{}) []SearchResult {
             genericResults[i] = r
         }
         return genericResults
-    case []MusicResult:
-        genericResults := make([]SearchResult, len(res))
-        for i, r := range res {
-            genericResults[i] = r
-        }
-        return genericResults
     }
     return nil
 }
 
-func convertToSpecificResults(results []SearchResult) ([]TextSearchResult, []TorrentResult, []ImageSearchResult, []ForumSearchResult, []MusicResult) {
+func convertToSpecificResults(results []SearchResult) ([]TextSearchResult, []TorrentResult, []ImageSearchResult, []ForumSearchResult) {
     var textResults []TextSearchResult
     var torrentResults []TorrentResult
     var imageResults []ImageSearchResult
     var forumResults []ForumSearchResult
-    var musicResults []MusicResult
     for _, r := range results {
         switch res := r.(type) {
         case TextSearchResult:
@@ -323,9 +278,7 @@ func convertToSpecificResults(results []SearchResult) ([]TextSearchResult, []TorrentResult, []ImageSearchResult, []ForumSearchResult) {
             imageResults = append(imageResults, res)
         case ForumSearchResult:
             forumResults = append(forumResults, res)
-        case MusicResult:
-            musicResults = append(musicResults, res)
         }
     }
-    return textResults, torrentResults, imageResults, forumResults, musicResults
+    return textResults, torrentResults, imageResults, forumResults
 }
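Both conversion helpers rely on Go type switches over an interface type to fan a mixed slice back out into concrete slices. A reduced sketch of the same pattern; `SearchResult` as an empty interface and the toy result types here are assumptions standing in for the project's real definitions:

```go
package main

import "fmt"

type SearchResult interface{}

type TextSearchResult struct{ Title string }
type ImageSearchResult struct{ URL string }

// split fans generic results back out into concrete slices.
func split(results []SearchResult) (texts []TextSearchResult, images []ImageSearchResult) {
	for _, r := range results {
		switch res := r.(type) {
		case TextSearchResult:
			texts = append(texts, res)
		case ImageSearchResult:
			images = append(images, res)
		}
	}
	return
}

func main() {
	mixed := []SearchResult{TextSearchResult{"a"}, ImageSearchResult{"b"}}
	t, i := split(mixed)
	fmt.Println(len(t), len(i)) // 1 1
}
```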

(unnamed file: utility helpers)

@@ -107,21 +107,3 @@ func GetIconPath() (string, string) {
     // Default paths
     return "/static/images/icon.svg", "/static/images/icon.png"
 }
-
-// FormatElapsedTime formats elapsed time as a string,
-// using:
-//   - "> 0.01 ms" if under 49µs
-//   - "0.xx ms" if under 1ms
-//   - "xxx ms" if under 300ms
-//   - "x.xx seconds" otherwise
-func FormatElapsedTime(elapsed time.Duration) string {
-    if elapsed < 49*time.Microsecond {
-        return fmt.Sprintf("> 0.01 %s", Translate("milliseconds"))
-    } else if elapsed < time.Millisecond {
-        ms := float64(elapsed.Microseconds()) / 1000.0
-        return fmt.Sprintf("%.2f %s", ms, Translate("milliseconds"))
-    } else if elapsed < 300*time.Millisecond {
-        return fmt.Sprintf("%d %s", elapsed.Milliseconds(), Translate("milliseconds"))
-    }
-    return fmt.Sprintf("%.2f %s", elapsed.Seconds(), Translate("seconds"))
-}
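The deleted `FormatElapsedTime` chooses a display unit by thresholding the duration. A standalone version of the same logic, with hard-coded English units in place of the project's `Translate` helper:

```go
package main

import (
	"fmt"
	"time"
)

// formatElapsed mirrors the deleted helper's thresholds with fixed units.
func formatElapsed(elapsed time.Duration) string {
	switch {
	case elapsed < 49*time.Microsecond:
		return "> 0.01 ms"
	case elapsed < time.Millisecond:
		return fmt.Sprintf("%.2f ms", float64(elapsed.Microseconds())/1000.0)
	case elapsed < 300*time.Millisecond:
		return fmt.Sprintf("%d ms", elapsed.Milliseconds())
	default:
		return fmt.Sprintf("%.2f seconds", elapsed.Seconds())
	}
}

func main() {
	fmt.Println(formatElapsed(120 * time.Millisecond)) // "120 ms"
	fmt.Println(formatElapsed(2 * time.Second))        // "2.00 seconds"
}
```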

config.go

@@ -22,43 +22,24 @@ type CacheConfig struct {
     Path string
 }
 
-type MetaSearchConfig struct {
-    Text  []string
-    Image []string
-    Files []string
-    Video []string
-}
-
 type Config struct {
-    Port     int
-    AuthCode string
-    PeerID   string
+    Port     int    // Added
+    AuthCode string // Added
+    PeerID   string // Added
     Peers    []string
-    Domain   string
-    NodesEnabled      bool
-    MetaSearchEnabled bool
-    IndexerEnabled    bool
-    WebsiteEnabled    bool
+    Domain   string // Added
+    NodesEnabled   bool // Added
+    CrawlerEnabled bool // Added
+    IndexerEnabled bool // Added
+    WebsiteEnabled bool // Added
     RamCacheEnabled bool
-    DriveCacheEnabled bool
-    MetaProxyEnabled  bool
-    MetaProxyStrict   bool
-    MetaProxyRetry    int
-    MetaProxies       []string
-    CrawlerProxyEnabled bool
-    CrawlerProxyStrict  bool
-    CrawlerProxies      []string
-    CrawlerProxyRetry   int
-    // Maybye add Proxy support for Image Extraction?
-    LogLevel int
+    DriveCacheEnabled bool // Added
+    LogLevel          int  // Added
     ConcurrentStandardCrawlers int
     ConcurrentChromeCrawlers   int
     CrawlingInterval  time.Duration // Refres crawled results in...
     MaxPagesPerDomain int           // Max pages to crawl per domain
     IndexBatchSize    int
-    LibreXInstances   []string
-    MetaSearch        MetaSearchConfig
 
     DriveCache CacheConfig
     RamCache   CacheConfig
@@ -70,52 +51,17 @@ var defaultConfig = Config{
     Peers:             []string{},
     AuthCode:          generateStrongRandomString(64),
     NodesEnabled:      false,
-    MetaSearchEnabled: true,
+    CrawlerEnabled:    true,
     IndexerEnabled:    false,
     WebsiteEnabled:    true,
     RamCacheEnabled:   true,
     DriveCacheEnabled: false,
-    MetaProxyEnabled:    false,
-    MetaProxyStrict:     true,
-    MetaProxies:         []string{},
-    MetaProxyRetry:      3,
-    CrawlerProxyEnabled: false,
-    CrawlerProxyStrict:  true,
-    CrawlerProxies:      []string{},
-    CrawlerProxyRetry:   1,
     ConcurrentStandardCrawlers: 12,
     ConcurrentChromeCrawlers:   4,
     CrawlingInterval:           24 * time.Hour,
     MaxPagesPerDomain:          10,
     IndexBatchSize:             50,
     LogLevel:                   1,
-    LibreXInstances: []string{"librex.antopie.org"},
-    MetaSearch: MetaSearchConfig{
-        // For Text search (skip SearXNG and LibreX by default, as that would be mega stupid)
-        Text: []string{"Google", "Brave", "DuckDuckGo"},
-
-        // For Image search
-        Image: []string{"Qwant", "Bing", "DeviantArt"},
-
-        // For Files search
-        Files: []string{"TorrentGalaxy", "ThePirateBay"},
-
-        // For Video (piped instances)
-        Video: []string{
-            "api.piped.yt",
-            "pipedapi.moomoo.me",
-            "pipedapi.darkness.services",
-            "pipedapi.kavin.rocks",
-            "piped-api.hostux.net",
-            "pipedapi.syncpundit.io",
-            "piped-api.cfe.re",
-            "pipedapi.in.projectsegfau.lt",
-            "piapi.ggtyler.dev",
-            "piped-api.codespace.cz",
-            "pipedapi.coldforge.xyz",
-            "pipedapi.osphost.fi",
-        },
-    },
     DriveCache: CacheConfig{
         Duration: 48 * time.Hour, // Added
         Path:     "./cache",      // Added
@@ -299,33 +245,14 @@ func saveConfig(config Config) {
     // Features section
     featuresSec := cfg.Section("Features")
     featuresSec.Key("Nodes").SetValue(strconv.FormatBool(config.NodesEnabled))
-    featuresSec.Key("Crawler").SetValue(strconv.FormatBool(config.MetaSearchEnabled))
+    featuresSec.Key("Crawler").SetValue(strconv.FormatBool(config.CrawlerEnabled))
     featuresSec.Key("Indexer").SetValue(strconv.FormatBool(config.IndexerEnabled))
     featuresSec.Key("Website").SetValue(strconv.FormatBool(config.WebsiteEnabled))
-    featuresSec.Key("MetaProxy").SetValue(strconv.FormatBool(config.MetaProxyEnabled))
-    featuresSec.Key("CrawlerProxy").SetValue(strconv.FormatBool(config.CrawlerProxyEnabled))
-
-    // Proxies section
-    proxiesSec := cfg.Section("Proxies")
-    proxiesSec.Key("MetaProxyStrict").SetValue(strconv.FormatBool(config.MetaProxyStrict))
-    proxiesSec.Key("MetaProxies").SetValue(strings.Join(config.MetaProxies, ","))
-    proxiesSec.Key("CrawlerProxyStrict").SetValue(strconv.FormatBool(config.CrawlerProxyStrict))
-    proxiesSec.Key("CrawlerProxies").SetValue(strings.Join(config.CrawlerProxies, ","))
-    proxiesSec.Key("MetaProxyRetry").SetValue(strconv.Itoa(config.MetaProxyRetry))
-    proxiesSec.Key("CrawlerProxyRetry").SetValue(strconv.Itoa(config.CrawlerProxyRetry))
-
-    // MetaSearch section
-    metaSec := cfg.Section("MetaSearches")
-    metaSec.Key("LibreXInstances").SetValue(strings.Join(config.LibreXInstances, ","))
-    metaSec.Key("Text").SetValue(strings.Join(config.MetaSearch.Text, ","))
-    metaSec.Key("Image").SetValue(strings.Join(config.MetaSearch.Image, ","))
-    metaSec.Key("Files").SetValue(strings.Join(config.MetaSearch.Files, ","))
-    metaSec.Key("Video").SetValue(strings.Join(config.MetaSearch.Video, ","))
 
     // Indexer section
     indexerSec := cfg.Section("Indexer")
     indexerSec.Key("ConcurrentStandardCrawlers").SetValue(strconv.Itoa(config.ConcurrentStandardCrawlers))
-    indexerSec.Key("ConcurrentChromeCrawlers").SetValue(strconv.Itoa(config.ConcurrentChromeCrawlers))
+    indexerSec.Key("ConcurrentChromeCrawlers").SetValue(strconv.Itoa(config.ConcurrentStandardCrawlers))
     indexerSec.Key("CrawlingInterval").SetValue(config.CrawlingInterval.String())
     indexerSec.Key("MaxPagesPerDomain").SetValue(strconv.Itoa(config.MaxPagesPerDomain))
     indexerSec.Key("IndexBatchSize").SetValue(strconv.Itoa(config.IndexBatchSize))
@@ -365,28 +292,11 @@ func loadConfig() Config {
     // Features
     nodesEnabled := getConfigValueBool(cfg.Section("Features").Key("Nodes"), defaultConfig.NodesEnabled)
-    metaSearchEnabled := getConfigValueBool(cfg.Section("Features").Key("Crawler"), defaultConfig.MetaSearchEnabled)
+    crawlerEnabled := getConfigValueBool(cfg.Section("Features").Key("Crawler"), defaultConfig.CrawlerEnabled)
     indexerEnabled := getConfigValueBool(cfg.Section("Features").Key("Indexer"), defaultConfig.IndexerEnabled)
     websiteEnabled := getConfigValueBool(cfg.Section("Features").Key("Website"), defaultConfig.WebsiteEnabled)
     ramCacheEnabled := getConfigValueBool(cfg.Section("Features").Key("RamCache"), defaultConfig.RamCacheEnabled)
     driveCacheEnabled := getConfigValueBool(cfg.Section("Features").Key("DriveCache"), defaultConfig.DriveCacheEnabled)
-    metaProxyEnabled := getConfigValueBool(cfg.Section("Features").Key("MetaProxy"), defaultConfig.MetaProxyEnabled)
-    crawlerProxyEnabled := getConfigValueBool(cfg.Section("Features").Key("CrawlerProxy"), defaultConfig.CrawlerProxyEnabled)
-
-    // Proxies
-    metaProxyStrict := getConfigValueBool(cfg.Section("Proxies").Key("MetaProxyStrict"), defaultConfig.MetaProxyStrict)
-    metaProxies := strings.Split(getConfigValueString(cfg.Section("Proxies").Key("MetaProxies"), ""), ",")
-    crawlerProxyStrict := getConfigValueBool(cfg.Section("Proxies").Key("CrawlerProxyStrict"), defaultConfig.CrawlerProxyStrict)
-    crawlerProxies := strings.Split(getConfigValueString(cfg.Section("Proxies").Key("CrawlerProxies"), ""), ",")
-    metaProxyRetry := getConfigValue(cfg.Section("Proxies").Key("MetaProxyRetry"), defaultConfig.MetaProxyRetry, strconv.Atoi)
-    crawlerProxyRetry := getConfigValue(cfg.Section("Proxies").Key("CrawlerProxyRetry"), defaultConfig.CrawlerProxyRetry, strconv.Atoi)
-
-    // MetaSearch
-    searchXInstances := strings.Split(getConfigValueString(cfg.Section("MetaSearches").Key("LibreXInstances"), strings.Join(defaultConfig.LibreXInstances, ",")), ",")
-    textList := strings.Split(getConfigValueString(cfg.Section("MetaSearch").Key("Text"), strings.Join(defaultConfig.MetaSearch.Text, ",")), ",")
-    imageList := strings.Split(getConfigValueString(cfg.Section("MetaSearch").Key("Image"), strings.Join(defaultConfig.MetaSearch.Image, ",")), ",")
-    filesList := strings.Split(getConfigValueString(cfg.Section("MetaSearch").Key("Files"), strings.Join(defaultConfig.MetaSearch.Files, ",")), ",")
-    videoList := strings.Split(getConfigValueString(cfg.Section("MetaSearch").Key("Video"), strings.Join(defaultConfig.MetaSearch.Video, ",")), ",")
 
     // Indexing
     concurrentStandardCrawlers := getConfigValue(cfg.Section("Indexer").Key("ConcurrentStandardCrawlers"), defaultConfig.ConcurrentStandardCrawlers, strconv.Atoi)
@@ -415,31 +325,16 @@ func loadConfig() Config {
     AuthCode:          authCode,
     Peers:             peers,
     NodesEnabled:      nodesEnabled,
-    MetaSearchEnabled: metaSearchEnabled,
+    CrawlerEnabled:    crawlerEnabled,
     IndexerEnabled:    indexerEnabled,
     WebsiteEnabled:    websiteEnabled,
     RamCacheEnabled:   ramCacheEnabled,
     DriveCacheEnabled: driveCacheEnabled,
-    MetaProxyEnabled:    metaProxyEnabled,
-    MetaProxyStrict:     metaProxyStrict,
-    MetaProxies:         metaProxies,
-    MetaProxyRetry:      metaProxyRetry,
-    CrawlerProxyEnabled: crawlerProxyEnabled,
-    CrawlerProxyStrict:  crawlerProxyStrict,
-    CrawlerProxies:      crawlerProxies,
-    CrawlerProxyRetry:   crawlerProxyRetry,
     ConcurrentStandardCrawlers: concurrentStandardCrawlers,
     ConcurrentChromeCrawlers:   concurrentChromeCrawlers,
     CrawlingInterval:           crawlingInterval,
     MaxPagesPerDomain:          maxPagesPerDomain,
     IndexBatchSize:             indexBatchSize,
-    LibreXInstances: searchXInstances,
-    MetaSearch: MetaSearchConfig{
-        Text:  textList,
-        Image: imageList,
-        Files: filesList,
-        Video: videoList,
-    },
     DriveCache: CacheConfig{
         Duration:      driveDuration,
         MaxUsageBytes: driveMaxUsage,
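Both branches funnel `config.ini` access through `getConfigValue*` wrappers over an INI library; the `cfg.Section(...).Key(...)` chain matches `gopkg.in/ini.v1`. A hedged sketch of the read-with-default pattern those wrappers appear to implement (the section and key names are illustrative):

```go
package main

import (
	"fmt"
	"log"

	"gopkg.in/ini.v1"
)

// boolOr returns the key's value, or def when the key is absent or invalid.
func boolOr(key *ini.Key, def bool) bool {
	if key == nil || key.String() == "" {
		return def
	}
	return key.MustBool(def)
}

func main() {
	cfg, err := ini.Load("config.ini")
	if err != nil {
		log.Fatalf("loading config: %v", err)
	}
	crawler := boolOr(cfg.Section("Features").Key("Crawler"), true)
	fmt.Println("crawler enabled:", crawler)
}
```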

(unnamed file: crawler page-metadata extraction)

@@ -32,12 +32,8 @@ func fetchPageMetadataStandard(pageURL, userAgent string) (string, string, string) {
 
 // fetchPageMetadataChrome uses Chromedp to handle JavaScript-rendered pages.
 func fetchPageMetadataChrome(pageURL, userAgent string) (string, string, string) {
-    // Create a custom allocator context for Chromedp with proxy support if enabled
-    allocCtx, cancelAlloc := chromedp.NewExecAllocator(context.Background(), configureChromeOptions()...)
-    defer cancelAlloc()
-
-    // Create a browser context
-    ctx, cancel := chromedp.NewContext(allocCtx)
+    // Create context
+    ctx, cancel := chromedp.NewContext(context.Background())
     defer cancel()
 
     var renderedHTML string
@@ -61,36 +57,9 @@ func fetchPageMetadataChrome(pageURL, userAgent string) (string, string, string) {
     return extractParsedDOM(doc)
 }
 
-// configureChromeOptions sets up Chrome options and proxy if CrawlerProxy is enabled.
-func configureChromeOptions() []chromedp.ExecAllocatorOption {
-    options := chromedp.DefaultExecAllocatorOptions[:]
-
-    // This code is not using config.CrawlerProxyRetry
-    if config.CrawlerProxyEnabled && crawlerProxyClient != nil {
-        // Retrieve proxy settings from CrawlerProxy
-        proxy := crawlerProxyClient.GetProxy() // Ensure a `GetProxy` method is implemented for your proxy client
-        if proxy != "" {
-            options = append(options, chromedp.ProxyServer(proxy))
-            printDebug("Using CrawlerProxy for Chromedp: %s", proxy)
-        } else {
-            printWarn("CrawlerProxy is enabled but no valid proxy is available")
-        }
-    }
-
-    // // Add additional Chrome
-    // options = append(options,
-    //     chromedp.Flag("headless", true),
-    //     chromedp.Flag("disable-gpu", true),
-    //     chromedp.Flag("no-sandbox", true),
-    //     chromedp.Flag("disable-setuid-sandbox", true),
-    // )
-
-    return options
-}
-
 // extractStandard does the normal HTML parse with OG, Twitter, etc.
 func extractStandard(pageURL, userAgent string) (title, desc, keywords string) {
+    client := &http.Client{Timeout: 15 * time.Second}
     req, err := http.NewRequest("GET", pageURL, nil)
     if err != nil {
         printDebug("Failed to create request for %s: %v", pageURL, err)
@@ -99,8 +68,7 @@ func extractStandard(pageURL, userAgent string) (title, desc, keywords string) {
     req.Header.Set("User-Agent", userAgent)
     req.Header.Set("Accept-Language", "en-US,en;q=0.9")
 
-    // Use CrawlerProxy if enabled
-    resp, err := DoCrawlerProxyRequest(req)
+    resp, err := client.Do(req)
     if err != nil {
         printDebug("Failed to GET %s: %v", pageURL, err)
         return
@@ -208,6 +176,7 @@ func fallbackReadability(pageURL, userAgent, title, desc, keywords string) (string, string, string) {
         return title, desc, keywords
     }
 
+    client := &http.Client{Timeout: 15 * time.Second}
     readReq, err := http.NewRequest("GET", pageURL, nil)
     if err != nil {
         printDebug("Failed to create fallbackReadability request: %v", err)
@@ -216,16 +185,14 @@ func fallbackReadability(pageURL, userAgent, title, desc, keywords string) (string, string, string) {
     readReq.Header.Set("User-Agent", userAgent)
     readReq.Header.Set("Accept-Language", "en-US,en;q=0.9")
 
-    // Use CrawlerProxy if enabled
-    readResp, err := DoCrawlerProxyRequest(readReq)
-    if err != nil {
-        printDebug("go-readability GET error for %s: %v", pageURL, err)
-        return title, desc, keywords
-    }
-
-    if readResp.StatusCode < 200 || readResp.StatusCode >= 300 {
-        printDebug("go-readability GET returned status %d for %s", readResp.StatusCode, pageURL)
-        readResp.Body.Close() // Safely close body
+    readResp, err := client.Do(readReq)
+    if err != nil || readResp.StatusCode < 200 || readResp.StatusCode >= 300 {
+        if err != nil {
+            printDebug("go-readability GET error for %s: %v", pageURL, err)
+        }
+        if readResp != nil {
+            readResp.Body.Close()
+        }
         return title, desc, keywords
     }
     defer readResp.Body.Close()
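With the proxy layer gone, `extractStandard` and `fallbackReadability` reduce to the same plain `http.Client` recipe: explicit timeout, custom User-Agent, status check, deferred close. The pattern in isolation (the URL and agent string are placeholders):

```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"time"
)

func fetch(pageURL, userAgent string) ([]byte, error) {
	client := &http.Client{Timeout: 15 * time.Second}
	req, err := http.NewRequest("GET", pageURL, nil)
	if err != nil {
		return nil, fmt.Errorf("creating request: %w", err)
	}
	req.Header.Set("User-Agent", userAgent)
	req.Header.Set("Accept-Language", "en-US,en;q=0.9")

	resp, err := client.Do(req)
	if err != nil {
		return nil, fmt.Errorf("making request: %w", err)
	}
	defer resp.Body.Close()

	if resp.StatusCode < 200 || resp.StatusCode >= 300 {
		return nil, fmt.Errorf("unexpected status: %d", resp.StatusCode)
	}
	return io.ReadAll(resp.Body)
}

func main() {
	body, err := fetch("https://example.com", "MyCustomAgent/1.0")
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Println(len(body), "bytes")
}
```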

(unnamed file: The Pirate Bay integration)

@@ -57,34 +57,31 @@ func (t *ThePirateBay) Search(query string, category string) ([]TorrentResult, error) {
         return []TorrentResult{}, nil
     }
 
-    searchURL := fmt.Sprintf("https://%s/q.php?q=%s&cat=%s", PIRATEBAY_DOMAIN, url.QueryEscape(query), categoryCode)
+    url := fmt.Sprintf("https://%s/q.php?q=%s&cat=%s", PIRATEBAY_DOMAIN, url.QueryEscape(query), categoryCode)
 
     // User Agent generation
     userAgent, err := GetUserAgent("files-tpb")
     if err != nil {
-        return nil, fmt.Errorf("error generating User-Agent: %w", err)
+        fmt.Println("Error:", err)
+        return nil, err
     }
 
-    req, err := http.NewRequest("GET", searchURL, nil)
+    req, err := http.NewRequest("GET", url, nil)
     if err != nil {
-        return nil, fmt.Errorf("error creating request: %w", err)
+        return nil, err
     }
     req.Header.Set("User-Agent", userAgent)
 
-    // Perform the request using MetaProxy if enabled
-    resp, err := DoMetaProxyRequest(req)
+    client := &http.Client{}
+    response, err := client.Do(req)
     if err != nil {
-        return nil, fmt.Errorf("error making request to The Pirate Bay: %w", err)
-    }
-    defer resp.Body.Close()
-
-    if resp.StatusCode != http.StatusOK {
-        return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
+        return nil, err
     }
+    defer response.Body.Close()
 
     var torrentData []map[string]interface{}
-    if err := json.NewDecoder(resp.Body).Decode(&torrentData); err != nil {
-        return nil, fmt.Errorf("error decoding response JSON: %w", err)
+    if err := json.NewDecoder(response.Body).Decode(&torrentData); err != nil {
+        return nil, err
     }
 
     var results []TorrentResult
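Both sides hit The Pirate Bay's `q.php` JSON endpoint and decode into `[]map[string]interface{}`. Decoding into a typed struct is usually sturdier; a sketch under the assumption that the endpoint follows the public apibay schema (the field names and the `apibay.org` domain below are not shown in the diff):

```go
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"net/url"
	"time"
)

// tpbEntry models only the fields this sketch cares about; the endpoint
// returns more, and these names are assumptions about its schema.
type tpbEntry struct {
	Name     string `json:"name"`
	InfoHash string `json:"info_hash"`
	Seeders  string `json:"seeders"`
	Leechers string `json:"leechers"`
	Size     string `json:"size"`
}

func searchTPB(domain, query, category string) ([]tpbEntry, error) {
	endpoint := fmt.Sprintf("https://%s/q.php?q=%s&cat=%s", domain, url.QueryEscape(query), category)
	client := &http.Client{Timeout: 10 * time.Second}
	resp, err := client.Get(endpoint)
	if err != nil {
		return nil, fmt.Errorf("requesting %s: %w", endpoint, err)
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
	}
	var entries []tpbEntry
	if err := json.NewDecoder(resp.Body).Decode(&entries); err != nil {
		return nil, fmt.Errorf("decoding response JSON: %w", err)
	}
	return entries, nil
}

func main() {
	entries, err := searchTPB("apibay.org", "linux iso", "0")
	fmt.Println(len(entries), err)
}
```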

(unnamed file: TorrentGalaxy integration)

@@ -62,17 +62,18 @@ func (tg *TorrentGalaxy) Search(query string, category string) ([]TorrentResult, error) {
     // User Agent generation
     userAgent, err := GetUserAgent("files-torrentgalaxy")
     if err != nil {
-        return nil, fmt.Errorf("error generating User-Agent: %w", err)
+        fmt.Println("Error:", err)
+        return nil, err
     }
 
     req, err := http.NewRequest("GET", searchURL, nil)
     if err != nil {
-        return nil, fmt.Errorf("error creating request: %w", err)
+        return nil, err
     }
     req.Header.Set("User-Agent", userAgent)
 
-    // Perform the request using MetaProxy if enabled
-    resp, err := DoMetaProxyRequest(req)
+    client := &http.Client{}
+    resp, err := client.Do(req)
     if err != nil {
         return nil, fmt.Errorf("error making request to TorrentGalaxy: %w", err)
     }

(unnamed file: file/torrent search handler)

@@ -30,25 +30,11 @@ var (
 
 var fileResultsChan = make(chan []TorrentResult)
 
-func initFileEngines() {
-
-    torrentGalaxy = nil
-    thePirateBay = nil
-    // nyaa = nil
-    // rutor = nil
-
-    for _, engineName := range config.MetaSearch.Files {
-        switch engineName {
-        case "TorrentGalaxy":
-            torrentGalaxy = NewTorrentGalaxy()
-        case "ThePirateBay":
-            thePirateBay = NewThePirateBay()
-        // case "Nyaa":
-        //     nyaa = NewNyaa()
-        // case "Rutor":
-        //     rutor = NewRutor()
-        }
-    }
+func init() {
+    torrentGalaxy = NewTorrentGalaxy()
+    // nyaa = NewNyaa()
+    thePirateBay = NewThePirateBay()
+    // rutor = NewRutor()
 }
 
 func handleFileSearch(w http.ResponseWriter, settings UserSettings, query string, page int) {
@@ -66,7 +52,7 @@ func handleFileSearch(w http.ResponseWriter, settings UserSettings, query string, page int) {
     data := map[string]interface{}{
         "Results":  combinedResults,
         "Query":    query,
-        "Fetched":  FormatElapsedTime(elapsedTime),
+        "Fetched":  fmt.Sprintf("%.2f %s", elapsedTime.Seconds(), Translate("seconds")), // Time for fetching results
         "Category": "all",
         "Sort":     "seed",
         "Page":     page,
@@ -102,7 +88,7 @@ func getFileResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page int) []TorrentResult {
     case results := <-cacheChan:
         if results == nil {
             // Fetch only if the cache miss occurs and Crawler is enabled
-            if config.MetaSearchEnabled {
+            if config.CrawlerEnabled {
                 combinedResults = fetchFileResults(query, safe, lang, page)
                 if len(combinedResults) > 0 {
                     resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
@@ -111,12 +97,12 @@ func getFileResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page int) []TorrentResult {
                 printDebug("Crawler disabled; skipping fetching.")
             }
         } else {
-            _, torrentResults, _, _, _ := convertToSpecificResults(results)
+            _, torrentResults, _, _ := convertToSpecificResults(results)
             combinedResults = torrentResults
         }
     case <-time.After(2 * time.Second):
         printDebug("Cache check timeout")
-        if config.MetaSearchEnabled {
+        if config.CrawlerEnabled {
             combinedResults = fetchFileResults(query, safe, lang, page)
             if len(combinedResults) > 0 {
                 resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
@@ -131,13 +117,13 @@ func getFileResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page int) []TorrentResult {
 
 func fetchFileResults(query, safe, lang string, page int) []TorrentResult {
     // If Crawler is disabled, skip fetching from torrent sites
-    if !config.MetaSearchEnabled {
+    if !config.CrawlerEnabled {
         printInfo("Crawler is disabled; skipping torrent site fetching.")
         return []TorrentResult{}
     }
 
     sites := []TorrentSite{torrentGalaxy, nyaa, thePirateBay, rutor}
-    var results []TorrentResult
+    results := []TorrentResult{}
 
     for _, site := range sites {
         if site == nil {
@@ -154,12 +140,9 @@ func fetchFileResults(query, safe, lang string, page int) []TorrentResult {
         }
     }
 
-    // If no results, try from other nodes
     if len(results) == 0 {
-        if config.NodesEnabled {
-            printWarn("No file results found for query: %s, trying other nodes", query)
-            results = tryOtherNodesForFileSearch(query, safe, lang, page, []string{hostID})
-        }
+        printWarn("No file results found for query: %s, trying other nodes", query)
+        results = tryOtherNodesForFileSearch(query, safe, lang, page, []string{hostID})
     }
 
     return results
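`getFileResultsFromCacheOrFetch` (and its forum counterpart later in this diff) races a cache channel against a two-second timer with `select`/`time.After`. The skeleton of that pattern, stripped of the project's types:

```go
package main

import (
	"fmt"
	"time"
)

// cacheOrFetch waits briefly for a cached answer, then falls back to fetch.
func cacheOrFetch(cacheChan <-chan []string, fetch func() []string) []string {
	select {
	case cached := <-cacheChan:
		if cached != nil {
			return cached
		}
		return fetch() // cache miss
	case <-time.After(2 * time.Second):
		return fetch() // cache lookup too slow
	}
}

func main() {
	ch := make(chan []string, 1)
	ch <- []string{"hit"}
	fmt.Println(cacheOrFetch(ch, func() []string { return []string{"fetched"} }))
}
```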

(unnamed file: Reddit forum search)

@@ -3,57 +3,54 @@ package main
 import (
     "encoding/json"
     "fmt"
+    "math"
     "net/http"
     "net/url"
     "time"
 )
 
 func PerformRedditSearch(query string, safe string, page int) ([]ForumSearchResult, error) {
-    if !config.MetaSearchEnabled {
+    if !config.CrawlerEnabled {
         printDebug("Crawler is disabled; skipping forum search.")
         return []ForumSearchResult{}, nil
     }
 
     const (
-        pageSize = 25
-        baseURL  = "https://www.reddit.com"
+        pageSize       = 25
+        baseURL        = "https://www.reddit.com"
+        maxRetries     = 5
+        initialBackoff = 2 * time.Second
     )
 
     var results []ForumSearchResult
 
-    offset := page * pageSize
-    searchURL := fmt.Sprintf("%s/search.json?q=%s&limit=%d&start=%d",
-        baseURL,
-        url.QueryEscape(query),
-        pageSize,
-        offset,
-    )
-
-    // Create request
-    req, err := http.NewRequest("GET", searchURL, nil)
-    if err != nil {
-        return nil, fmt.Errorf("creating request: %v", err)
-    }
-
-    // Set User-Agent
-    userAgent, uaErr := GetUserAgent("Reddit-Forum-Search")
-    if uaErr != nil {
-        return nil, fmt.Errorf("getting user agent: %v", uaErr)
-    }
-    req.Header.Set("User-Agent", userAgent)
-
-    // Make request using MetaProxy logic
-    resp, err := DoMetaProxyRequest(req)
+    searchURL := fmt.Sprintf("%s/search.json?q=%s&limit=%d&start=%d", baseURL, url.QueryEscape(query), pageSize, page*pageSize)
+    var resp *http.Response
+    var err error
+
+    // Retry logic with exponential backoff
+    for i := 0; i <= maxRetries; i++ {
+        resp, err = http.Get(searchURL)
+        if err != nil {
+            return nil, fmt.Errorf("making request: %v", err)
+        }
+        if resp.StatusCode != http.StatusTooManyRequests {
+            break
+        }
+
+        // Wait for some time before retrying
+        backoff := time.Duration(math.Pow(2, float64(i))) * initialBackoff
+        time.Sleep(backoff)
+    }
+
     if err != nil {
         return nil, fmt.Errorf("making request: %v", err)
     }
     defer resp.Body.Close()
 
-    // Validate response status
     if resp.StatusCode != http.StatusOK {
         return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
     }
 
-    // Parse JSON response
     var searchResults map[string]interface{}
     if err := json.NewDecoder(resp.Body).Decode(&searchResults); err != nil {
         return nil, fmt.Errorf("decoding response: %v", err)
@@ -69,9 +66,9 @@ func PerformRedditSearch(query string, safe string, page int) ([]ForumSearchResult, error) {
         return nil, fmt.Errorf("no children field in data")
     }
 
-    // Extract search results
+
     for _, post := range posts {
         postData := post.(map[string]interface{})["data"].(map[string]interface{})
         if safe == "active" && postData["over_18"].(bool) {
             continue
         }
@@ -81,7 +78,6 @@ func PerformRedditSearch(query string, safe string, page int) ([]ForumSearchResult, error) {
         if len(description) > 500 {
             description = description[:500] + "..."
         }
-
         publishedDate := time.Unix(int64(postData["created_utc"].(float64)), 0)
         permalink := postData["permalink"].(string)
         resultURL := fmt.Sprintf("%s%s", baseURL, permalink)
@@ -120,7 +116,7 @@ func handleForumsSearch(w http.ResponseWriter, settings UserSettings, query string, page int) {
         "Query":       query,
         "Results":     results,
         "Page":        page,
-        "Fetched":     FormatElapsedTime(elapsedTime),
+        "Fetched":     fmt.Sprintf("%.2f %s", elapsedTime.Seconds(), Translate("seconds")), // Time for fetching results
         "HasPrevPage": page > 1,
         "HasNextPage": len(results) >= 25,
         "NoResults":   len(results) == 0,
@@ -154,7 +150,7 @@ func getForumResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page int) []ForumSearchResult {
     case results := <-cacheChan:
         if results == nil {
             // Fetch only if the cache miss occurs and Crawler is enabled
-            if config.MetaSearchEnabled {
+            if config.CrawlerEnabled {
                 combinedResults = fetchForumResults(query, safe, lang, page)
                 if len(combinedResults) > 0 {
                     resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
@@ -168,7 +164,7 @@ func getForumResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page int) []ForumSearchResult {
             }
     case <-time.After(2 * time.Second):
         printDebug("Cache check timeout")
-        if config.MetaSearchEnabled {
+        if config.CrawlerEnabled {
             combinedResults = fetchForumResults(query, safe, lang, page)
             if len(combinedResults) > 0 {
                 resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
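The new side retries Reddit's JSON endpoint with exponential backoff when it gets HTTP 429. The same idea as a reusable helper; the retry count and base delay are illustrative, and unlike the loop above this closes the throttled response body before sleeping, which avoids leaking connections:

```go
package main

import (
	"fmt"
	"math"
	"net/http"
	"time"
)

// getWithBackoff retries on 429, doubling the wait on each attempt.
func getWithBackoff(url string, maxRetries int, base time.Duration) (*http.Response, error) {
	for i := 0; i <= maxRetries; i++ {
		resp, err := http.Get(url)
		if err != nil {
			return nil, err
		}
		if resp.StatusCode != http.StatusTooManyRequests {
			return resp, nil
		}
		resp.Body.Close() // drop the throttled response before retrying
		time.Sleep(time.Duration(math.Pow(2, float64(i))) * base)
	}
	return nil, fmt.Errorf("still throttled after %d retries", maxRetries)
}

func main() {
	resp, err := getWithBackoff("https://www.reddit.com/search.json?q=go", 5, 2*time.Second)
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.StatusCode)
}
```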

(unnamed file: Bing image search)

@@ -18,21 +18,8 @@ func PerformBingImageSearch(query, safe, lang string, page int) ([]ImageSearchResult, time.Duration, error) {
     // Build the search URL
     searchURL := buildBingSearchURL(query, page)
 
-    // Create the HTTP request
-    req, err := http.NewRequest("GET", searchURL, nil)
-    if err != nil {
-        return nil, 0, fmt.Errorf("creating request: %v", err)
-    }
-
-    // Set User-Agent
-    ImageUserAgent, err := GetUserAgent("Image-Search-Bing")
-    if err != nil {
-        return nil, 0, fmt.Errorf("generating User-Agent: %v", err)
-    }
-    req.Header.Set("User-Agent", ImageUserAgent)
-
-    // Use MetaProxy if enabled
-    resp, err := DoMetaProxyRequest(req)
+    // Make the HTTP request
+    resp, err := http.Get(searchURL)
     if err != nil {
         return nil, 0, fmt.Errorf("making request: %v", err)
     }

(unnamed file: DeviantArt image search)

@@ -87,15 +87,15 @@ func PerformDeviantArtImageSearch(query, safe, lang string, page int) ([]ImageSearchResult, time.Duration, error) {
         return nil, 0, err
     }
 
-    // Create the HTTP request
+    // Make the HTTP request with User-Agent header
+    client := &http.Client{}
     req, err := http.NewRequest("GET", searchURL, nil)
     if err != nil {
         return nil, 0, fmt.Errorf("creating request: %v", err)
     }
     req.Header.Set("User-Agent", DeviantArtImageUserAgent)
 
-    // Perform the request using MetaProxy if enabled
-    resp, err := DoMetaProxyRequest(req)
+    resp, err := client.Do(req)
     if err != nil {
         return nil, 0, fmt.Errorf("making request: %v", err)
     }
@@ -182,7 +182,7 @@ func PerformDeviantArtImageSearch(query, safe, lang string, page int) ([]ImageSearchResult, time.Duration, error) {
     duration := time.Since(startTime)
 
-    // Check if the number of results is zero
+    // Check if the number of results is one or less
     if len(results) == 0 {
         return nil, duration, fmt.Errorf("no images found")
     }

(unnamed file: Imgur image search)

@ -18,21 +18,7 @@ func PerformImgurImageSearch(query, safe, lang string, page int) ([]ImageSearchR
var results []ImageSearchResult var results []ImageSearchResult
searchURL := buildImgurSearchURL(query, page) searchURL := buildImgurSearchURL(query, page)
// Create the HTTP request resp, err := http.Get(searchURL)
req, err := http.NewRequest("GET", searchURL, nil)
if err != nil {
return nil, 0, fmt.Errorf("creating request: %v", err)
}
// Get the User-Agent string
imgurUserAgent, err := GetUserAgent("Image-Search-Imgur")
if err != nil {
return nil, 0, fmt.Errorf("getting user-agent: %v", err)
}
req.Header.Set("User-Agent", imgurUserAgent)
// Perform the HTTP request with MetaProxy if enabled
resp, err := DoMetaProxyRequest(req)
if err != nil { if err != nil {
return nil, 0, fmt.Errorf("making request: %v", err) return nil, 0, fmt.Errorf("making request: %v", err)
} }
@ -42,7 +28,6 @@ func PerformImgurImageSearch(query, safe, lang string, page int) ([]ImageSearchR
return nil, 0, fmt.Errorf("unexpected status code: %d", resp.StatusCode) return nil, 0, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
} }
// Parse the HTML document
doc, err := goquery.NewDocumentFromReader(resp.Body) doc, err := goquery.NewDocumentFromReader(resp.Body)
if err != nil { if err != nil {
return nil, 0, fmt.Errorf("loading HTML document: %v", err) return nil, 0, fmt.Errorf("loading HTML document: %v", err)
@ -91,35 +76,12 @@ func PerformImgurImageSearch(query, safe, lang string, page int) ([]ImageSearchR
duration := time.Since(startTime) // Calculate the duration duration := time.Since(startTime) // Calculate the duration
if len(results) == 0 {
return nil, duration, fmt.Errorf("no images found")
}
return results, duration, nil
}

// scrapeImageFromImgurPage scrapes the image source from the Imgur page
func scrapeImageFromImgurPage(pageURL string) string {
req, err := http.NewRequest("GET", pageURL, nil)
resp, err := http.Get(pageURL)
if err != nil {
fmt.Printf("Error creating request for page: %v\n", err)
return ""
}
// Get the User-Agent string
imgurUserAgent, err := GetUserAgent("Image-Search-Imgur")
if err == nil {
req.Header.Set("User-Agent", imgurUserAgent)
}
// Perform the request using MetaProxy if enabled
var resp *http.Response
if config.MetaProxyEnabled && metaProxyClient != nil {
resp, err = metaProxyClient.Do(req)
} else {
client := &http.Client{}
resp, err = client.Do(req)
}
if err != nil {
fmt.Printf("Error fetching page: %v\n", err)
return ""

View file

@ -97,7 +97,7 @@ func PerformQwantImageSearch(query, safe, lang string, page int) ([]ImageSearchR
// Ensure count + offset is within acceptable limits
if offset+resultsPerPage > 250 {
return nil, 0, fmt.Errorf("count + offset must be lower than 250 for Qwant")
return nil, 0, fmt.Errorf("count + offset must be lower than 250 for quant")
}
if safe == "" {
@ -113,21 +113,21 @@ func PerformQwantImageSearch(query, safe, lang string, page int) ([]ImageSearchR
offset,
safe)
// Create the HTTP request
client := &http.Client{Timeout: 10 * time.Second}
req, err := http.NewRequest("GET", apiURL, nil)
if err != nil {
return nil, 0, fmt.Errorf("creating request: %v", err)
}
// Get the User-Agent string
ImageUserAgent, err := GetUserAgent("Image-Search-Quant")
if err != nil {
return nil, 0, fmt.Errorf("getting user-agent: %v", err)
return nil, 0, err
}
req.Header.Set("User-Agent", ImageUserAgent)
// Perform the request with MetaProxy if enabled
req.Header.Set("User-Agent", ImageUserAgent) // Quant seems to not like some specific User-Agent strings
resp, err := DoMetaProxyRequest(req)
resp, err := client.Do(req)
if err != nil {
return nil, 0, fmt.Errorf("making request: %v", err)
}
@ -137,13 +137,11 @@ func PerformQwantImageSearch(query, safe, lang string, page int) ([]ImageSearchR
return nil, 0, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
}
// Parse the API response
var apiResp QwantAPIResponse
if err := json.NewDecoder(resp.Body).Decode(&apiResp); err != nil {
return nil, 0, fmt.Errorf("decoding response: %v", err)
}
// Process the results
var wg sync.WaitGroup
results := make([]ImageSearchResult, len(apiResp.Data.Result.Items))
@ -176,9 +174,5 @@ func PerformQwantImageSearch(query, safe, lang string, page int) ([]ImageSearchR
duration := time.Since(startTime) // Calculate the duration
if len(results) == 0 {
return nil, duration, fmt.Errorf("no images found")
}
return results, duration, nil
}

View file

@ -10,23 +10,12 @@ import (
var imageSearchEngines []SearchEngine

var allImageSearchEngines = []SearchEngine{
{Name: "Qwant", Func: wrapImageSearchFunc(PerformQwantImageSearch)},
{Name: "Bing", Func: wrapImageSearchFunc(PerformBingImageSearch)},
{Name: "DeviantArt", Func: wrapImageSearchFunc(PerformDeviantArtImageSearch)},
// {Name: "Imgur", Func: wrapImageSearchFunc(PerformImgurImageSearch), Weight: 4}, // example
}

func initImageEngines() {
imageSearchEngines = nil
for _, engineName := range config.MetaSearch.Image {
for _, candidate := range allImageSearchEngines {
if candidate.Name == engineName {
imageSearchEngines = append(imageSearchEngines, candidate)
break
}
}
}
}

func init() {
imageSearchEngines = []SearchEngine{
{Name: "Qwant", Func: wrapImageSearchFunc(PerformQwantImageSearch)},
{Name: "Bing", Func: wrapImageSearchFunc(PerformBingImageSearch)},
{Name: "DeviantArt", Func: wrapImageSearchFunc(PerformDeviantArtImageSearch)},
//{Name: "Imgur", Func: wrapImageSearchFunc(PerformImgurImageSearch), Weight: 4}, // Image proxy not working
}
}
@ -55,7 +44,7 @@ func handleImageSearch(w http.ResponseWriter, r *http.Request, settings UserSett
data := map[string]interface{}{
"Results": combinedResults,
"Query": query,
"Fetched": FormatElapsedTime(elapsedTime),
"Fetched": fmt.Sprintf("%.2f %s", elapsedTime.Seconds(), Translate("seconds")),
"Page": page,
"HasPrevPage": page > 1,
"HasNextPage": len(combinedResults) >= 50,
@ -97,7 +86,7 @@ func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
select {
case results := <-cacheChan:
if results == nil {
if config.MetaSearchEnabled {
if config.CrawlerEnabled {
combinedResults = fetchImageResults(query, safe, lang, page, synchronous)
if len(combinedResults) > 0 {
combinedResults = filterValidImages(combinedResults)
@ -107,12 +96,12 @@ func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
printDebug("Crawler disabled; skipping fetching from image search engines.")
}
} else {
_, _, imageResults, _, _ := convertToSpecificResults(results)
_, _, imageResults, _ := convertToSpecificResults(results)
combinedResults = filterValidImages(imageResults)
}
case <-time.After(2 * time.Second):
printDebug("Cache check timeout")
if config.MetaSearchEnabled {
if config.CrawlerEnabled {
combinedResults = fetchImageResults(query, safe, lang, page, synchronous)
if len(combinedResults) > 0 {
combinedResults = filterValidImages(combinedResults)
@ -129,8 +118,8 @@ func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
func fetchImageResults(query, safe, lang string, page int, synchronous bool) []ImageSearchResult {
var results []ImageSearchResult
// Check if MetaSearchEnabled is false
// Check if CrawlerEnabled is false
if !config.MetaSearchEnabled {
if !config.CrawlerEnabled {
printDebug("Crawler is disabled; skipping image search engine fetching.")
return results
}

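On the main side, the hard-coded init() is replaced by initImageEngines(), so the active engines come from config.MetaSearch.Image, which the loop above treats as a list of engine names. A standalone sketch of that selection behavior, with hypothetical names (not code from either branch):

```go
package main

import "fmt"

type engine struct{ Name string }

// selectByName mirrors the nested loop in initImageEngines: keep only the
// configured names, in the order the config lists them.
func selectByName(all []engine, wanted []string) []engine {
	var out []engine
	for _, name := range wanted {
		for _, e := range all {
			if e.Name == name {
				out = append(out, e)
				break
			}
		}
	}
	return out
}

func main() {
	all := []engine{{"Qwant"}, {"Bing"}, {"DeviantArt"}}
	fmt.Println(selectByName(all, []string{"DeviantArt", "Qwant"}))
	// Prints: [{DeviantArt} {Qwant}]
}
```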
26
init.go
View file

@ -13,16 +13,10 @@ func main() {
portFlag := flag.Int("port", 0, "Port number to run the application (overrides config)")
domainFlag := flag.String("domain", "", "Domain address for the application (overrides config)")
skipConfigFlag := flag.Bool("skip-config-check", false, "Skip interactive prompts and load config.ini")
configFlag := flag.String("config", "", "Path to configuration file (overrides default)")
// Parse command-line flags
flag.Parse()
// Override global configFilePath if --config flag is provided
if *configFlag != "" {
configFilePath = *configFlag
}
if *skipConfigFlag {
// Skip interactive configuration
if _, err := os.Stat(configFilePath); err == nil {
@ -66,24 +60,11 @@ func main() {
}
config.PeerID = hostID
if config.CrawlerProxyEnabled || config.MetaProxyEnabled {
InitProxies()
}
// Initiate Browser Agent updater
if config.MetaSearchEnabled || config.IndexerEnabled {
if config.CrawlerEnabled || config.IndexerEnabled {
go periodicAgentUpdate()
}
// Load List of Meta Search Engines
if config.MetaSearchEnabled {
initTextEngines()
initImageEngines()
initFileEngines()
initPipedInstances()
initMusicEngines()
}
InitializeLanguage("en") // Initialize language before generating OpenSearch
generateOpenSearchXML(config)
@ -143,6 +124,11 @@ func main() {
webCrawlerInit()
// No longer needed as crawled data are indexed immediately
// // Start periodic indexing (every 2 minutes)
// dataFilePath := filepath.Join(config.DriveCache.Path, "data_to_index.txt")
// startPeriodicIndexing(dataFilePath, 2*time.Minute)
printInfo("Indexer is enabled.")
} else {
printInfo("Indexer is disabled.")

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrents"
msgid "searching_for_new_results"
msgstr "Soek vir nuwe resultate"
msgstr "Soek vir nuwe resultate..."
msgid "previous"
msgstr "Vorige"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "تورنتات"
msgid "searching_for_new_results"
msgstr "جاري البحث عن نتائج جديدة"
msgstr "جاري البحث عن نتائج جديدة..."
msgid "previous"
msgstr "السابق"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Торэнты"
msgid "searching_for_new_results"
msgstr "Пошук новых вынікаў"
msgstr "Пошук новых вынікаў..."
msgid "previous"
msgstr "Папярэдняе"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Торенти"
msgid "searching_for_new_results"
msgstr "Търсят се нови резултати"
msgstr "Търсят се нови резултати..."
msgid "previous"
msgstr "Предишен"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrents"
msgid "searching_for_new_results"
msgstr "Cercant nous resultats"
msgstr "Cercant nous resultats..."
msgid "previous"
msgstr "Anterior"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrenty"
msgid "searching_for_new_results"
msgstr "Hledám nové výsledky"
msgstr "Hledám nové výsledky..."
msgid "previous"
msgstr "Předchozí"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrenter"
msgid "searching_for_new_results"
msgstr "Søger efter nye resultater"
msgstr "Søger efter nye resultater..."
msgid "previous"
msgstr "Forrige"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrents"
msgid "searching_for_new_results"
msgstr "Suche nach neuen Ergebnissen"
msgstr "Suche nach neuen Ergebnissen..."
msgid "previous"
msgstr "Vorherige"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrents"
msgid "searching_for_new_results"
msgstr "Αναζήτηση νέων αποτελεσμάτων"
msgstr "Αναζήτηση νέων αποτελεσμάτων..."
msgid "previous"
msgstr "Προηγούμενο"

View file

@ -88,9 +88,6 @@ msgstr "Video"
msgid "videos"
msgstr "Videos"
msgid "music"
msgstr "Music"
msgid "forum"
msgstr "Forum"
@ -110,7 +107,7 @@ msgid "torrents"
msgstr "Torrents"
msgid "searching_for_new_results"
msgstr "Searching for new results"
msgstr "Searching for new results..."
msgid "previous"
msgstr "Previous"
@ -119,13 +116,7 @@ msgid "next"
msgstr "Next"
msgid "fetched_in"
msgstr "Fetched in %s"
msgstr "Fetched in %s seconds"
msgid "seconds"
msgstr "seconds"
msgid "milliseconds"
msgstr "milliseconds"
msgid "sort_seeders"
msgstr "Number of Seeders"

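The en.po hunk mirrors a code change: main renders "fetched_in" through FormatElapsedTime with a bare "%s" and ships separate "seconds" and "milliseconds" strings, while the indexing side hard-codes seconds. FormatElapsedTime's body is not part of this diff; a plausible sketch of what the split strings allow, with a stubbed Translate:

```go
package main

import (
	"fmt"
	"time"
)

// Stub for illustration; the real Translate comes from the project's i18n layer.
func Translate(key string) string { return key }

// Hypothetical implementation: pick a unit so the "fetched_in" template
// only needs a single "%s" placeholder.
func FormatElapsedTime(d time.Duration) string {
	if d < time.Second {
		return fmt.Sprintf("%d %s", d.Milliseconds(), Translate("milliseconds"))
	}
	return fmt.Sprintf("%.2f %s", d.Seconds(), Translate("seconds"))
}

func main() {
	fmt.Println(FormatElapsedTime(250 * time.Millisecond)) // 250 milliseconds
	fmt.Println(FormatElapsedTime(1500 * time.Millisecond)) // 1.50 seconds
}
```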
View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torentoj"
msgid "searching_for_new_results"
msgstr "Serĉante novajn rezultojn"
msgstr "Serĉante novajn rezultojn..."
msgid "previous"
msgstr "Antaŭa"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrents"
msgid "searching_for_new_results"
msgstr "Buscando nuevos resultados"
msgstr "Buscando nuevos resultados..."
msgid "previous"
msgstr "Anterior"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrendid"
msgid "searching_for_new_results"
msgstr "Otsitakse uusi tulemusi"
msgstr "Otsitakse uusi tulemusi..."
msgid "previous"
msgstr "Eelmine"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "تورنت‌ها"
msgid "searching_for_new_results"
msgstr "در حال جستجوی نتایج جدید"
msgstr "در حال جستجوی نتایج جدید..."
msgid "previous"
msgstr "قبلی"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrentit"
msgid "searching_for_new_results"
msgstr "Haetaan uusia tuloksia"
msgstr "Haetaan uusia tuloksia..."
msgid "previous"
msgstr "Edellinen"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrents"
msgid "searching_for_new_results"
msgstr "Recherche de nouveaux résultats"
msgstr "Recherche de nouveaux résultats..."
msgid "previous"
msgstr "Précédent"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "टोरेंट्स"
msgid "searching_for_new_results"
msgstr "नए परिणामों की खोज कर रहे हैं"
msgstr "नए परिणामों की खोज कर रहे हैं..."
msgid "previous"
msgstr "पिछला"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrenti"
msgid "searching_for_new_results"
msgstr "Traže se novi rezultati"
msgstr "Traže se novi rezultati..."
msgid "previous"
msgstr "Prethodno"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrents"
msgid "searching_for_new_results"
msgstr "Új találatok keresése"
msgstr "Új találatok keresése..."
msgid "previous"
msgstr "Előző"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Թորրենտներ"
msgid "searching_for_new_results"
msgstr "Նոր արդյունքներ որոնվում են"
msgstr "Նոր արդյունքներ որոնվում են..."
msgid "previous"
msgstr "Նախորդը"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrent"
msgid "searching_for_new_results"
msgstr "Mencari hasil baru"
msgstr "Mencari hasil baru..."
msgid "previous"
msgstr "Sebelumnya"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrent"
msgid "searching_for_new_results"
msgstr "Ricerca di nuovi risultati"
msgstr "Ricerca di nuovi risultati..."
msgid "previous"
msgstr "Precedente"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "טורנטים"
msgid "searching_for_new_results"
msgstr "מחפש תוצאות חדשות"
msgstr "מחפש תוצאות חדשות..."
msgid "previous"
msgstr "הקודם"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "トレント"
msgid "searching_for_new_results"
msgstr "新しい結果を検索中"
msgstr "新しい結果を検索中..."
msgid "previous"
msgstr "前"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "토렌트"
msgid "searching_for_new_results"
msgstr "새로운 결과를 검색 중"
msgstr "새로운 결과를 검색 중..."
msgid "previous"
msgstr "이전"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrentai"
msgid "searching_for_new_results"
msgstr "Ieškoma naujų rezultatų"
msgstr "Ieškoma naujų rezultatų..."
msgid "previous"
msgstr "Ankstesnis"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torenti"
msgid "searching_for_new_results"
msgstr "Meklē jaunus rezultātus"
msgstr "Meklē jaunus rezultātus..."
msgid "previous"
msgstr "Iepriekšējais"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrents"
msgid "searching_for_new_results"
msgstr "Nieuwe resultaten zoeken"
msgstr "Nieuwe resultaten zoeken..."
msgid "previous"
msgstr "Vorige"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrenter"
msgid "searching_for_new_results"
msgstr "Søker etter nye resultater"
msgstr "Søker etter nye resultater..."
msgid "previous"
msgstr "Forrige"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrenty"
msgid "searching_for_new_results"
msgstr "Wyszukiwanie nowych wyników"
msgstr "Wyszukiwanie nowych wyników..."
msgid "previous"
msgstr "Poprzednie"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrents"
msgid "searching_for_new_results"
msgstr "Procurando por novos resultados"
msgstr "Procurando por novos resultados..."
msgid "previous"
msgstr "Anterior"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrenturi"
msgid "searching_for_new_results"
msgstr "Caut rezultate noi"
msgstr "Caut rezultate noi..."
msgid "previous"
msgstr "Anterior"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Торренты"
msgid "searching_for_new_results"
msgstr "Идёт поиск новых результатов"
msgstr "Идёт поиск новых результатов..."
msgid "previous"
msgstr "Предыдущий"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrenty"
msgid "searching_for_new_results"
msgstr "Hľadám nové výsledky"
msgstr "Hľadám nové výsledky..."
msgid "previous"
msgstr "Predchádzajúce"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrenti"
msgid "searching_for_new_results"
msgstr "Iskanje novih rezultatov"
msgstr "Iskanje novih rezultatov..."
msgid "previous"
msgstr "Prejšnje"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Торенти"
msgid "searching_for_new_results"
msgstr "Тражење нових резултата"
msgstr "Тражење нових резултата..."
msgid "previous"
msgstr "Претходно"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrents"
msgid "searching_for_new_results"
msgstr "Söker efter nya resultat"
msgstr "Söker efter nya resultat..."
msgid "previous"
msgstr "Föregående"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torenti"
msgid "searching_for_new_results"
msgstr "Inatafuta matokeo mapya"
msgstr "Inatafuta matokeo mapya..."
msgid "previous"
msgstr "Ya awali"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "ทอร์เรนต์"
msgid "searching_for_new_results"
msgstr "กำลังค้นหาผลลัพธ์ใหม่"
msgstr "กำลังค้นหาผลลัพธ์ใหม่..."
msgid "previous"
msgstr "ก่อนหน้า"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Mga Torrents"
msgid "searching_for_new_results"
msgstr "Naghahanap ng mga bagong resulta"
msgstr "Naghahanap ng mga bagong resulta..."
msgid "previous"
msgstr "Nakaraan"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrentler"
msgid "searching_for_new_results"
msgstr "Yeni sonuçlar aranıyor"
msgstr "Yeni sonuçlar aranıyor..."
msgid "previous"
msgstr "Önceki"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Торренти"
msgid "searching_for_new_results"
msgstr "Шукаю нові результати"
msgstr "Шукаю нові результати..."
msgid "previous"
msgstr "Попередній"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrents"
msgid "searching_for_new_results"
msgstr "Đang tìm kiếm kết quả mới"
msgstr "Đang tìm kiếm kết quả mới..."
msgid "previous"
msgstr "Trước"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "种子"
msgid "searching_for_new_results"
msgstr "正在搜索新结果"
msgstr "正在搜索新结果..."
msgid "previous"
msgstr "上一页"

View file

@ -107,7 +107,7 @@ msgid "torrents"
msgstr "種子"
msgid "searching_for_new_results"
msgstr "正在搜尋新結果"
msgstr "正在搜尋新結果..."
msgid "previous"
msgstr "上一頁"

View file

@ -164,8 +164,6 @@ func handleSearch(w http.ResponseWriter, r *http.Request) {
handleImageSearch(w, r, settings, query, page)
case "video":
handleVideoSearch(w, settings, query, page)
case "music":
handleMusicSearch(w, settings, query, page)
case "map":
handleMapSearch(w, settings, query)
case "forum":
@ -228,7 +226,7 @@ func runServer() {
w.Header().Set("Content-Type", "application/opensearchdescription+xml")
http.ServeFile(w, r, "static/opensearch.xml")
})
printInfo("Website is enabled.")
printInfo("Website functionality enabled.")
} else {
// Redirect all website routes to a "service disabled" handler
http.HandleFunc("/static/", handleWebsiteDisabled)
@ -240,7 +238,7 @@ func runServer() {
http.HandleFunc("/image_status", handleWebsiteDisabled)
http.HandleFunc("/privacy", handleWebsiteDisabled)
http.HandleFunc("/opensearch.xml", handleWebsiteDisabled)
printInfo("Website is disabled.")
printInfo("Website functionality disabled.")
}
if config.NodesEnabled {
@ -254,7 +252,7 @@ func runServer() {
func handleWebsiteDisabled(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "text/plain")
w.WriteHeader(http.StatusServiceUnavailable)
_, _ = w.Write([]byte("The website is currently disabled."))
_, _ = w.Write([]byte("The website functionality is currently disabled."))
}

func handlePrivacyPage(w http.ResponseWriter, r *http.Request) {

15
map.go
View file

@ -5,6 +5,7 @@ import (
"fmt"
"net/http"
"net/url"
"time"
)

type NominatimResponse struct {
@ -58,7 +59,7 @@ func geocodeQuery(query string) (latitude, longitude string, found bool, err err
func handleMapSearch(w http.ResponseWriter, settings UserSettings, query string) {
// Start measuring the time for geocoding the query
//startTime := time.Now()
startTime := time.Now()
// Geocode the query to get coordinates
latitude, longitude, found, err := geocodeQuery(query)
@ -69,15 +70,15 @@ func handleMapSearch(w http.ResponseWriter, settings UserSettings, query string)
}
// Measure the elapsed time for geocoding
//elapsed := time.Since(startTime)
elapsedTime := time.Since(startTime)
// Prepare the data to pass to the template
data := map[string]interface{}{
"Query": query,
"Latitude": latitude,
"Longitude": longitude,
"Found": found,
//"Fetched": FormatElapsedTime(elapsed), // not used in map tab
"Fetched": fmt.Sprintf("%.2f %s", elapsedTime.Seconds(), Translate("seconds")),
"Theme": settings.Theme,
"Safe": settings.SafeSearch,
"IsThemeDark": settings.IsThemeDark,

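The body of geocodeQuery sits outside the hunks shown here. For context, a minimal sketch against the public Nominatim search API, which returns lat/lon as strings much like the NominatimResponse type above; the endpoint and JSON field names are standard Nominatim, everything else is illustrative:

```go
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"net/url"
)

type place struct {
	Lat string `json:"lat"`
	Lon string `json:"lon"`
}

// Illustrative only: resolve a free-form query to coordinates.
func geocodeSketch(query string) (lat, lon string, found bool, err error) {
	endpoint := "https://nominatim.openstreetmap.org/search?format=json&limit=1&q=" + url.QueryEscape(query)
	req, err := http.NewRequest("GET", endpoint, nil)
	if err != nil {
		return "", "", false, err
	}
	// Nominatim's usage policy requires an identifying User-Agent.
	req.Header.Set("User-Agent", "QGato-sketch/0.1")
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return "", "", false, err
	}
	defer resp.Body.Close()
	var places []place
	if err := json.NewDecoder(resp.Body).Decode(&places); err != nil {
		return "", "", false, err
	}
	if len(places) == 0 {
		return "", "", false, nil
	}
	return places[0].Lat, places[0].Lon, true, nil
}

func main() {
	lat, lon, ok, err := geocodeSketch("Berlin")
	fmt.Println(lat, lon, ok, err)
}
```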
View file

@ -1,72 +0,0 @@
// music-bandcamp.go - Bandcamp specific implementation
package main
import (
"fmt"
"net/http"
"net/url"
"strings"
"github.com/PuerkitoBio/goquery"
)
func SearchBandcamp(query string, page int) ([]MusicResult, error) {
baseURL := "https://bandcamp.com/search?"
params := url.Values{
"q": []string{query},
"page": []string{fmt.Sprintf("%d", page)},
}
resp, err := http.Get(baseURL + params.Encode())
if err != nil {
return nil, fmt.Errorf("request failed: %v", err)
}
defer resp.Body.Close()
doc, err := goquery.NewDocumentFromReader(resp.Body)
if err != nil {
return nil, fmt.Errorf("failed to parse HTML: %v", err)
}
var results []MusicResult
doc.Find("li.searchresult").Each(func(i int, s *goquery.Selection) {
result := MusicResult{Source: "Bandcamp"}
// URL extraction
if urlSel := s.Find("div.itemurl a"); urlSel.Length() > 0 {
result.URL = strings.TrimSpace(urlSel.Text())
}
// Title extraction
if titleSel := s.Find("div.heading a"); titleSel.Length() > 0 {
result.Title = strings.TrimSpace(titleSel.Text())
}
// Artist extraction
if artistSel := s.Find("div.subhead"); artistSel.Length() > 0 {
result.Artist = strings.TrimSpace(artistSel.Text())
}
// Thumbnail extraction
if thumbSel := s.Find("div.art img"); thumbSel.Length() > 0 {
result.Thumbnail, _ = thumbSel.Attr("src")
}
// // Iframe URL construction
// if linkHref, exists := s.Find("div.itemurl a").Attr("href"); exists {
// if itemID := extractSearchItemID(linkHref); itemID != "" {
// itemType := strings.ToLower(strings.TrimSpace(s.Find("div.itemtype").Text()))
// result.IframeSrc = fmt.Sprintf(
// "https://bandcamp.com/EmbeddedPlayer/%s=%s/size=large/bgcol=000/linkcol=fff/artwork=small",
// itemType,
// itemID,
// )
// }
// }
results = append(results, result)
})
return results, nil
}

View file

@ -1,198 +0,0 @@
package main
import (
"encoding/json"
"fmt"
"io"
"net/http"
"net/url"
"regexp"
"strings"
"github.com/PuerkitoBio/goquery"
)
type SoundCloudTrack struct {
ID int `json:"id"`
Title string `json:"title"`
Permalink string `json:"permalink"`
ArtworkURL string `json:"artwork_url"`
Duration int `json:"duration"`
User struct {
Username string `json:"username"`
Permalink string `json:"permalink"`
} `json:"user"`
Streams struct {
HTTPMP3128URL string `json:"http_mp3_128_url"`
} `json:"streams"`
}
func SearchSoundCloud(query string, page int) ([]MusicResult, error) {
clientID, err := extractClientID()
if err != nil {
return searchSoundCloudViaScraping(query, page)
}
apiResults, err := searchSoundCloudViaAPI(query, clientID, page)
if err == nil && len(apiResults) > 0 {
return convertSoundCloudResults(apiResults), nil
}
return searchSoundCloudViaScraping(query, page)
}
func searchSoundCloudViaAPI(query, clientID string, page int) ([]SoundCloudTrack, error) {
const limit = 10
offset := (page - 1) * limit
apiUrl := fmt.Sprintf(
"https://api-v2.soundcloud.com/search/tracks?q=%s&client_id=%s&limit=%d&offset=%d",
url.QueryEscape(query),
clientID,
limit,
offset,
)
resp, err := http.Get(apiUrl)
if err != nil {
return nil, err
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
return nil, fmt.Errorf("API request failed with status: %d", resp.StatusCode)
}
var response struct {
Collection []SoundCloudTrack `json:"collection"`
}
if err := json.NewDecoder(resp.Body).Decode(&response); err != nil {
return nil, err
}
return response.Collection, nil
}
func convertSoundCloudResults(tracks []SoundCloudTrack) []MusicResult {
var results []MusicResult
for _, track := range tracks {
thumbnail := strings.Replace(track.ArtworkURL, "large", "t500x500", 1)
trackURL := fmt.Sprintf("https://soundcloud.com/%s/%s",
track.User.Permalink,
track.Permalink,
)
results = append(results, MusicResult{
Title: track.Title,
Artist: track.User.Username,
URL: trackURL,
Thumbnail: thumbnail,
//AudioURL: track.Streams.HTTPMP3128URL,
Source: "SoundCloud",
Duration: fmt.Sprintf("%d", track.Duration/1000),
})
}
return results
}
func searchSoundCloudViaScraping(query string, page int) ([]MusicResult, error) {
searchUrl := fmt.Sprintf("https://soundcloud.com/search/sounds?q=%s", url.QueryEscape(query))
resp, err := http.Get(searchUrl)
if err != nil {
return nil, err
}
defer resp.Body.Close()
doc, err := goquery.NewDocumentFromReader(resp.Body)
if err != nil {
return nil, err
}
var results []MusicResult
doc.Find("li.searchList__item").Each(func(i int, s *goquery.Selection) {
titleElem := s.Find("a.soundTitle__title")
artistElem := s.Find("a.soundTitle__username")
artworkElem := s.Find(".sound__coverArt")
title := strings.TrimSpace(titleElem.Text())
artist := strings.TrimSpace(artistElem.Text())
href, _ := titleElem.Attr("href")
thumbnail, _ := artworkElem.Find("span.sc-artwork").Attr("style")
if thumbnail != "" {
if matches := regexp.MustCompile(`url\((.*?)\)`).FindStringSubmatch(thumbnail); len(matches) > 1 {
thumbnail = strings.Trim(matches[1], `"`)
}
}
if title == "" || href == "" {
return
}
trackURL, err := url.Parse(href)
if err != nil {
return
}
if trackURL.Host == "" {
trackURL.Scheme = "https"
trackURL.Host = "soundcloud.com"
}
trackURL.Path = strings.ReplaceAll(trackURL.Path, "//", "/")
fullURL := trackURL.String()
results = append(results, MusicResult{
Title: title,
Artist: artist,
URL: fullURL,
Thumbnail: thumbnail,
Source: "SoundCloud",
})
})
return results, nil
}
func extractClientID() (string, error) {
resp, err := http.Get("https://soundcloud.com/")
if err != nil {
return "", err
}
defer resp.Body.Close()
doc, err := goquery.NewDocumentFromReader(resp.Body)
if err != nil {
return "", err
}
var clientID string
doc.Find("script[src]").Each(func(i int, s *goquery.Selection) {
if clientID != "" {
return
}
src, _ := s.Attr("src")
if strings.Contains(src, "sndcdn.com/assets/") {
resp, err := http.Get(src)
if err != nil {
return
}
defer resp.Body.Close()
body, _ := io.ReadAll(resp.Body)
re := regexp.MustCompile(`client_id:"([^"]+)"`)
matches := re.FindSubmatch(body)
if len(matches) > 1 {
clientID = string(matches[1])
}
}
})
if clientID == "" {
return "", fmt.Errorf("client_id not found")
}
return clientID, nil
}

View file

@ -1,81 +0,0 @@
package main
import (
"fmt"
"net/http"
"net/url"
"strings"
"time"
"github.com/PuerkitoBio/goquery"
)
func SearchSpotify(query string, page int) ([]MusicResult, error) {
searchUrl := fmt.Sprintf("https://open.spotify.com/search/%s", url.PathEscape(query))
client := &http.Client{
Timeout: 10 * time.Second,
CheckRedirect: func(req *http.Request, via []*http.Request) error {
return http.ErrUseLastResponse
},
}
req, err := http.NewRequest("GET", searchUrl, nil)
if err != nil {
return nil, fmt.Errorf("failed to create request: %v", err)
}
// Set user agent ?
resp, err := client.Do(req)
if err != nil {
return nil, fmt.Errorf("request failed: %v", err)
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
return nil, fmt.Errorf("received non-200 status code: %d", resp.StatusCode)
}
doc, err := goquery.NewDocumentFromReader(resp.Body)
if err != nil {
return nil, fmt.Errorf("failed to parse document: %v", err)
}
var results []MusicResult
// Find track elements
doc.Find(`div[data-testid="tracklist-row"]`).Each(func(i int, s *goquery.Selection) {
// Extract title
title := s.Find(`div[data-testid="tracklist-row__title"] a`).Text()
title = strings.TrimSpace(title)
// Extract artist
artist := s.Find(`div[data-testid="tracklist-row__artist"] a`).First().Text()
artist = strings.TrimSpace(artist)
// Extract duration
duration := s.Find(`div[data-testid="tracklist-row__duration"]`).First().Text()
duration = strings.TrimSpace(duration)
// Extract URL
path, _ := s.Find(`div[data-testid="tracklist-row__title"] a`).Attr("href")
fullUrl := fmt.Sprintf("https://open.spotify.com%s", path)
// Extract thumbnail
thumbnail, _ := s.Find(`img[aria-hidden="false"]`).Attr("src")
if title != "" && artist != "" {
results = append(results, MusicResult{
Title: title,
Artist: artist,
URL: fullUrl,
Duration: duration,
Thumbnail: thumbnail,
Source: "Spotify",
})
}
})
return results, nil
}

View file

@ -1,85 +0,0 @@
package main
import (
"encoding/json"
"fmt"
"net/http"
"net/url"
)
type MusicAPIResponse struct {
Items []struct {
Title string `json:"title"`
UploaderName string `json:"uploaderName"`
Duration int `json:"duration"`
Thumbnail string `json:"thumbnail"`
URL string `json:"url"`
} `json:"items"` // Removed VideoID since we'll parse from URL
}
func SearchMusicViaPiped(query string, page int) ([]MusicResult, error) {
var lastError error
mu.Lock()
defer mu.Unlock()
for _, instance := range pipedInstances {
if disabledInstances[instance] {
continue
}
url := fmt.Sprintf(
"https://%s/search?q=%s&filter=music_songs&page=%d",
instance,
url.QueryEscape(query),
page,
)
resp, err := http.Get(url)
if err != nil || resp.StatusCode != http.StatusOK {
printInfo("Disabling instance %s due to error: %v", instance, err)
disabledInstances[instance] = true
lastError = fmt.Errorf("request to %s failed: %w", instance, err)
continue
}
defer resp.Body.Close()
var apiResp MusicAPIResponse
if err := json.NewDecoder(resp.Body).Decode(&apiResp); err != nil {
lastError = fmt.Errorf("failed to decode response from %s: %w", instance, err)
continue
}
return convertPipedToMusicResults(instance, apiResp), nil
}
return nil, fmt.Errorf("all Piped instances failed, last error: %v", lastError)
}
func convertPipedToMusicResults(instance string, resp MusicAPIResponse) []MusicResult {
seen := make(map[string]bool)
var results []MusicResult
for _, item := range resp.Items {
// Extract video ID from URL
u, err := url.Parse(item.URL)
if err != nil {
continue
}
videoID := u.Query().Get("v")
if videoID == "" || seen[videoID] {
continue
}
seen[videoID] = true
results = append(results, MusicResult{
Title: item.Title,
Artist: item.UploaderName,
URL: fmt.Sprintf("https://music.youtube.com%s", item.URL),
Duration: formatDuration(item.Duration),
Thumbnail: item.Thumbnail,
Source: "YouTube Music",
//AudioURL: fmt.Sprintf("https://%s/stream/%s", instance, videoID),
})
}
return results
}

176
music.go
View file

@ -1,176 +0,0 @@
// music.go - Central music search handler
package main
import (
"net/http"
"sync"
"time"
)
type MusicSearchEngine struct {
Name string
Func func(query string, page int) ([]MusicResult, error)
}
var (
musicSearchEngines []MusicSearchEngine
cacheMutex = &sync.Mutex{}
)
var allMusicSearchEngines = []MusicSearchEngine{
{Name: "SoundCloud", Func: SearchSoundCloud},
{Name: "YouTube", Func: SearchMusicViaPiped},
{Name: "Bandcamp", Func: SearchBandcamp},
//{Name: "Spotify", Func: SearchSpotify},
}
func initMusicEngines() {
// Initialize with all engines if no specific config
musicSearchEngines = allMusicSearchEngines
}
func handleMusicSearch(w http.ResponseWriter, settings UserSettings, query string, page int) {
start := time.Now()
cacheKey := CacheKey{
Query: query,
Page: page,
Type: "music",
Lang: settings.SearchLanguage,
Safe: settings.SafeSearch == "active",
}
var results []MusicResult
if cached, found := resultsCache.Get(cacheKey); found {
if musicResults, ok := convertCacheToMusicResults(cached); ok {
results = musicResults
}
}
if len(results) == 0 {
results = fetchMusicResults(query, page)
if len(results) > 0 {
resultsCache.Set(cacheKey, convertMusicResultsToCache(results))
}
}
go prefetchMusicPages(query, page)
elapsed := time.Since(start) // Calculate duration
data := map[string]interface{}{
"Results": results,
"Query": query,
"Page": page,
"HasPrevPage": page > 1,
"HasNextPage": len(results) >= 10, // Default page size
"MusicServices": getMusicServiceNames(),
"CurrentService": "all", // Default service
"Theme": settings.Theme,
"IsThemeDark": settings.IsThemeDark,
"Trans": Translate,
"Fetched": FormatElapsedTime(elapsed),
}
renderTemplate(w, "music.html", data)
}
// Helper to get music service names
func getMusicServiceNames() []string {
names := make([]string, len(allMusicSearchEngines))
for i, engine := range allMusicSearchEngines {
names[i] = engine.Name
}
return names
}
func convertMusicResultsToCache(results []MusicResult) []SearchResult {
cacheResults := make([]SearchResult, len(results))
for i, r := range results {
cacheResults[i] = r
}
return cacheResults
}
func convertCacheToMusicResults(cached []SearchResult) ([]MusicResult, bool) {
results := make([]MusicResult, 0, len(cached))
for _, item := range cached {
if musicResult, ok := item.(MusicResult); ok {
results = append(results, musicResult)
} else {
return nil, false
}
}
return results, true
}
func fetchMusicResults(query string, page int) []MusicResult {
var results []MusicResult
resultsChan := make(chan []MusicResult, len(musicSearchEngines))
var wg sync.WaitGroup
for _, engine := range musicSearchEngines {
wg.Add(1)
go func(e MusicSearchEngine) {
defer wg.Done()
res, err := e.Func(query, page)
if err == nil && len(res) > 0 {
resultsChan <- res
}
}(engine)
}
go func() {
wg.Wait()
close(resultsChan)
}()
for res := range resultsChan {
results = append(results, res...)
if len(results) >= 50 { // Default max results
break
}
}
return deduplicateResults(results)
}
func prefetchMusicPages(query string, currentPage int) {
for _, page := range []int{currentPage - 1, currentPage + 1} {
if page < 1 {
continue
}
cacheKey := CacheKey{
Query: query,
Page: page,
Type: "music",
}
if _, found := resultsCache.Get(cacheKey); !found {
go fetchMusicResults(query, page)
}
}
}
func deduplicateResults(results []MusicResult) []MusicResult {
seen := make(map[string]bool)
var unique []MusicResult
for _, res := range results {
if !seen[res.URL] {
seen[res.URL] = true
unique = append(unique, res)
}
}
return unique
}
// func generatePlayerHTML(result MusicResult) template.HTML {
// if result.IframeSrc != "" {
// return template.HTML(fmt.Sprintf(
// `<iframe width="100%%" height="166" scrolling="no" frameborder="no" src="%s"></iframe>`,
// result.IframeSrc,
// ))
// }
// return template.HTML("")
// }

View file

@ -5,7 +5,7 @@ import (
"crypto/rand"
"encoding/json"
"fmt"
"io"
"io/ioutil"
"net/http"
"time"
)
@ -65,10 +65,7 @@ func sendMessage(serverAddr string, msg Message) error {
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
body, err := io.ReadAll(resp.Body)
body, _ := ioutil.ReadAll(resp.Body)
if err != nil {
return fmt.Errorf("failed to read response body: %v", err)
}
return fmt.Errorf("server error: %s", body)
}

270
proxy.go
View file

@ -1,270 +0,0 @@
package main
import (
"fmt"
"net/http"
"strings"
"sync"
"time"
"golang.org/x/net/proxy"
)
// ProxyConfig holds configuration for a single proxy.
type ProxyConfig struct {
Address string
Username string
Password string
}
// ProxyClient provides an HTTP client pool for proxies.
type ProxyClient struct {
clients []*http.Client
lock sync.Mutex
index int
}
// Package-level proxy clients
var (
metaProxyClient *ProxyClient
crawlerProxyClient *ProxyClient
)
// NewProxyClientPool creates a pool of HTTP clients with SOCKS5 proxies.
func NewProxyClientPool(proxies []ProxyConfig, timeout time.Duration) (*ProxyClient, error) {
if len(proxies) == 0 {
return nil, fmt.Errorf("no proxies provided")
}
clients := make([]*http.Client, len(proxies))
for i, pc := range proxies {
var auth *proxy.Auth
if pc.Username != "" || pc.Password != "" {
auth = &proxy.Auth{
User: pc.Username,
Password: pc.Password,
}
}
dialer, err := proxy.SOCKS5("tcp", pc.Address, auth, proxy.Direct)
if err != nil {
return nil, fmt.Errorf("failed to create SOCKS5 dialer for %s: %w", pc.Address, err)
}
transport := &http.Transport{Dial: dialer.Dial}
clients[i] = &http.Client{
Transport: transport,
Timeout: timeout,
}
}
return &ProxyClient{clients: clients}, nil
}
// Do sends an HTTP request using the next proxy in the pool.
func (p *ProxyClient) Do(req *http.Request) (*http.Response, error) {
p.lock.Lock()
client := p.clients[p.index]
p.index = (p.index + 1) % len(p.clients)
p.lock.Unlock()
return client.Do(req)
}
func (p *ProxyClient) GetProxy() string {
p.lock.Lock()
defer p.lock.Unlock()
if len(p.clients) == 0 {
return ""
}
// Round-robin proxy retrieval
client := p.clients[p.index]
p.index = (p.index + 1) % len(p.clients)
// Assume each client has a proxy string saved
// Example implementation depends on how your proxies are configured
proxyTransport, ok := client.Transport.(*http.Transport)
if ok && proxyTransport.Proxy != nil {
proxyURL, _ := proxyTransport.Proxy(nil)
if proxyURL != nil {
return proxyURL.String()
}
}
return ""
}
// ParseProxies parses the proxy strings in the format ADDRESS:PORT or ADDRESS:PORT:USER:PASSWORD.
func ParseProxies(proxyStrings []string) []ProxyConfig {
var proxies []ProxyConfig
for _, proxyStr := range proxyStrings {
parts := strings.Split(proxyStr, ":")
switch len(parts) {
case 2: // ADDRESS:PORT
proxies = append(proxies, ProxyConfig{
Address: fmt.Sprintf("%s:%s", parts[0], parts[1]),
})
case 4: // ADDRESS:PORT:USER:PASSWORD
proxies = append(proxies, ProxyConfig{
Address: fmt.Sprintf("%s:%s", parts[0], parts[1]),
Username: parts[2],
Password: parts[3],
})
default:
fmt.Printf("Invalid proxy format: %s\n", proxyStr)
}
}
return proxies
}
// InitProxies initializes the proxy clients for Meta and Crawler proxies.
func InitProxies() {
// Initialize Meta Proxy Client
if config.MetaProxyEnabled {
metaProxies := ParseProxies(config.MetaProxies)
client, err := NewProxyClientPool(metaProxies, 30*time.Second)
if err != nil {
if config.MetaProxyStrict {
panic(fmt.Sprintf("Failed to initialize Meta proxies: %v", err))
}
fmt.Printf("Warning: Meta proxy initialization failed: %v\n", err)
}
metaProxyClient = client
}
// Initialize Crawler Proxy Client
if config.CrawlerProxyEnabled {
crawlerProxies := ParseProxies(config.CrawlerProxies)
client, err := NewProxyClientPool(crawlerProxies, 30*time.Second)
if err != nil {
if config.CrawlerProxyStrict {
panic(fmt.Sprintf("Failed to initialize Crawler proxies: %v", err))
}
fmt.Printf("Warning: Crawler proxy initialization failed: %v\n", err)
}
crawlerProxyClient = client
}
}
// Doer is an interface so we can accept *http.Client or *ProxyClient for requests.
type Doer interface {
Do(*http.Request) (*http.Response, error)
}
// DoProxyRequest handles “try direct, then proxy if needed,” with retries if proxy is used.
//
// - strict: if true, always try proxy first if enabled; if not available, do one direct attempt
// - enabled: whether this type of proxy is turned on
// - retryCount: how many times to retry with the proxy
// - proxyClient: the pool of proxy connections
func DoProxyRequest(req *http.Request, strict bool, enabled bool, retryCount int, proxyClient *ProxyClient) (*http.Response, error) {
// 1) If !strict => try direct once first
if !strict {
resp, err := tryRequestOnce(req, http.DefaultClient)
if isSuccessful(resp, err) {
return resp, nil
}
// If direct fails => if proxy is enabled, retry
if enabled && proxyClient != nil {
resp, err = tryRequestWithRetry(req, proxyClient, retryCount)
if isSuccessful(resp, err) {
return resp, nil
}
return nil, fmt.Errorf("failed after direct & proxy attempts: %v", err)
}
return nil, fmt.Errorf("request failed direct, no valid proxy: %v", err)
}
// 2) If strict => if proxy is enabled, try it up to “retryCount”
if enabled && proxyClient != nil {
resp, err := tryRequestWithRetry(req, proxyClient, retryCount)
if isSuccessful(resp, err) {
return resp, nil
}
return nil, fmt.Errorf("failed after %d proxy attempts: %v", retryCount, err)
}
// If strict but no proxy => direct once
resp, err := tryRequestOnce(req, http.DefaultClient)
if isSuccessful(resp, err) {
return resp, nil
}
return nil, fmt.Errorf("direct request failed in strict mode, no proxy: %v", err)
}
// Helper Wrapper functions for DoProxyRequest()
func DoMetaProxyRequest(req *http.Request) (*http.Response, error) {
return DoProxyRequest(
req,
config.MetaProxyStrict,
config.MetaProxyEnabled,
config.MetaProxyRetry,
metaProxyClient,
)
}
func DoCrawlerProxyRequest(req *http.Request) (*http.Response, error) {
return DoProxyRequest(
req,
config.CrawlerProxyStrict,
config.CrawlerProxyEnabled,
config.CrawlerProxyRetry,
crawlerProxyClient,
)
}
// tryRequestWithRetry tries the request up to "retries" times, waiting 200ms between attempts.
func tryRequestWithRetry(req *http.Request, client Doer, retries int) (*http.Response, error) {
var resp *http.Response
var err error
for i := 1; i <= retries; i++ {
if resp != nil {
resp.Body.Close()
}
printDebug("Attempt %d of %d with proxy/client...", i, retries)
resp, err = tryRequestOnce(req, client)
if isSuccessful(resp, err) {
return resp, nil
}
time.Sleep(200 * time.Millisecond)
}
return resp, err
}
// tryRequestOnce sends a single request with the given client. If client is nil, uses default client.
func tryRequestOnce(req *http.Request, client Doer) (*http.Response, error) {
if client == nil {
client = http.DefaultClient
}
resp, err := client.Do(req)
return resp, err
}
// isSuccessful checks if err==nil & resp != nil & resp.StatusCode in [200..299].
func isSuccessful(resp *http.Response, err error) bool {
if err != nil || resp == nil {
return false
}
return resp.StatusCode >= 200 && resp.StatusCode < 300
}
// func main() {
// config := loadConfig()
// // Initialize proxies if enabled
// if config.CrawlerProxyEnabled || config.MetaProxyEnabled {
// InitProxies()
// }
// // Example usage
// if metaProxyClient != nil {
// req, _ := http.NewRequest("GET", "https://example.com", nil)
// resp, err := metaProxyClient.Do(req)
// if err != nil {
// fmt.Printf("Error using MetaProxyClient: %v\n", err)
// } else {
// fmt.Printf("Meta Proxy Response Status: %s\n", resp.Status)
// resp.Body.Close()
// }
// }
// }

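For reference, the two proxy string shapes ParseProxies accepts, exercised with made-up addresses (a sketch, not part of the diff):

```go
package main

import "fmt"

// Assumes ProxyConfig and ParseProxies from proxy.go above.
func main() {
	proxies := ParseProxies([]string{
		"127.0.0.1:1080",             // ADDRESS:PORT, no auth
		"203.0.113.7:1080:user:pass", // ADDRESS:PORT:USER:PASSWORD
		"bad-entry",                  // rejected with a warning
	})
	for _, p := range proxies {
		fmt.Printf("%s (authenticated: %v)\n", p.Address, p.Username != "")
	}
}
```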
61
run.bat
View file

@ -5,7 +5,7 @@ rem Initialize variables
set SKIP_CONFIG=""
set PORT=""
set DOMAIN=""
set CONFIG_FILE=""
set BUILD_MODE=false
set BUILD_OUTPUT=qgato.exe
rem Parse arguments
@ -23,14 +23,13 @@ if "%~1"=="--domain" (
shift
goto parse_args
)
if "%~1"=="--config" (
set CONFIG_FILE=%~2
shift
shift
goto parse_args
)
if "%~1"=="--skip-config-check" (
set SKIP_CONFIG=--skip-config-check
shift
goto parse_args
)
if "%~1"=="--build" (
set BUILD_MODE=true
shift
goto parse_args
)
@ -51,29 +50,37 @@ for %%f in (*.go) do (
)
)
rem Always delete and rebuild the binary
echo Cleaning previous build...
if exist "%BUILD_OUTPUT%" del "%BUILD_OUTPUT%"
echo Building application...
go build -o "%BUILD_OUTPUT%" !GO_FILES!
if errorlevel 1 (
echo Build failed!
exit /b 1
)
echo Build successful! Output: %CD%\%BUILD_OUTPUT%
rem Construct the command
set CMD=%BUILD_OUTPUT% !SKIP_CONFIG!
if not "%PORT%"=="" set CMD=!CMD! --port %PORT%
if not "%DOMAIN%"=="" set CMD=!CMD! --domain %DOMAIN%
if not "%CONFIG_FILE%"=="" set CMD=!CMD! --config %CONFIG_FILE%
rem Informative output
echo Starting application with command: !CMD!
rem Run the built executable
call !CMD!

if "%BUILD_MODE%"=="true" (
rem Build mode
echo Building application...
go build -o "%BUILD_OUTPUT%" !GO_FILES!
if errorlevel 1 (
echo Build failed!
exit /b 1
)
echo Build successful! Output: %CD%\%BUILD_OUTPUT%
) else (
rem Check if the executable exists
if not exist "%BUILD_OUTPUT%" (
echo Executable not found. Building it first...
go build -o "%BUILD_OUTPUT%" !GO_FILES!
if errorlevel 1 (
echo Build failed! Unable to run the application.
exit /b 1
)
)
rem Construct the command
set CMD="%BUILD_OUTPUT% !SKIP_CONFIG!"
if not "%PORT%"=="" set CMD=!CMD! --port %PORT%
if not "%DOMAIN%"=="" set CMD=!CMD! --domain %DOMAIN%
rem Informative output
echo Starting application with command: !CMD!
rem Run the application
call !CMD!
)
rem Return to the original directory
popd

59
run.sh
View file

@ -4,7 +4,7 @@
SKIP_CONFIG=""
PORT=""
DOMAIN=""
CONFIG_FILE=""
BUILD_MODE=false
BUILD_OUTPUT="qgato"
# Parse arguments
@ -18,14 +18,14 @@ while [ $# -gt 0 ]; do
DOMAIN=$2
shift 2
;;
--config)
CONFIG_FILE=$2
shift 2
;;
--skip-config-check)
SKIP_CONFIG="--skip-config-check"
shift
;;
--build)
BUILD_MODE=true
shift
;;
*)
echo "Unknown argument: $1"
exit 1
@ -39,26 +39,33 @@ SCRIPT_DIR=$(dirname "$0")
# List all Go files in the script directory (excluding test files)
GO_FILES=$(find "$SCRIPT_DIR" -name '*.go' ! -name '*_test.go' -print)
# Always delete and rebuild the binary
echo "Cleaning previous build..."
rm -f "$SCRIPT_DIR/$BUILD_OUTPUT"
echo "Building application..."
go build -o "$SCRIPT_DIR/$BUILD_OUTPUT" $GO_FILES
if [ $? -eq 0 ]; then
echo "Build successful! Output: $SCRIPT_DIR/$BUILD_OUTPUT"
else
echo "Build failed!"
exit 1
fi
# Construct the run command
CMD="$SCRIPT_DIR/$BUILD_OUTPUT $SKIP_CONFIG"
[ -n "$PORT" ] && CMD="$CMD --port $PORT"
[ -n "$DOMAIN" ] && CMD="$CMD --domain $DOMAIN"
[ -n "$CONFIG_FILE" ] && CMD="$CMD --config $CONFIG_FILE"
echo "Starting application with command: $CMD"
# Run the built executable
eval $CMD

if $BUILD_MODE; then
# Build mode
echo "Building application..."
go build -o "$SCRIPT_DIR/$BUILD_OUTPUT" $GO_FILES
if [ $? -eq 0 ]; then
echo "Build successful! Output: $SCRIPT_DIR/$BUILD_OUTPUT"
else
echo "Build failed!"
exit 1
fi
else
# Run mode
CMD="./$BUILD_OUTPUT $SKIP_CONFIG"
[ -n "$PORT" ] && CMD="$CMD --port $PORT"
[ -n "$DOMAIN" ] && CMD="$CMD --domain $DOMAIN"
if [ ! -f "$SCRIPT_DIR/$BUILD_OUTPUT" ]; then
echo "Executable not found. Building it first..."
go build -o "$SCRIPT_DIR/$BUILD_OUTPUT" $GO_FILES
if [ $? -ne 0 ]; then
echo "Build failed! Unable to run the application."
exit 1
fi
fi
echo "Starting application with command: $CMD"
# Run the executable
eval $CMD
fi

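Putting the two scripts side by side, hypothetical invocations of each (flag names taken from the parsers above; port, domain, and paths are made up):

```bash
# main branch: always rebuilds, then runs; --config is supported
./run.sh --port 5000 --domain search.example.com --config ./config.ini

# indexing branch: --build only compiles; without it the script
# builds the binary if missing, then runs it
./run.sh --build
./run.sh --port 5000 --skip-config-check
```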
View file

@ -31,5 +31,5 @@
font-family: 'Material Icons Round';
font-style: normal;
font-weight: 400;
src: url('/static/fonts/MaterialIcons-Round.woff2') format('woff2');
src: url('/static/fonts/material-icons-round-v108-latin-regular.woff2') format('woff2');
}

View file

@ -1,63 +0,0 @@
/* Image Loading Effect */
.loading-image {
position: relative;
overflow: hidden;
background-color: var(--snip-background);
background-image: linear-gradient(
90deg,
rgba(255, 255, 255, 0) 25%,
rgba(255, 255, 255, 0.15) 50%,
rgba(255, 255, 255, 0) 75%
);
background-size: 200% 100%;
animation: image-wave 2s infinite linear;
}
/* Title Loading Effect */
.title-loading {
position: relative;
overflow: hidden;
color: transparent !important;
background-color: var(--snip-background);
min-height: 1.2em;
width: 80%;
margin: 0 auto;
top: 2px;
border-radius: 6px;
}
.title-loading::after {
content: '';
position: absolute;
top: 0;
left: 0;
width: 100%;
height: 100%;
background: linear-gradient(
90deg,
transparent 25%,
rgba(255, 255, 255, 0.25) 50%,
transparent 75%
);
background-size: 200% 100%;
animation: title-wave 2.5s infinite linear;
}
/* Animations */
@keyframes image-wave {
0% {
background-position: -100% 0; /* Start off-screen left */
}
100% {
background-position: 100% 0; /* End off-screen right */
}
}
@keyframes title-wave {
0% {
background-position: -100% 0; /* Start off-screen left */
}
100% {
background-position: 100% 0; /* End off-screen right */
}
}

View file

@ -33,7 +33,6 @@
#viewer-image {
max-width: 100%;
max-height: 60vh;
border-radius: 5px;
}

/* Viewer Title */
@ -102,13 +101,13 @@
/* View Image Container */
#viewer-image-container {
background-color: #0000;
background-color: var(--view-image-color);
width: 100%;
height: auto;
display: flex;
justify-content: center;
align-items: center;
margin-top: 20px;
margin-top: 50px;
}

/* Full Size and Proxy Size Links */
@ -154,24 +153,14 @@
}

/* Responsive Design */
@media only screen and (max-width: 880px) {
@media only screen and (max-width: 750px) {
#image-viewer {
width: 100%;
height: 100% !important;
height: 77%;
margin-top: 28px;
margin-top: -33px;
margin-right: 0%;
border-top-right-radius: 0px;
border-top-left-radius: 0px;
padding-top: 10px;
padding-bottom: 10px;
}
.material-icons-round {
font-size: 32px;
}
#viewer-image-container {
margin-top: 5px;
} }
#viewer-image {

View file

@ -1,60 +0,0 @@
.message-bottom-right {
opacity: 0;
pointer-events: none;
transition: opacity 0.3s ease-in-out;
align-items: center;
justify-content: center;
position: fixed;
bottom: 20px;
right: 20px;
background-color: var(--search-bg);
color: var(--text-color);
padding: 10px;
border-radius: 5px;
z-index: 1000;
text-align: center;
flex-direction: column;
border: 1px solid var(--border);
box-shadow: 0 0 10px var(--box-shadow);
}
.message-bottom-right.visible {
opacity: 1;
pointer-events: auto;
}
@keyframes bounce {
0%, 100% {
transform: translateY(0);
}
30% {
transform: translateY(-10px);
}
50% {
transform: translateY(0);
}
70% {
transform: translateY(-5px);
}
85% {
transform: translateY(0);
}
95% {
transform: translateY(-2px);
}
}
.dot {
display: inline-block;
animation: bounce 1.5s infinite;
}
.dot:nth-child(2) {
animation-delay: 0.1s;
}
.dot:nth-child(3) {
animation-delay: 0.2s;
}
.dot:nth-child(4) {
animation-delay: 0.3s;
}

View file

@ -1,117 +0,0 @@
/* Music Results Styling */
.result-item.music-item {
display: flex;
gap: 16px;
margin-bottom: 24px;
align-items: flex-start;
}
.music-thumbnail {
position: relative;
flex: 0 0 160px;
aspect-ratio: 1;
border-radius: 5px;
overflow: hidden;
background: var(--placeholder-bg);
}
.music-thumbnail img {
width: 100%;
height: 100%;
object-fit: cover;
transition: transform 0.2s ease;
}
.music-thumbnail:hover img {
transform: scale(1.03);
}
.thumbnail-placeholder {
width: 100%;
height: 100%;
display: flex;
align-items: center;
justify-content: center;
background: var(--placeholder-bg);
color: var(--placeholder-icon);
}
.thumbnail-placeholder .material-icons-round {
font-size: 2.5rem;
}
.duration-overlay {
position: absolute;
bottom: 2px;
right: 2px;
background: rgba(0, 0, 0, 0.8);
color: white;
padding: 4px 8px;
border-radius: 3px;
font-size: 12px;
font-weight: 500;
backdrop-filter: blur(2px);
}
.music-info {
flex: 1;
min-width: 0;
padding-top: 4px;
}
.music-title {
margin: 0 0 8px 0;
font-size: 18px;
line-height: 1.3;
font-weight: 500;
color: var(--text-primary);
}
.music-title:hover {
text-decoration: underline;
}
.music-meta {
display: flex;
align-items: center;
gap: 8px;
font-size: 14px;
color: var(--text-secondary);
}
.artist {
color: var(--accent-color);
font-weight: 500;
}
.meta-separator {
color: var(--border-color);
font-size: 12px;
}
/* Responsive Design */
@media (max-width: 768px) {
.music-thumbnail {
flex-basis: 120px;
}
.music-title {
font-size: 16px;
}
.music-meta {
font-size: 13px;
gap: 6px;
}
}
@media (max-width: 480px) {
.music-thumbnail {
flex-basis: 100px;
}
.duration-overlay {
font-size: 11px;
padding: 3px 6px;
}
}

View file

@ -60,19 +60,10 @@
visibility: hidden; visibility: hidden;
} }
.fetched_dif_videos { .fetched_dif {
margin-top: 110px !important; margin-top: 110px !important;
} }
.fetched_dif_files{
margin-top: 10px !important;
}
.fetched_dif_images {
margin-top: 10px ;
}
.fetched_img { .fetched_img {
margin-left: 1.2% !important; margin-left: 1.2% !important;
left: 0px !important; left: 0px !important;
@ -303,7 +294,6 @@ html {
} }
.btn-nostyle { .btn-nostyle {
font-family: 'Inter', Arial, Helvetica, sans-serif !important;
background-color: inherit; background-color: inherit;
border: none; border: none;
padding: 0px; padding: 0px;
@ -384,11 +374,10 @@ hr {
.results .video_title { .results .video_title {
font-size: 16px; font-size: 16px;
} }
/*
TODO: split the CSS into a general stylesheet and per-result-page styles to avoid this override. .video_title {
.video_title h3 { font-size: 16px;
margin-top: 0px !important; }
} */
.video_title a { .video_title a {
color: var(--link); color: var(--link);
@ -408,7 +397,6 @@ this is so stupid, separate css into general style and per result page css style
width: 254px; width: 254px;
height: 143px; height: 143px;
object-fit: cover; object-fit: cover;
border-radius: 5px;
} }
.video__img__results { .video__img__results {
@ -439,19 +427,13 @@ this is so stupid, separate css into general style and per result page css style
.duration { .duration {
position: absolute; position: absolute;
color: #fff; color: #fff;
font-size: 12px; font-size: 11px;
font-weight: 500;
padding: .5em; padding: .5em;
background: rgba(0, 0, 0, 0.8); background: rgba(0, 0, 0, .5);
color: white; right: 0;
padding: 4px 8px;
margin-top: -28px !important; margin-top: -28px !important;
line-height: 1.3; line-height: 1.3;
letter-spacing: -0.4px; letter-spacing: -0.4px;
bottom: 6px;
right: 2px;
border-radius: 3px;
backdrop-filter: blur(2px);
} }
.pipe { .pipe {
@ -641,10 +623,6 @@ this is so stupid, separate css into general style and per result page css style
text-align: left; text-align: left;
} }
.torrent-cat {
margin-top: 110px;
}
.torrent-cat:hover, .torrent-cat:hover,
.torrent-settings:hover, .torrent-settings:hover,
.torrent-sort-save:hover { .torrent-sort-save:hover {
@ -1183,7 +1161,8 @@ p {
color: var(--fg); color: var(--fg);
width: 530px; width: 530px;
padding: 15px; padding: 15px;
margin-top: 10px; margin-bottom: 627px;
margin-top: 20px;
font-size: 14px; font-size: 14px;
line-height: 1.58; line-height: 1.58;
letter-spacing: normal; letter-spacing: normal;
@ -1309,6 +1288,24 @@ p {
text-shadow: 1px 1px 2px var(--border) !important; /* Adjust text shadow */ text-shadow: 1px 1px 2px var(--border) !important; /* Adjust text shadow */
} }
.message-bottom-left {
display: none;
align-items: center;
justify-content: center;
position: fixed;
bottom: 20px;
right: 20px;
background-color: var(--search-bg);
color: var(--text-color);
padding: 10px;
border-radius: 5px;
z-index: 1000;
text-align: center;
flex-direction: column;
border: 1px solid var(--border);
box-shadow: 0 0 10px var(--box-shadow);
}
body, h1, p, a, input, button { body, h1, p, a, input, button {
color: var(--text-color); /* Applies the text color based on theme */ color: var(--text-color); /* Applies the text color based on theme */
background-color: var(--background-color); /* Applies the background color based on theme */ background-color: var(--background-color); /* Applies the background color based on theme */
@ -1592,27 +1589,15 @@ body, h1, p, a, input, button {
} }
.fetched_img { .fetched_img {
margin-top: 25px !important; margin-top: 135px !important;
margin-left: 1.2% !important; margin-left: 1.2% !important;
left: 0px !important; left: 0px !important;
} }
.fetched_vid { .fetched_vid {
margin-top: 25px !important;
}
.fetched_dif_videos {
margin-top: 135px !important; margin-top: 135px !important;
} }
.fetched_dif_files{
margin-top: 25px !important;
}
.fetched_dif_images {
margin-top: 25px;
}
.results_settings { .results_settings {
left: 20px; left: 20px;
font-size: 13px; font-size: 13px;
@ -1624,7 +1609,6 @@ body, h1, p, a, input, button {
} }
form.torrent-sort { form.torrent-sort {
margin-top: 35px;
left: 20px; left: 20px;
} }

View file

@ -8,24 +8,16 @@ document.addEventListener("DOMContentLoaded", function() {
let searchType = templateData.getAttribute('data-type') || 'text'; // Default to 'text' if not provided let searchType = templateData.getAttribute('data-type') || 'text'; // Default to 'text' if not provided
let loading = false; let loading = false;
let hasMoreResults = true; let hasMoreResults = true;
const loadingIndicator = document.getElementById('message-bottom-right'); const loadingIndicator = document.getElementById('message-bottom-left');
let loadingTimeout; let loadingTimeout;
function showLoadingMessage() {
loadingIndicator.classList.add('visible');
}
function hideLoadingMessage() {
loadingIndicator.classList.remove('visible');
}
function loadResults(newPage) { function loadResults(newPage) {
if (loading || !hasMoreResults) return; if (loading || !hasMoreResults) return;
loading = true; loading = true;
// Show loading indicator if taking more than 150ms // Show loading indicator if taking more than 150ms
loadingTimeout = setTimeout(() => { loadingTimeout = setTimeout(() => {
showLoadingMessage() loadingIndicator.style.display = 'flex';
}, 150); }, 150);
fetch(`/search?q=${encodeURIComponent(query)}&t=${encodeURIComponent(searchType)}&p=${newPage}`) fetch(`/search?q=${encodeURIComponent(query)}&t=${encodeURIComponent(searchType)}&p=${newPage}`)
@ -37,7 +29,7 @@ document.addEventListener("DOMContentLoaded", function() {
}) })
.then(data => { .then(data => {
clearTimeout(loadingTimeout); clearTimeout(loadingTimeout);
hideLoadingMessage() loadingIndicator.style.display = 'none';
const parser = new DOMParser(); const parser = new DOMParser();
const doc = parser.parseFromString(data, 'text/html'); const doc = parser.parseFromString(data, 'text/html');
const newResultsHTML = doc.getElementById('results').innerHTML; const newResultsHTML = doc.getElementById('results').innerHTML;
@ -63,7 +55,7 @@ document.addEventListener("DOMContentLoaded", function() {
}) })
.catch(error => { .catch(error => {
clearTimeout(loadingTimeout); clearTimeout(loadingTimeout);
hideLoadingMessage() loadingIndicator.style.display = 'none';
console.error('Error loading results:', error); console.error('Error loading results:', error);
hasMoreResults = false; hasMoreResults = false;
loading = false; loading = false;
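
Worth noting about the pattern above (in both columns): the client always requests the normal `/search` page and splices out its `#results` fragment, so the server needs no separate JSON API for infinite scroll. The handler itself is not part of this diff; below is a minimal sketch of that one-endpoint design, with an illustrative inline template and a stubbed search function (names are assumptions, not the project's actual code):

```go
// Sketch of the server side this script assumes: a single /search handler
// renders the full HTML results page, and the client either shows it whole
// (initial load) or extracts the #results fragment from it (further pages).
package main

import (
	"html/template"
	"net/http"
	"strconv"
)

var page = template.Must(template.New("page").Parse(
	`<html><body><div id="results">` +
		`{{range .Results}}<div class="result">{{.}}</div>{{end}}` +
		`</div></body></html>`))

// searchText stands in for the real search backend; returns one page of results.
func searchText(q string, p int) []string {
	return []string{q + " result A", q + " result B"}
}

func searchHandler(w http.ResponseWriter, r *http.Request) {
	q := r.URL.Query().Get("q")
	p, err := strconv.Atoi(r.URL.Query().Get("p"))
	if err != nil || p < 1 {
		p = 1 // default to the first page on a missing or bad parameter
	}
	page.Execute(w, map[string]any{"Results": searchText(q, p)})
}

func main() {
	http.HandleFunc("/search", searchHandler)
	http.ListenAndServe(":5000", nil)
}
```

Because the same template serves both cases, pagination also keeps working with JavaScript disabled via the `<noscript>` prev/next form used elsewhere in these templates.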

View file

@ -1,185 +1,197 @@
// dynamicscrollingimages.js
(function() { (function() {
// Add loading effects to image and title // Configuration
function addLoadingEffects(imgElement) { const imageStatusInterval = 500; // Interval in milliseconds to check image status
const title = imgElement.closest('.image').querySelector('.img_title'); const scrollThreshold = 500; // Distance from bottom of the page to trigger loading
imgElement.classList.add('loading-image'); const loadingIndicator = document.getElementById('message-bottom-left');
title.classList.add('title-loading'); let loadingTimer;
}
function removeLoadingEffects(imgElement) {
const title = imgElement.closest('.image').querySelector('.img_title');
imgElement.classList.remove('loading-image');
title.classList.remove('title-loading');
if (imgElement.src.endsWith('/images/missing.svg')) {
imgElement.closest('.image').remove();
}
}
// Modified handleImageError with theme-consistent error handling
function handleImageError(imgElement, retryCount = 3, retryDelay = 1000) {
const container = imgElement.closest('.image');
const title = container.querySelector('.img_title');
if (retryCount > 0) {
setTimeout(() => {
imgElement.src = imgElement.getAttribute('data-full');
imgElement.onerror = () => handleImageError(imgElement, retryCount - 1, retryDelay);
}, retryDelay);
} else {
imgElement.classList.remove('loading-image');
title.classList.remove('title-loading');
container.style.display = 'none';
}
}
// Rest of your existing code with minor additions
const imageStatusInterval = 500;
const scrollThreshold = 500;
const loadingIndicator = document.getElementById('message-bottom-right'); let loadingTimer;
let isFetching = false; let isFetching = false;
let page = parseInt(document.getElementById('template-data').getAttribute('data-page')) || 1; let page = parseInt(document.getElementById('template-data').getAttribute('data-page')) || 1;
let query = document.getElementById('template-data').getAttribute('data-query'); let query = document.getElementById('template-data').getAttribute('data-query');
let hardCacheEnabled = document.getElementById('template-data').getAttribute('data-hard-cache-enabled') === 'true'; let hardCacheEnabled = document.getElementById('template-data').getAttribute('data-hard-cache-enabled') === 'true';
let noMoreImages = false; let noMoreImages = false; // Flag to indicate if there are no more images to load
let imageElements = []; let imageElements = [];
let imageIds = []; let imageIds = [];
let imageStatusTimer;
function showLoadingMessage() { /**
loadingIndicator.classList.add('visible'); * Function to handle image load errors with retry logic
} * @param {HTMLElement} imgElement - The image element that failed to load
* @param {number} retryCount - Number of retries left
function hideLoadingMessage() { * @param {number} retryDelay - Delay between retries in milliseconds
loadingIndicator.classList.remove('visible'); */
function handleImageError(imgElement, retryCount = 3, retryDelay = 1000) {
if (retryCount > 0) {
setTimeout(() => {
imgElement.src = imgElement.getAttribute('data-full');
imgElement.onerror = function() {
handleImageError(imgElement, retryCount - 1, retryDelay);
};
}, retryDelay);
} else {
// After retries, hide the image container or set a fallback image
console.warn('Image failed to load after retries:', imgElement.getAttribute('data-full'));
imgElement.parentElement.style.display = 'none'; // Hide the image container
// Alternatively, set a fallback image:
// imgElement.src = '/static/images/fallback.svg';
}
} }
/**
* Function to ensure the page is scrollable by loading more images if necessary
*/
function ensureScrollable() { function ensureScrollable() {
if (noMoreImages) return; if (noMoreImages) return; // Do not attempt if no more images are available
// Check if the page is not scrollable
if (document.body.scrollHeight <= window.innerHeight) { if (document.body.scrollHeight <= window.innerHeight) {
// If not scrollable, fetch the next page
fetchNextPage(); fetchNextPage();
} }
} }
/**
* Function to fetch the next page of images
*/
function fetchNextPage() { function fetchNextPage() {
if (isFetching || noMoreImages) return; if (isFetching || noMoreImages) return;
// Start the timer for loading indicator
loadingTimer = setTimeout(() => { loadingTimer = setTimeout(() => {
showLoadingMessage(); loadingIndicator.style.display = 'flex';
}, 150); }, 150);
isFetching = true; isFetching = true;
page += 1; page += 1;
fetch(`/search?q=${encodeURIComponent(query)}&t=image&p=${page}&ajax=true`) fetch(`/search?q=${encodeURIComponent(query)}&t=image&p=${page}&ajax=true`)
.then(response => response.text()) .then(response => response.text())
.then(html => { .then(html => {
clearTimeout(loadingTimer); clearTimeout(loadingTimer); // Clear the timer if fetch is successful
hideLoadingMessage(); loadingIndicator.style.display = 'none'; // Hide the loading indicator
let tempDiv = document.createElement('div'); let parser = new DOMParser();
tempDiv.innerHTML = html; let doc = parser.parseFromString(html, 'text/html');
let newImages = tempDiv.querySelectorAll('.image'); let newImages = doc.querySelectorAll('.image');
if (newImages.length > 0) { if (newImages.length > 0) {
let resultsContainer = document.querySelector('.images'); let resultsContainer = document.querySelector('.images');
newImages.forEach(imageDiv => { newImages.forEach(imageDiv => {
let clonedImageDiv = imageDiv.cloneNode(true); // Append new images to the container
resultsContainer.appendChild(clonedImageDiv); resultsContainer.appendChild(imageDiv);
let img = clonedImageDiv.querySelector('img'); // Get the img element
if (img && img.getAttribute('data-id')) { let img = imageDiv.querySelector('img');
addLoadingEffects(img); if (img) {
if (hardCacheEnabled) { let id = img.getAttribute('data-id');
img.src = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR42mNkYAAAAAYAAjCB0C8AAAAASUVORK5CYII='; if (id) {
img.onerror = () => handleImageError(img);
imageElements.push(img); imageElements.push(img);
imageIds.push(img.getAttribute('data-id')); imageIds.push(id);
}
if (hardCacheEnabled) {
// Replace image with placeholder
img.src = '/static/images/placeholder.svg';
img.onerror = function() {
handleImageError(img);
};
} else { } else {
// HardCacheEnabled is false; load images immediately
img.src = img.getAttribute('data-full'); img.src = img.getAttribute('data-full');
img.onload = () => removeLoadingEffects(img); img.onerror = function() {
img.onerror = () => handleImageError(img); handleImageError(img);
};
} }
} }
}); });
if (hardCacheEnabled) {
if (hardCacheEnabled) checkImageStatus(); checkImageStatus();
}
// After appending new images, ensure the page is scrollable
ensureScrollable(); ensureScrollable();
} else { } else {
// No more images to load
noMoreImages = true; noMoreImages = true;
} }
isFetching = false; isFetching = false;
}) })
.catch(error => { .catch(error => {
clearTimeout(loadingTimer); clearTimeout(loadingTimer); // Clear the timer if fetch fails
hideLoadingMessage(); loadingIndicator.style.display = 'none'; // Hide the loading indicator
console.error('Fetch error:', error); console.error('Error fetching next page:', error);
isFetching = false; isFetching = false;
}); });
} }
/**
* Function to check image status via AJAX
*/
function checkImageStatus() { function checkImageStatus() {
if (!hardCacheEnabled || imageIds.length === 0) return; if (!hardCacheEnabled) return;
if (imageIds.length === 0) {
// No images to check, do nothing
return;
}
// Send AJAX request to check image status
fetch(`/image_status?image_ids=${imageIds.join(',')}`) fetch(`/image_status?image_ids=${imageIds.join(',')}`)
.then(response => response.json()) .then(response => response.json())
.then(statusMap => { .then(statusMap => {
const pendingImages = []; imageElements = imageElements.filter(img => {
const pendingIds = []; let id = img.getAttribute('data-id');
imageElements.forEach(img => {
const id = img.getAttribute('data-id');
if (statusMap[id]) { if (statusMap[id]) {
// Image is ready, update src
img.src = statusMap[id]; img.src = statusMap[id];
img.onload = () => removeLoadingEffects(img); img.onerror = function() {
img.onerror = () => handleImageError(img); handleImageError(img);
} else { };
pendingImages.push(img); // Remove the image id from the list
pendingIds.push(id); imageIds = imageIds.filter(imageId => imageId !== id);
return false; // Remove img from imageElements
} }
return true; // Keep img in imageElements
}); });
// After updating images, ensure the page is scrollable
imageElements = pendingImages;
imageIds = pendingIds;
ensureScrollable(); ensureScrollable();
}) })
.catch(error => { .catch(error => {
console.error('Status check error:', error); console.error('Error checking image status:', error);
}); });
} }
// Initialize with loading effects // Initialize imageElements and imageIds
document.querySelectorAll('img[data-id]').forEach(img => { imageElements = Array.from(document.querySelectorAll('img[data-id]'));
const id = img.getAttribute('data-id'); imageIds = imageElements
if (id) { .map(img => img.getAttribute('data-id'))
addLoadingEffects(img); .filter(id => id); // Exclude empty IDs
imageElements.push(img);
imageIds.push(id);
if (hardCacheEnabled) {
img.src = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR42mNkYAAAAAYAAjCB0C8AAAAASUVORK5CYII=';
} else {
img.src = img.getAttribute('data-full');
img.onload = () => removeLoadingEffects(img);
}
img.onerror = () => handleImageError(img);
}
});
// Rest of your existing code remains unchanged
if (hardCacheEnabled) { if (hardCacheEnabled) {
imageStatusTimer = setInterval(checkImageStatus, imageStatusInterval); // Replace images with placeholders
checkImageStatus(); imageElements.forEach(img => {
img.src = '/static/images/placeholder.svg';
});
// Start checking image status
let imageStatusTimer = setInterval(checkImageStatus, imageStatusInterval);
checkImageStatus(); // Initial check
} else {
// HardCacheEnabled is false; load images immediately
imageElements.forEach(img => {
img.src = img.getAttribute('data-full');
img.onerror = function() {
handleImageError(img);
};
});
} }
// After initial images are loaded, ensure the page is scrollable
window.addEventListener('load', ensureScrollable); window.addEventListener('load', ensureScrollable);
window.addEventListener('scroll', () => {
// Infinite scrolling
window.addEventListener('scroll', function() {
if (isFetching || noMoreImages) return; if (isFetching || noMoreImages) return;
if (window.innerHeight + window.scrollY >= document.body.offsetHeight - scrollThreshold) { if (window.innerHeight + window.scrollY >= document.body.offsetHeight - scrollThreshold) {
// User scrolled near the bottom
fetchNextPage(); fetchNextPage();
} }
}); });
window.addEventListener('beforeunload', () => {
if (imageStatusTimer) clearInterval(imageStatusTimer);
});
})(); })();
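
The polling loop in both versions relies on a single property of the server: `/image_status` must answer with a JSON object keyed by image ID, where a present entry carries the cached image URL and an absent entry means the image is still being fetched. The handler is not shown in this diff; a minimal sketch of that contract, with the cache lookup stubbed out (the names here are illustrative, not the project's actual code):

```go
// Sketch of the contract checkImageStatus() assumes for /image_status:
// a JSON map of ready image IDs to their final URLs; pending IDs omitted.
package main

import (
	"encoding/json"
	"net/http"
	"strings"
)

// cacheReady reports whether the image with the given ID has been cached
// and, if so, the URL the client should swap into the <img> src.
// Stub: the real server would consult its on-disk image cache here.
func cacheReady(id string) (string, bool) {
	return "", false
}

func imageStatusHandler(w http.ResponseWriter, r *http.Request) {
	statusMap := make(map[string]string)
	for _, id := range strings.Split(r.URL.Query().Get("image_ids"), ",") {
		if id == "" {
			continue
		}
		if url, ok := cacheReady(id); ok {
			statusMap[id] = url
		}
	}
	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(statusMap)
}

func main() {
	http.HandleFunc("/image_status", imageStatusHandler)
	http.ListenAndServe(":5000", nil)
}
```

Returning only the ready entries keeps the response small and lets the client shrink its polling set on each pass, exactly as the filtering in the script above does.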

View file

@ -28,41 +28,26 @@ document.addEventListener('DOMContentLoaded', function() {
<img id="viewer-image" class="view-image-img" src="" alt=""> <img id="viewer-image" class="view-image-img" src="" alt="">
</div> </div>
<p class="image-alt" id="viewer-title"></p> <p class="image-alt" id="viewer-title"></p>
<br> <p>
<div class="search-type-icons" style="display:flex; justify-content:center; flex-wrap: wrap;"> <a class="full-size" id="viewer-full-size-link" href="#" target="_blank">Show source website</a>
<div class="icon-button"> <a class="proxy-size" id="viewer-proxy-size-link" href="#" target="_blank">Show in fullscreen</a>
<button class="material-icons-round clickable btn-nostyle" id="viewer-copy-link"> </p>
<span class="material-icons-round">&#xe37c;</span>
<p>Copy link</p>
</button>
</div>
<div class="icon-button">
<button class="material-icons-round clickable btn-nostyle" id="viewer-open-image">
<span class="material-icons-round">&#xe193;</span>
<p>Open image</p>
</button>
</div>
<div class="icon-button">
<button class="material-icons-round clickable btn-nostyle" id="viewer-open-source">
<span class="material-icons-round">&#xe366;</span>
<p>Go to source</p>
</button>
</div>
<div class="icon-button">
<button class="material-icons-round clickable btn-nostyle" id="viewer-download-image">
<span class="material-icons-round">&#xe2d1;</span>
<p>Download</p>
</button>
</div>
</div>
</div> </div>
`; `;
const imageView = viewerOverlay.querySelector('#image-viewer'); const imageView = viewerOverlay.querySelector('#image-viewer');
if (!imageView) {
console.error('imageView is null');
}
const imagesContainer = document.querySelector('.images'); const imagesContainer = document.querySelector('.images');
if (!imagesContainer) {
console.error('imagesContainer is null');
}
function openImageViewer(element) { function openImageViewer(element) {
initializeImageList(); initializeImageList(); // Update the image list
const parentImageDiv = element.closest('.image'); const parentImageDiv = element.closest('.image');
if (!parentImageDiv) return; if (!parentImageDiv) return;
@ -76,62 +61,75 @@ document.addEventListener('DOMContentLoaded', function() {
document.body.classList.add('viewer-open'); document.body.classList.add('viewer-open');
viewerOverlay.style.display = 'block'; viewerOverlay.style.display = 'block';
imageView.classList.replace('image_hide', 'image_show'); imageView.classList.remove('image_hide');
imageView.classList.add('image_show');
} }
let fullImageUrl, sourceUrl, proxyFullUrl;
function displayImage(index) { function displayImage(index) {
if (index < 0 || index >= imageList.length) return; if (index < 0 || index >= imageList.length) return;
// Remove the `.image_selected` class from all images
imageList.forEach(img => { imageList.forEach(img => {
const parentImageDiv = img.closest('.image'); const parentImageDiv = img.closest('.image');
parentImageDiv?.classList.remove('image_selected'); if (parentImageDiv) {
parentImageDiv.classList.remove('image_selected');
}
}); });
const imgElement = imageList[index]; const imgElement = imageList[index];
const parentImageDiv = imgElement.closest('.image'); const parentImageDiv = imgElement.closest('.image');
parentImageDiv?.classList.add('image_selected');
fullImageUrl = imgElement.getAttribute('data-full') || imgElement.src; if (!parentImageDiv) {
sourceUrl = imgElement.getAttribute('data-source'); console.warn('Parent image div not found');
proxyFullUrl = imgElement.getAttribute('data-proxy-full') || fullImageUrl; return;
}
// Add the `.image_selected` class to the currently displayed image
parentImageDiv.classList.add('image_selected');
// Use the `data-full` attribute for the full image URL
let fullImageUrl = imgElement.getAttribute('data-full') || imgElement.src;
const title = imgElement.alt || 'Untitled';
// Get the source URL from the data-source attribute
const sourceUrl = imgElement.getAttribute('data-source');
// Fallback logic: if sourceUrl is null, use `data-proxy-full` or a meaningful default
const proxyFullUrl = imgElement.getAttribute('data-proxy-full') || fullImageUrl;
// Elements in the viewer
const viewerImage = document.getElementById('viewer-image'); const viewerImage = document.getElementById('viewer-image');
const viewerTitle = document.getElementById('viewer-title'); const viewerTitle = document.getElementById('viewer-title');
const fullSizeLink = document.getElementById('viewer-full-size-link');
const proxySizeLink = document.getElementById('viewer-proxy-size-link');
viewerTitle.textContent = imgElement.alt || 'Untitled'; viewerTitle.textContent = title;
fullSizeLink.href = sourceUrl || proxyFullUrl;
viewerImage.onerror = () => viewerImage.src = proxyFullUrl; // Remove previous event listeners to avoid stacking
viewerImage.onload = () => {}; viewerImage.onload = null;
viewerImage.onerror = null;
// Set up the error handler to switch to the proxy image if the full image fails to load
viewerImage.onerror = function() {
// Use the proxy image as a fallback
viewerImage.src = proxyFullUrl;
proxySizeLink.href = proxyFullUrl;
};
// Set up the load handler to ensure the proxySizeLink is set correctly if the image loads
viewerImage.onload = function() {
proxySizeLink.href = fullImageUrl;
};
// Start loading the image
viewerImage.src = fullImageUrl; viewerImage.src = fullImageUrl;
} }
document.getElementById('viewer-copy-link').onclick = () => { document.body.addEventListener('click', function(e) {
navigator.clipboard.writeText(window.location.origin + fullImageUrl).catch(console.error); let target = e.target;
}; let clickableElement = target.closest('img.clickable, .img_title.clickable');
document.getElementById('viewer-open-image').onclick = () => {
window.open(fullImageUrl, '_blank');
};
document.getElementById('viewer-open-source').onclick = () => {
window.open(sourceUrl || proxyFullUrl, '_blank');
};
document.getElementById('viewer-download-image').onclick = (event) => {
event.stopPropagation();
const a = document.createElement('a');
a.href = fullImageUrl;
a.download = fullImageUrl.split('/').pop();
document.body.appendChild(a);
a.click();
document.body.removeChild(a);
};
document.body.addEventListener('click', e => {
const clickableElement = e.target.closest('img.clickable, .img_title.clickable');
if (clickableElement) { if (clickableElement) {
e.preventDefault(); e.preventDefault();
openImageViewer(clickableElement); openImageViewer(clickableElement);
@ -139,7 +137,8 @@ document.addEventListener('DOMContentLoaded', function() {
}); });
function closeImageViewer() { function closeImageViewer() {
imageView.classList.replace('image_show', 'image_hide'); imageView.classList.remove('image_show');
imageView.classList.add('image_hide');
viewerOpen = false; viewerOpen = false;
currentIndex = -1; currentIndex = -1;
@ -147,23 +146,56 @@ document.addEventListener('DOMContentLoaded', function() {
document.body.classList.remove('viewer-open'); document.body.classList.remove('viewer-open');
viewerOverlay.style.display = 'none'; viewerOverlay.style.display = 'none';
imageList.forEach(img => img.closest('.image')?.classList.remove('image_selected')); // Remove `.image_selected` from all images
imageList.forEach(img => {
const parentImageDiv = img.closest('.image');
if (parentImageDiv) {
parentImageDiv.classList.remove('image_selected');
}
});
} }
document.getElementById('viewer-close-button').onclick = closeImageViewer; // Navigation functions
document.getElementById('viewer-prev-button').onclick = () => currentIndex > 0 && displayImage(--currentIndex); function showPreviousImage() {
document.getElementById('viewer-next-button').onclick = () => currentIndex < imageList.length - 1 && displayImage(++currentIndex); if (currentIndex > 0) {
currentIndex--;
displayImage(currentIndex);
}
}
document.addEventListener('click', e => { function showNextImage() {
if (viewerOpen && !viewerOverlay.contains(e.target) && !e.target.closest('.image')) { if (currentIndex < imageList.length - 1) {
closeImageViewer(); currentIndex++;
displayImage(currentIndex);
}
}
// Event listeners for navigation and closing
document.getElementById('viewer-close-button').addEventListener('click', closeImageViewer);
document.getElementById('viewer-prev-button').addEventListener('click', showPreviousImage);
document.getElementById('viewer-next-button').addEventListener('click', showNextImage);
// Close viewer when clicking outside the image
document.addEventListener('click', function(e) {
if (viewerOpen) {
const target = e.target;
const clickedInsideViewer = viewerOverlay.contains(target) || target.closest('.image');
if (!clickedInsideViewer) {
closeImageViewer();
}
} }
}); });
document.addEventListener('keydown', e => { // Handle keyboard events for closing and navigation
if (!viewerOpen) return; document.addEventListener('keydown', function(e) {
if (e.key === 'Escape') closeImageViewer(); if (viewerOpen) {
if (e.key === 'ArrowLeft' && currentIndex > 0) displayImage(--currentIndex); if (e.key === 'Escape') {
if (e.key === 'ArrowRight' && currentIndex < imageList.length - 1) displayImage(++currentIndex); closeImageViewer();
} else if (e.key === 'ArrowLeft') {
showPreviousImage();
} else if (e.key === 'ArrowRight') {
showNextImage();
}
}
}); });
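
Both versions of displayImage() implement the same fallback chain: try the upstream URL from `data-full`, and swap in the `data-proxy-full` copy only if the direct load errors. The proxy route itself is not in this diff; a rough sketch of what such a handler could look like, assuming a hypothetical `/proxy?url=...` route (the project's real path, parameters, and caching behavior may differ):

```go
// Hypothetical image proxy like the one behind data-proxy-full: fetches
// the upstream image server-side and relays it, so the browser never
// talks to the source host directly.
package main

import (
	"io"
	"net/http"
	"net/url"
)

func proxyImageHandler(w http.ResponseWriter, r *http.Request) {
	raw := r.URL.Query().Get("url")
	u, err := url.Parse(raw)
	if err != nil || (u.Scheme != "http" && u.Scheme != "https") {
		http.Error(w, "invalid url", http.StatusBadRequest)
		return
	}
	resp, err := http.Get(u.String())
	if err != nil {
		http.Error(w, "upstream fetch failed", http.StatusBadGateway)
		return
	}
	defer resp.Body.Close()
	w.Header().Set("Content-Type", resp.Header.Get("Content-Type"))
	io.Copy(w, resp.Body) // stream the image through without buffering it all
}

func main() {
	http.HandleFunc("/proxy", proxyImageHandler)
	http.ListenAndServe(":5000", nil)
}
```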
}); });

View file

@ -103,10 +103,6 @@
<button id="sub-search-wrapper-ico" class="material-icons-round clickable" name="t" value="video">&#xe02c;</button> <!-- Video icon --> <button id="sub-search-wrapper-ico" class="material-icons-round clickable" name="t" value="video">&#xe02c;</button> <!-- Video icon -->
<button name="t" value="video" class="clickable">{{ translate "videos" }}</button> <button name="t" value="video" class="clickable">{{ translate "videos" }}</button>
</div> </div>
<div class="search-container-results-btn">
<button id="sub-search-wrapper-ico" class="material-icons-round clickable" name="t" value="music">&#xe423;</button>
<button name="t" value="music" class="clickable">{{ translate "music" }}</button>
</div>
<div class="search-container-results-btn"> <div class="search-container-results-btn">
<button id="sub-search-wrapper-ico" class="material-icons-round clickable" name="t" value="forum">&#xe0bf;</button> <!-- Forum icon --> <button id="sub-search-wrapper-ico" class="material-icons-round clickable" name="t" value="forum">&#xe0bf;</button> <!-- Forum icon -->
<button name="t" value="forum" class="clickable">{{ translate "forums" }}</button> <button name="t" value="forum" class="clickable">{{ translate "forums" }}</button>
@ -124,6 +120,8 @@
</div> </div>
</form> </form>
<p class="fetched fetched_dif fetched_tor">{{ translate "fetched_in" .Fetched }}</p>
{{ if .Results }} {{ if .Results }}
<form action="/search" class="torrent-sort" method="GET"> <form action="/search" class="torrent-sort" method="GET">
<input type="hidden" name="q" value="{{ .Query }}"> <input type="hidden" name="q" value="{{ .Query }}">
@ -149,8 +147,6 @@
</select> </select>
<button type="submit" class="torrent-sort-save">{{ translate "apply_settings" }}</button> <button type="submit" class="torrent-sort-save">{{ translate "apply_settings" }}</button>
</form> </form>
<p class="fetched fetched_dif_files fetched_tor">{{ translate "fetched_in" .Fetched }}</p>
<div class="clean"> <div class="clean">
{{ range .Results }} {{ range .Results }}
<div class="results" id="results"> <div class="results" id="results">

View file

@ -10,7 +10,6 @@
<link rel="stylesheet" href="/static/css/style.css"> <link rel="stylesheet" href="/static/css/style.css">
<link rel="stylesheet" href="/static/css/{{.Theme}}.css"> <link rel="stylesheet" href="/static/css/{{.Theme}}.css">
<link rel="stylesheet" href="/static/css/style-fonts.css"> <link rel="stylesheet" href="/static/css/style-fonts.css">
<link rel="stylesheet" href="/static/css/style-loadingindicator.css">
<link rel="stylesheet" href="/static/css/style-menu.css"> <link rel="stylesheet" href="/static/css/style-menu.css">
<link rel="search" type="application/opensearchdescription+xml" title="{{ translate "site_name" }}" href="/opensearch.xml"> <link rel="search" type="application/opensearchdescription+xml" title="{{ translate "site_name" }}" href="/opensearch.xml">
<!-- Icons --> <!-- Icons -->
@ -104,10 +103,6 @@
<button id="sub-search-wrapper-ico" class="material-icons-round clickable" name="t" value="video">&#xe02c;</button> <!-- Video icon --> <button id="sub-search-wrapper-ico" class="material-icons-round clickable" name="t" value="video">&#xe02c;</button> <!-- Video icon -->
<button name="t" value="video" class="clickable">{{ translate "videos" }}</button> <button name="t" value="video" class="clickable">{{ translate "videos" }}</button>
</div> </div>
<div class="search-container-results-btn">
<button id="sub-search-wrapper-ico" class="material-icons-round clickable" name="t" value="music">&#xe423;</button>
<button name="t" value="music" class="clickable">{{ translate "music" }}</button>
</div>
<div class="search-container-results-btn"> <div class="search-container-results-btn">
<button id="sub-search-wrapper-ico" class="material-icons-round clickable search-active" name="t" value="forum">&#xe0bf;</button> <!-- Forum icon --> <button id="sub-search-wrapper-ico" class="material-icons-round clickable search-active" name="t" value="forum">&#xe0bf;</button> <!-- Forum icon -->
<button name="t" value="forum" class="clickable search-active">{{ translate "forums" }}</button> <button name="t" value="forum" class="clickable search-active">{{ translate "forums" }}</button>
@ -137,8 +132,6 @@
</select> </select>
<button class="results-save" name="t" value="forum">{{ translate "save_settings" }}</button> <button class="results-save" name="t" value="forum">{{ translate "save_settings" }}</button>
</form> </form>
<p class="fetched fetched_dif_files fetched_tor">{{ translate "fetched_in" .Fetched }}</p>
<div class="results" id="results"> <div class="results" id="results">
{{if .Results}} {{if .Results}}
{{range .Results}} {{range .Results}}
@ -158,8 +151,8 @@
<div class="no-results-found">{{ translate "no_more_results" }}</div> <div class="no-results-found">{{ translate "no_more_results" }}</div>
{{end}} {{end}}
</div> </div>
<div id="message-bottom-right" class="message-bottom-right"> <div class="message-bottom-left" id="message-bottom-left">
<span id="loading-text">{{ translate "searching_for_new_results" }}</span><span class="dot">.</span><span class="dot">.</span><span class="dot">.</span> <span>{{ translate "searching_for_new_results" }}</span>
</div> </div>
<div class="prev-next prev-img" id="prev-next"> <div class="prev-next prev-img" id="prev-next">
<form action="/search" method="get"> <form action="/search" method="get">

View file

@ -15,10 +15,8 @@
} }
</style> </style>
</noscript> </noscript>
<link rel="stylesheet" href="/static/css/style-imageloading.css">
<link rel="stylesheet" href="/static/css/style-imageviewer.css"> <link rel="stylesheet" href="/static/css/style-imageviewer.css">
<link rel="stylesheet" href="/static/css/style-fixedwidth.css"> <link rel="stylesheet" href="/static/css/style-fixedwidth.css">
<link rel="stylesheet" href="/static/css/style-loadingindicator.css">
<link rel="stylesheet" href="/static/css/style-menu.css"> <link rel="stylesheet" href="/static/css/style-menu.css">
<link rel="stylesheet" href="/static/css/{{.Theme}}.css"> <link rel="stylesheet" href="/static/css/{{.Theme}}.css">
<link rel="stylesheet" href="/static/css/style-fonts.css"> <link rel="stylesheet" href="/static/css/style-fonts.css">
@ -115,10 +113,6 @@
<button id="sub-search-wrapper-ico" class="material-icons-round clickable" name="t" value="video">&#xe02c;</button> <button id="sub-search-wrapper-ico" class="material-icons-round clickable" name="t" value="video">&#xe02c;</button>
<button name="t" value="video" class="clickable">{{ translate "videos" }}</button> <button name="t" value="video" class="clickable">{{ translate "videos" }}</button>
</div> </div>
<div class="search-container-results-btn">
<button id="sub-search-wrapper-ico" class="material-icons-round clickable" name="t" value="music">&#xe423;</button>
<button name="t" value="music" class="clickable">{{ translate "music" }}</button>
</div>
<div class="search-container-results-btn"> <div class="search-container-results-btn">
<button id="sub-search-wrapper-ico" class="material-icons-round clickable" name="t" value="forum">&#xe0bf;</button> <button id="sub-search-wrapper-ico" class="material-icons-round clickable" name="t" value="forum">&#xe0bf;</button>
<button name="t" value="forum" class="clickable">{{ translate "forums" }}</button> <button name="t" value="forum" class="clickable">{{ translate "forums" }}</button>
@ -153,7 +147,6 @@
</form> </form>
<div class="search-results" id="results"> <div class="search-results" id="results">
<p class="fetched fetched_dif_images fetched_vid">{{ translate "fetched_in" .Fetched }}</p>
<!-- Results go here --> <!-- Results go here -->
{{ if .Results }} {{ if .Results }}
<div class="images images_viewer_hidden"> <div class="images images_viewer_hidden">
@ -241,8 +234,8 @@
<div class="no-results-found no-results-found-offset">{{ translate "no_more_results" }}</div> <div class="no-results-found no-results-found-offset">{{ translate "no_more_results" }}</div>
{{ end }} {{ end }}
</div> </div>
<div id="message-bottom-right" class="message-bottom-right"> <div class="message-bottom-left" id="message-bottom-left">
<span id="loading-text">{{ translate "searching_for_new_results" }}</span><span class="dot">.</span><span class="dot">.</span><span class="dot">.</span> <span>{{ translate "searching_for_new_results" }}</span>
</div> </div>
<div id="image-viewer-overlay" style="display: none;"></div> <div id="image-viewer-overlay" style="display: none;"></div>

View file

@ -118,10 +118,6 @@
<button id="sub-search-wrapper-ico" class="material-icons-round clickable" name="t" value="video">&#xe02c;</button> <!-- Movie icon --> <button id="sub-search-wrapper-ico" class="material-icons-round clickable" name="t" value="video">&#xe02c;</button> <!-- Movie icon -->
<button name="t" value="video" class="clickable">{{ translate "videos" }}</button> <button name="t" value="video" class="clickable">{{ translate "videos" }}</button>
</div> </div>
<div class="search-container-results-btn">
<button id="sub-search-wrapper-ico" class="material-icons-round clickable" name="t" value="music">&#xe423;</button>
<button name="t" value="music" class="clickable">{{ translate "music" }}</button>
</div>
<div class="search-container-results-btn"> <div class="search-container-results-btn">
<button id="sub-search-wrapper-ico" class="material-icons-round clickable" name="t" value="forum">&#xe0bf;</button> <!-- Forum icon --> <button id="sub-search-wrapper-ico" class="material-icons-round clickable" name="t" value="forum">&#xe0bf;</button> <!-- Forum icon -->
<button name="t" value="forum" class="clickable">{{ translate "forums" }}</button> <button name="t" value="forum" class="clickable">{{ translate "forums" }}</button>

View file

@ -1,192 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
{{ if .IsThemeDark }}
<meta name="darkreader-lock">
{{ end }}
<title>{{ .Query }} - Music Search - {{ translate "site_name" }}</title>
<link rel="stylesheet" href="/static/css/style.css">
<link rel="stylesheet" href="/static/css/{{.Theme}}.css">
<link rel="stylesheet" href="/static/css/style-fonts.css">
<link rel="stylesheet" href="/static/css/style-music.css">
<link rel="stylesheet" href="/static/css/style-loadingindicator.css">
<link rel="stylesheet" href="/static/css/style-menu.css">
<link rel="search" type="application/opensearchdescription+xml" title="{{ translate "site_name" }}" href="/opensearch.xml">
<!-- Icons -->
<link rel="icon" href="{{ .IconPathSVG }}" type="image/svg+xml">
<link rel="icon" href="{{ .IconPathPNG }}" type="image/png">
<link rel="apple-touch-icon" href="{{ .IconPathPNG }}">
</head>
<body>
<!-- Menu Button -->
<div id="content" class="js-enabled">
<div class="settings-search-div settings-search-div-search">
<button class="material-icons-round clickable settings-icon-link settings-icon-link-search">&#xe5d2;</button>
</div>
<div class="search-menu settings-menu-hidden">
<h2>Settings</h2>
<div class="settings-content">
<button id="settingsButton" onclick="window.location.href='/settings'">All settings</button>
<div class="theme-settings">
<p><span class="highlight">Current theme: </span> <span id="theme_name">{{.Theme}}</span></p>
<div class="themes-settings-menu">
<div><img class="view-image-search clickable" id="dark_theme" alt="Dark Theme" src="/static/images/dark.webp"></div>
<div><img class="view-image-search clickable" id="light_theme" alt="Light Theme" src="/static/images/light.webp"></div>
</div>
</div>
<select class="lang" name="safe" id="safeSearchSelect">
<option value="disabled" {{if eq .Safe "disabled"}}selected{{end}}>Safe Search Off</option>
<option value="active" {{if eq .Safe "active"}}selected{{end}}>Safe Search On</option>
</select>
<select class="lang" name="lang" id="languageSelect">
{{range .LanguageOptions}}
<option value="{{.Code}}" {{if eq .Code $.CurrentLang}}selected{{end}}>{{.Name}}</option>
{{end}}
</select>
<button id="aboutQGatoBtn">About QGato</button>
</div>
</div>
</div>
<noscript>
<div class="settings-search-div settings-search-div-search">
<a href="/settings" class="material-icons-round clickable settings-icon-link settings-icon-link-search">&#xe5d2;</a>
</div>
</noscript>
<!-- Popup Modal for QGato -->
<div id="aboutQGatoModal">
<!-- Close Button -->
<button class="btn-nostyle" id="close-button">
<div class="material-icons-round icon_visibility clickable cloase-btn">&#xe5cd;</div>
</button>
<div class="modal-content">
<img
src="/static/images/icon.svg"
alt="QGato"
>
<h2>QGato</h2>
<p>An open-source private search engine.</p>
<div class="button-container">
<button onclick="window.location.href='https://weforge.xyz/Spitfire/Search'">Source Code</button>
<button onclick="window.location.href='/privacy'">Privacy policy</button>
</div>
</div>
</div>
<form action="/music" id="prev-next-form" class="results-search-container" method="GET" autocomplete="off">
<h1 class="logomobile">
<div class="logo-container" href="/">
<a href="/">
<svg xmlns="http://www.w3.org/2000/svg" width="29" height="86" viewBox="0 0 29 86"><path fill-rule="evenodd" d="M-44.35.78C-70.8 6.76-74.8 43.17-50.67 55.73c1.7.88 4.42 1.7 7.83 2.22 4.48.68 9.6.86 9.58.15-.04-1.43-7.3-8.28-8.67-8.28-3.15 0-9.94-5.66-11.97-10C-61.95 22.66-48.1 4.54-31.12 10c13.5 4.34 18.1 22.7 8.66 34.44-1.85 2.3-1.75 2.3-4.4-.22-4.8-4.59-8.57-5.25-11.98-2.1-2.18 2-2.15 2.66.15 4.14 1.9 1.22 13.4 12.95 17.49 17.83 4.3 5.13 5.24 6.14 7.52 7.97C-9.25 75.6-1.23 77.91 1 76.28c.67-.5 1.86-7.8 1.35-8.3-.12-.12-1.34-.4-2.7-.61-5.36-.86-9.23-3.46-14.2-9.55-3.49-4.27-4.12-5.26-3.38-5.26 2.54 0 8.05-8.62 9.86-15.43C-2.36 15.63-22.18-4.23-44.35.78m65.13 1.53C4.92 6.02-4.86 22.36-.72 38.24 3 52.62 18.43 59.63 33.67 57.64c4.7-.62 2.43-.66 4.45-.8s6.45-.01 6.93-.2c.4.03.72-.45.72-.94V42.31c0-7.36-.16-13.62-.33-13.9-.26-.4-2.36-.49-10.19-.4-11.44.15-10.96-.03-10.96 4.09 0 2.44-.04 3.99 1.17 4.7 1.13.68 3.43.59 6.68.41l3.76-.2.27 5.68c.33 6.59.57 6.15-3.64 6.7-15.53 2.04-24-5.02-23.37-19.43.66-15.1 12.2-22.78 26.96-17.94 4.5 1.47 4.4 1.52 6.16-2.8 1.5-3.68 1.5-3.69-.82-4.69C36.03 2.2 25.9 1.11 20.78 2.31m78.83.8c-2.87.76-2.9.84-3.15 6.12-.25 5.56.12 4.96-3.35 5.29-3.43.32-3.32.15-2.76 4.2.61 4.37.6 4.34 3.76 4.34h2.65v12.7c0 14.5 1.55 16.33 3.5 18.3 3.6 3.48 9.59 4.92 14.93 3.06 2.45-.85 2.43-.8 2.18-4.95-.25-4.1-.43-3.5-3.16-2.91-7.73 1.64-8.27.6-8.27-15.05V22.87h5.66l5.34-.1c.67-.01.97.4 1.28-3.9.35-4.8-.2-4.01-.8-4.14l-5.82.18-5.66.26v-5.16c0-5.84-.2-6.48-2.25-7.04-1.75-.49-1.76-.49-4.08.13m-34.5 11.02c-2.64.38-4.71 1.04-8.54 2.72l-4.03 1.76c-1.09.39-.28 1.29.69 3.89 1.06 2.75 1.35 3.35 2.11 3.03.76-.32.7-.23 1.43-.65 9.08-5.25 20.26-2.63 20.26 4.74v2.14l-5.95.2c-13.84.48-20.29 4.75-20.38 13.51-.13 12.4 14.18 17.22 24.62 8.3l2.3-1.97.23 1.85c.32 2.53.6 3.06 2.04 3.67 1.42.6 7.16.62 7.75.03.77-.77.37-6-.25-6.34-.94-.5-.77-1.57-.88-12.63-.15-14.87-.5-16.5-4.4-20.13-3.03-2.84-11.55-4.9-17-4.12m72.86 0c-27.2 5.27-24.13 43.96 3.47 43.9 14.67-.04 24.4-12.77 21.53-28.16-1.86-9.95-14.33-17.8-25-15.73m8.29 8.96c6.88 2.34 9.61 11.51 5.9 19.79-4.13 9.19-17.89 9.17-22.14-.03-1.32-2.85-1.24-10.79.14-13.54 3-6 9.45-8.49 16.1-6.22m-68.84 18.5v3.09l-1.85 1.63c-7.46 6.58-16.36 5.49-15.6-1.9.45-4.35 3.62-5.77 13.06-5.87l4.4-.05v3.1" style="fill:currentColor" transform="translate(-31.68 4.9)"/><path d="M-13.47 73.3v1.11q-.65-.3-1.23-.46-.57-.15-1.11-.15-.93 0-1.44.36-.5.36-.5 1.03 0 .56.33.85.34.28 1.28.46l.69.14q1.27.24 1.88.86.6.6.6 1.64 0 1.22-.82 1.86-.82.63-2.4.63-.6 0-1.28-.14-.68-.13-1.4-.4v-1.17q.7.39 1.36.58.67.2 1.31.2.98 0 1.51-.38.54-.39.54-1.1 0-.62-.39-.97-.38-.35-1.25-.53l-.7-.13q-1.27-.26-1.84-.8-.57-.54-.57-1.51 0-1.12.78-1.76.8-.65 2.18-.65.6 0 1.2.1.63.12 1.27.33zm2.29-.28h5.34V74h-4.2v2.5h4.02v.96h-4.02v3.05h4.3v.97h-5.44zm10.14 1.13-1.55 4.2H.5zm-.65-1.13h1.3l3.21 8.45H1.64L.87 79.3h-3.8l-.78 2.17h-1.2zm9.75 4.48q.37.13.71.54.35.4.7 1.12l1.16 2.3H9.41L8.33 79.3q-.42-.85-.82-1.13-.39-.27-1.07-.27H5.2v3.57H4.06v-8.45h2.58q1.44 0 2.16.6.7.61.7 1.84 0 .8-.36 1.32-.37.52-1.08.73zM5.2 73.97v3h1.44q.82 0 1.24-.38.42-.38.42-1.12 0-.75-.42-1.12-.42-.38-1.24-.38zm12.65-.3v1.2q-.58-.53-1.23-.8-.65-.26-1.39-.26-1.45 0-2.22.89-.77.88-.77 2.55t.77 2.56q.77.88 2.22.88.74 0 1.39-.26.65-.27 1.23-.8v1.19q-.6.4-1.27.6-.67.21-1.42.21-1.91 0-3.02-1.17-1.1-1.18-1.1-3.2 0-2.04 1.1-3.21 1.1-1.18 3.02-1.18.76 0 1.43.2.67.2 1.26.6zm1.76-.65h1.14v3.46h4.15v-3.46h1.15v8.45H24.9v-4.02h-4.15v4.02h-1.14zm12.39 0h5.34V74h-4.2v2.5h4.02v.96h-4.02v3.05h4.3v.97H32zm7.32 0h1.53l3.75 
7.07v-7.07h1.1v8.45h-1.53l-3.74-7.07v7.07h-1.11zm14.42 7.24V78h-1.87v-.93h3v3.62q-.67.47-1.46.71-.8.24-1.7.24-1.98 0-3.1-1.15-1.12-1.16-1.12-3.23 0-2.07 1.12-3.22 1.12-1.16 3.1-1.16.82 0 1.56.2.75.2 1.38.6v1.22q-.64-.54-1.35-.8-.71-.28-1.5-.28-1.55 0-2.33.86-.77.87-.77 2.58t.77 2.58q.78.86 2.33.86.6 0 1.08-.1.48-.1.86-.33zm3.21-7.24h1.14v8.45h-1.14zm3.42 0h1.54l3.74 7.07v-7.07h1.1v8.45h-1.53l-3.74-7.07v7.07h-1.11zm8.66 0h5.34V74h-4.2v2.5h4.02v.96h-4.02v3.05h4.3v.97h-5.44z" aria-label="SEARCH ENGINE" style="font-family:'ADLaM Display';white-space:pre;fill:currentColor"/></svg>
</a>
</div>
</h1>
<div class="wrapper-results">
<input type="text" name="q" value="{{ .Query }}" id="search-input" />
<button id="search-wrapper-ico" class="material-icons-round" name="t" value="music">&#xe8b6;</button>
<div class="autocomplete">
<ul></ul>
</div>
<input type="submit" class="hide" name="t" value="music" />
</div>
<div class="sub-search-button-wrapper">
<div class="search-container-results-btn">
<button id="sub-search-wrapper-ico" class="material-icons-round clickable" name="t" value="text">&#xe8b6;</button>
<button name="t" value="text" class="clickable">{{ translate "web" }}</button>
</div>
<div class="search-container-results-btn">
<button id="sub-search-wrapper-ico" class="material-icons-round clickable" name="t" value="image">&#xe3f4;</button>
<button name="t" value="image" class="clickable">{{ translate "images" }}</button>
</div>
<div class="search-container-results-btn">
<button id="sub-search-wrapper-ico" class="material-icons-round clickable" name="t" value="video">&#xe02c;</button>
<button name="t" value="video" class="clickable">{{ translate "videos" }}</button>
</div>
<div class="search-container-results-btn">
<button id="sub-search-wrapper-ico" class="material-icons-round clickable search-active" name="t" value="music">&#xe423;</button>
<button name="t" value="music" class="clickable search-active">{{ translate "music" }}</button>
</div>
<div class="search-container-results-btn">
<button id="sub-search-wrapper-ico" class="material-icons-round clickable" name="t" value="forum">&#xe0bf;</button>
<button name="t" value="forum" class="clickable">{{ translate "forums" }}</button>
</div>
<div id="content2" class="js-enabled">
<div class="search-container-results-btn">
<button id="sub-search-wrapper-ico" class="material-icons-round clickable" name="t" value="map">&#xe55b;</button>
<button name="t" value="map" class="clickable">{{ translate "maps" }}</button>
</div>
</div>
<div class="search-container-results-btn">
<button id="sub-search-wrapper-ico" class="material-icons-round clickable" name="t" value="file">&#xe80d;</button>
<button name="t" value="file" class="clickable">{{ translate "torrents" }}</button>
</div>
</div>
</form>
<form class="results_settings" action="/music" method="get">
</form>
<p class="fetched fetched_dif_files">{{ translate "fetched_in" .Fetched }}</p>
<div class="results" id="results">
{{if .Results}}
{{range .Results}}
<div class="result-item music-item">
<div class="music-thumbnail">
<a href="{{.URL}}">
<img src="{{.Thumbnail}}" alt="{{.Title}} thumbnail" loading="lazy">
{{if .Duration}}<div class="duration-overlay">{{.Duration}}</div>{{end}}
</a>
</div>
<div class="music-info">
<a href="{{.URL}}"><h3 class="video_title">{{.Title}}</h3></a>
<div class="stats">
<span class="artist">{{.Artist}}</span>
<span class="pipe">|</span>
<span class="source">{{.Source}}</span>
</div>
</div>
</div>
{{end}}
{{else if .NoResults}}
<div class="no-results-found">
{{ translate "no_results_found" .Query }}<br>
{{ translate "suggest_rephrase" }}
</div>
{{else}}
<div class="no-results-found">{{ translate "no_more_results" }}</div>
{{end}}
</div>
<div id="message-bottom-right" class="message-bottom-right">
<span id="loading-text">{{ translate "searching_for_new_results" }}</span><span class="dot">.</span><span class="dot">.</span><span class="dot">.</span>
</div>
<div class="prev-next prev-img" id="prev-next">
<form action="/music" method="get">
<input type="hidden" name="q" value="{{ .Query }}">
<input type="hidden" name="t" value="music">
<noscript>
{{ if .HasPrevPage }}
<button type="submit" name="p" value="{{ sub .Page 1 }}">{{ translate "previous" }}</button>
{{ end }}
{{ if .HasNextPage }}
<button type="submit" name="p" value="{{ add .Page 1 }}">{{ translate "next" }}</button>
{{ end }}
</noscript>
</form>
</div>
<div id="template-data" data-page="{{ .Page }}" data-query="{{ .Query }}" data-type="music"></div>
<script defer src="/static/js/dynamicscrolling.js"></script>
<script defer src="/static/js/autocomplete.js"></script>
<script defer src="/static/js/minimenu.js"></script>
<script>
document.querySelectorAll('.js-enabled').forEach(el => el.classList.remove('js-enabled'));
// Handle music service selection; guard against the select being absent,
// since this template does not render a #musicServiceSelect element
const musicServiceSelect = document.getElementById('musicServiceSelect');
if (musicServiceSelect) {
musicServiceSelect.addEventListener('change', function() {
this.closest('form').submit();
});
}
</script>
</body>
</html>

View file

@ -122,13 +122,6 @@
</button> </button>
</div> </div>
<div class="icon-button">
<button id="sub-search-wrapper-ico-video" class="material-icons-round clickable" name="t" value="music">
<span class="material-icons-round">&#xe423;</span> <!-- 'note' icon -->
<p>{{ translate "music" }}</p>
</button>
</div>
<div class="icon-button"> <div class="icon-button">
<button id="sub-search-wrapper-ico-forum" class="material-icons-round clickable" name="t" value="forum"> <button id="sub-search-wrapper-ico-forum" class="material-icons-round clickable" name="t" value="forum">
<span class="material-icons-round">&#xe0bf;</span> <!-- 'forum' icon --> <span class="material-icons-round">&#xe0bf;</span> <!-- 'forum' icon -->

View file

@ -10,7 +10,6 @@
<link rel="stylesheet" href="/static/css/style.css"> <link rel="stylesheet" href="/static/css/style.css">
<link rel="stylesheet" href="/static/css/{{.Theme}}.css"> <link rel="stylesheet" href="/static/css/{{.Theme}}.css">
<link rel="stylesheet" href="/static/css/style-fonts.css"> <link rel="stylesheet" href="/static/css/style-fonts.css">
<link rel="stylesheet" href="/static/css/style-loadingindicator.css">
<link rel="stylesheet" href="/static/css/style-menu.css"> <link rel="stylesheet" href="/static/css/style-menu.css">
<link rel="search" type="application/opensearchdescription+xml" title="{{ translate "site_name" }}" href="/opensearch.xml"> <link rel="search" type="application/opensearchdescription+xml" title="{{ translate "site_name" }}" href="/opensearch.xml">
<!-- Icons --> <!-- Icons -->
@ -105,10 +104,6 @@
<button id="sub-search-wrapper-ico" class="material-icons-round clickable" name="t" value="video">&#xe02c;</button> <button id="sub-search-wrapper-ico" class="material-icons-round clickable" name="t" value="video">&#xe02c;</button>
<button name="t" value="video" class="clickable">{{ translate "videos" }}</button> <button name="t" value="video" class="clickable">{{ translate "videos" }}</button>
</div> </div>
<div class="search-container-results-btn">
<button id="sub-search-wrapper-ico" class="material-icons-round clickable" name="t" value="music">&#xe423;</button>
<button name="t" value="music" class="clickable">{{ translate "music" }}</button>
</div>
<div class="search-container-results-btn"> <div class="search-container-results-btn">
<button id="sub-search-wrapper-ico" class="material-icons-round clickable" name="t" value="forum">&#xe0bf;</button> <button id="sub-search-wrapper-ico" class="material-icons-round clickable" name="t" value="forum">&#xe0bf;</button>
<button name="t" value="forum" class="clickable">{{ translate "forums" }}</button> <button name="t" value="forum" class="clickable">{{ translate "forums" }}</button>
@ -138,8 +133,6 @@
</select> </select>
<button class="results-save" name="t" value="text">{{ translate "save_settings" }}</button> <button class="results-save" name="t" value="text">{{ translate "save_settings" }}</button>
</form> </form>
<p class="fetched fetched_dif_files fetched_tor">{{ translate "fetched_in" .Fetched }}</p>
<div class="results" id="results"> <div class="results" id="results">
{{if .Results}} {{if .Results}}
{{range .Results}} {{range .Results}}
@ -159,8 +152,8 @@
<div class="no-results-found">{{ translate "no_more_results" }}</div> <div class="no-results-found">{{ translate "no_more_results" }}</div>
{{end}} {{end}}
</div> </div>
<div id="message-bottom-right" class="message-bottom-right"> <div class="message-bottom-left" id="message-bottom-left">
<span id="loading-text">{{ translate "searching_for_new_results" }}</span><span class="dot">.</span><span class="dot">.</span><span class="dot">.</span> <span>{{ translate "searching_for_new_results" }}</span>
</div> </div>
<div class="prev-next prev-img" id="prev-next"> <div class="prev-next prev-img" id="prev-next">
<form action="/search" method="get"> <form action="/search" method="get">

View file

@ -103,10 +103,6 @@
<button id="sub-search-wrapper-ico" class="material-icons-round clickable search-active" name="t" value="video">&#xe02c;</button> <button id="sub-search-wrapper-ico" class="material-icons-round clickable search-active" name="t" value="video">&#xe02c;</button>
<button name="t" value="video" class="clickable search-active">{{ translate "videos" }}</button> <button name="t" value="video" class="clickable search-active">{{ translate "videos" }}</button>
</div> </div>
<div class="search-container-results-btn">
<button id="sub-search-wrapper-ico" class="material-icons-round clickable" name="t" value="music">&#xe423;</button>
<button name="t" value="music" class="clickable">{{ translate "music" }}</button>
</div>
<div class="search-container-results-btn"> <div class="search-container-results-btn">
<button id="sub-search-wrapper-ico" class="material-icons-round clickable" name="t" value="forum">&#xe0bf;</button> <button id="sub-search-wrapper-ico" class="material-icons-round clickable" name="t" value="forum">&#xe0bf;</button>
<button name="t" value="forum" class="clickable">{{ translate "forums" }}</button> <button name="t" value="forum" class="clickable">{{ translate "forums" }}</button>
@ -124,7 +120,7 @@
</div> </div>
</form> </form>
<!-- Results go here --> <!-- Results go here -->
<p class="fetched fetched_dif_videos fetched_vid">{{ translate "fetched_in" .Fetched }}</p> <p class="fetched fetched_dif fetched_vid">{{ translate "fetched_in" .Fetched }}</p>
{{ if .Results }} {{ if .Results }}
{{ range .Results }} {{ range .Results }}
<div> <div>

View file

@@ -2,6 +2,7 @@ package tests
 import (
     "bufio"
+    "context"
     "crypto/rand"
     "encoding/json"
     "fmt"
@@ -9,7 +10,9 @@ import (
     "math/big"
     "net/http"
     "net/url"
+    "os"
     "os/exec"
+    "path/filepath"
     "sync"
     "syscall"
     "testing"
@@ -53,27 +56,50 @@ func TestApplication(t *testing.T) {
     // Ensure the test runs from the root directory
     rootDir := "../" // Path to the root directory of the repository

-    // Run the application using `run.sh`
-    runCmd := exec.Command("sh", "./run.sh", "--skip-config-check")
-    runCmd.Dir = rootDir
+    // Build the application using `run.sh --build`
+    buildCmd := exec.Command("sh", "./run.sh", "--build")
+    buildCmd.Dir = rootDir
+    buildOutput, err := buildCmd.CombinedOutput()
+    if err != nil {
+        t.Fatalf("Failed to build application: %v\nOutput:\n%s", err, string(buildOutput))
+    }
+    t.Log("Application built successfully")
+
+    // Path to the built executable relative to rootDir
+    executablePath := "./qgato" // Since cmd.Dir is rootDir, this path is relative to rootDir
+
+    // Ensure the executable has execute permissions
+    execFullPath := filepath.Join(rootDir, "qgato")
+    if err := os.Chmod(execFullPath, 0755); err != nil {
+        t.Fatalf("Failed to set execute permissions on the executable: %v", err)
+    }
+
+    // Create a context with cancellation
+    ctx, cancel := context.WithCancel(context.Background())
+    defer cancel() // Ensure resources are cleaned up
+
+    // Start the application using the built executable
+    cmd := exec.CommandContext(ctx, executablePath, "--skip-config-check")
+    cmd.Dir = rootDir // Set the working directory to the root directory

     // Set process group ID so we can kill it and its children
-    runCmd.SysProcAttr = &syscall.SysProcAttr{
+    cmd.SysProcAttr = &syscall.SysProcAttr{
         Setpgid: true,
     }

     // Capture application output for logging
-    appStdout, err := runCmd.StdoutPipe()
+    appStdout, err := cmd.StdoutPipe()
     if err != nil {
         t.Fatalf("Failed to capture stdout: %v", err)
     }
-    appStderr, err := runCmd.StderrPipe()
+    appStderr, err := cmd.StderrPipe()
     if err != nil {
         t.Fatalf("Failed to capture stderr: %v", err)
     }

     // Start the application
-    if err := runCmd.Start(); err != nil {
+    if err := cmd.Start(); err != nil {
         t.Fatalf("Failed to start application: %v", err)
     }
@@ -94,28 +120,28 @@ func TestApplication(t *testing.T) {
     // Defer cleanup to ensure process is killed after the test
     defer func() {
         // Kill the process group
-        pgid, err := syscall.Getpgid(runCmd.Process.Pid)
+        pgid, err := syscall.Getpgid(cmd.Process.Pid)
         if err == nil {
             syscall.Kill(-pgid, syscall.SIGKILL)
         } else {
             t.Logf("Failed to get process group ID: %v", err)
-            runCmd.Process.Kill()
+            cmd.Process.Kill()
         }
-        runCmd.Wait()
+        cmd.Wait()
         // Print summary
         printSummary(summary, t)
     }()

     // Wait for the server to start
-    if !waitForServer("http://localhost:5000", 600*time.Second) {
+    if !waitForServer("http://localhost:5000", 15*time.Second) {
         t.Fatalf("Server did not start within the expected time")
     }

     t.Log("Application is running")

     // Create a process instance for the application
-    appProcess, err := process.NewProcess(int32(runCmd.Process.Pid))
+    appProcess, err := process.NewProcess(int32(cmd.Process.Pid))
     if err != nil {
         t.Fatalf("Failed to create process instance: %v", err)
     }
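
For reference, the teardown in this hunk relies on the standard POSIX process-group pattern: `Setpgid` puts the child into its own group, and signalling the negative PGID reaches the child and everything it spawned. A minimal self-contained sketch (the `sh -c 'sleep 60 & sleep 60'` child is illustrative only):

```go
package main

import (
	"os/exec"
	"syscall"
	"time"
)

func main() {
	// Start a child in its own process group so one signal can reach
	// both the shell and anything it forks.
	cmd := exec.Command("sh", "-c", "sleep 60 & sleep 60")
	cmd.SysProcAttr = &syscall.SysProcAttr{Setpgid: true}
	if err := cmd.Start(); err != nil {
		panic(err)
	}

	time.Sleep(time.Second)

	// A negative PID targets the whole process group.
	if pgid, err := syscall.Getpgid(cmd.Process.Pid); err == nil {
		syscall.Kill(-pgid, syscall.SIGKILL)
	}
	cmd.Wait()
}
```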
@@ -2,7 +2,7 @@ package main
 import (
     "fmt"
-    "io"
+    "io/ioutil"
     "net/http"
     "net/url"
     "strings"
@@ -17,83 +17,62 @@ func PerformBraveTextSearch(query, safe, lang string, offset int) ([]TextSearchResult, time.Duration, error) {
     var results []TextSearchResult

     // Build the search URL
-    searchURL := fmt.Sprintf("https://search.brave.com/search?q=%s", url.QueryEscape(query))
-    if offset > 1 {
-        searchURL += fmt.Sprintf("&offset=%d&spellcheck=0", offset-1)
-    }
+    searchURL := fmt.Sprintf("https://search.brave.com/search?q=%s&offset=%d", url.QueryEscape(query), offset)

     req, err := http.NewRequest("GET", searchURL, nil)
     if err != nil {
-        printWarn("Error creating request: %v", err)
         return nil, 0, fmt.Errorf("creating request: %v", err)
     }

-    TextUserAgent, err := GetUserAgent("Text-Search-Brave")
+    // Set headers including User-Agent
+    TextUserAgent, err := GetUserAgent("Text-Search")
     if err != nil {
-        printWarn("Error generating User-Agent: %v", err)
         return nil, 0, err
     }
     req.Header.Set("User-Agent", TextUserAgent)

-    // Single call to DoMetaProxyRequest:
-    resp, err := DoMetaProxyRequest(req)
+    // Perform the HTTP request
+    client := &http.Client{}
+    resp, err := client.Do(req)
     if err != nil {
-        printWarn("Error performing request: %v", err)
-        return nil, 0, fmt.Errorf("performing meta-request: %v", err)
+        return nil, 0, fmt.Errorf("performing request: %v", err)
     }
     defer resp.Body.Close()

     // Read the response body
-    body, err := io.ReadAll(resp.Body)
+    body, err := ioutil.ReadAll(resp.Body)
     if err != nil {
-        printWarn("Error reading response body: %v", err)
         return nil, 0, fmt.Errorf("reading response body: %v", err)
     }

     // Parse the response body
     doc, err := goquery.NewDocumentFromReader(strings.NewReader(string(body)))
     if err != nil {
-        return nil, 0, fmt.Errorf("parsing HTML: %v", err)
+        return nil, 0, fmt.Errorf("parsing response body: %v", err)
     }

-    // Only grab .snippet blocks that have data-type="web"
-    doc.Find(`.snippet[data-type="web"]`).Each(func(i int, s *goquery.Selection) {
-        // The main clickable link is <a ... class="heading-serpresult">
-        anchor := s.Find("a.heading-serpresult").First()
-        link, ok := anchor.Attr("href")
-        if !ok || link == "" {
-            return
-        }
-        // Title is inside <div class="title">
-        title := strings.TrimSpace(anchor.Find(".title").Text())
-        if title == "" {
-            // fallback if the .title is slightly off in the DOM
-            title = strings.TrimSpace(s.Find(".title").Text())
-        }
-        // Description is inside <div class="snippet-description">
-        desc := strings.TrimSpace(s.Find(".snippet-description").Text())
-        // Add only if everything is non-empty
-        if title != "" && desc != "" {
+    // Extract search results
+    doc.Find(".snippet").Each(func(i int, s *goquery.Selection) {
+        title := s.Find(".title").Text()
+        description := s.Find(".snippet-description").Text()
+        url, exists := s.Find("a").Attr("href")
+        // Add to results only if all components are present
+        if title != "" && description != "" && exists && url != "" {
             results = append(results, TextSearchResult{
                 Header:      title,
-                URL:         link,
-                Description: desc,
+                URL:         url,
+                Description: description,
             })
         }
     })

-    duration := time.Since(startTime)
+    duration := time.Since(startTime) // Calculate the duration

     // Return an error if no results are found
     if len(results) == 0 {
-        printDebug("No results found for query")
         return nil, duration, fmt.Errorf("no results found")
     }

-    printDebug("Search completed successfully found %d results", len(results))
     return results, duration, nil
 }
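
`DoMetaProxyRequest`, removed on the main side of this hunk, is not defined anywhere in this diff. Judging from the inline fallback visible in the SearX hunk further down, a plausible shape is the following sketch; the globals stand in for the repository's `config.MetaProxyEnabled` flag and `metaProxyClient`, and this is an assumption, not the actual implementation:

```go
package main

import (
	"net/http"
	"time"
)

// Hypothetical stand-ins for the repository's config flag and proxy client.
var (
	metaProxyEnabled bool
	metaProxyClient  *http.Client
)

// DoMetaProxyRequest routes the request through the meta-proxy client when
// one is configured, and otherwise falls back to a plain client with a timeout.
func DoMetaProxyRequest(req *http.Request) (*http.Response, error) {
	if metaProxyEnabled && metaProxyClient != nil {
		return metaProxyClient.Do(req)
	}
	client := &http.Client{Timeout: 10 * time.Second}
	return client.Do(req)
}
```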
@@ -16,36 +16,21 @@ func PerformDuckDuckGoTextSearch(query, safe, lang string, page int) ([]TextSearchResult, time.Duration, error) {
     var results []TextSearchResult
     searchURL := buildDuckDuckGoSearchURL(query, page)

-    // Create a request
-    req, err := http.NewRequest("GET", searchURL, nil)
-    if err != nil {
-        return nil, 0, fmt.Errorf("creating request: %v", err)
-    }
-
-    userAgent, err := GetUserAgent("duckduck-text-search")
-    if err != nil {
-        return nil, 0, err
-    }
-    req.Header.Set("User-Agent", userAgent)
-
-    resp, err := DoMetaProxyRequest(req)
-    if err != nil {
-        return nil, 0, fmt.Errorf("failed to do meta-request: %v", err)
-    }
+    resp, err := http.Get(searchURL)
+    if err != nil {
+        return nil, 0, fmt.Errorf("making request: %v", err)
+    }
     defer resp.Body.Close()

-    // Check for HTTP status code
     if resp.StatusCode != http.StatusOK {
         return nil, 0, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
     }

-    // Parse HTML response
     doc, err := goquery.NewDocumentFromReader(resp.Body)
     if err != nil {
         return nil, 0, fmt.Errorf("loading HTML document: %v", err)
     }

-    // Extract results from the page
     doc.Find(".result__body").Each(func(i int, s *goquery.Selection) {
         header := s.Find(".result__a").Text()
         description := s.Find(".result__snippet").Text()
@@ -11,46 +11,45 @@ import (
 )

 func PerformGoogleTextSearch(query, safe, lang string, page int) ([]TextSearchResult, time.Duration, error) {
-    startTime := time.Now()
     const resultsPerPage = 10
+    var results []TextSearchResult

-    // 1) Build the search URL
+    startTime := time.Now() // Start the timer
+    client := &http.Client{}
     searchURL := buildSearchURL(query, safe, lang, page, resultsPerPage)

-    // 2) Create a new request
     req, err := http.NewRequest("GET", searchURL, nil)
     if err != nil {
         return nil, 0, fmt.Errorf("failed to create request: %v", err)
     }

-    // 3) Generate and set a User-Agent header
-    userAgent, err := GetUserAgent("Google-Text-Search")
+    // User Agent generation
+    TextUserAgent, err := GetUserAgent("Text-Search")
     if err != nil {
         return nil, 0, err
     }
-    req.Header.Set("User-Agent", userAgent)

-    // 4) Use the meta-proxy wrapper
-    resp, err := DoMetaProxyRequest(req)
+    req.Header.Set("User-Agent", TextUserAgent)
+
+    resp, err := client.Do(req)
     if err != nil {
-        return nil, 0, fmt.Errorf("failed to do meta-request: %v", err)
+        return nil, 0, fmt.Errorf("making request: %v", err)
     }
     defer resp.Body.Close()

-    // 5) Check HTTP status
     if resp.StatusCode != http.StatusOK {
         return nil, 0, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
     }

-    // 6) Parse the HTML response
     doc, err := goquery.NewDocumentFromReader(resp.Body)
     if err != nil {
         return nil, 0, fmt.Errorf("loading HTML document: %v", err)
     }

-    results := parseResults(doc)
-    // 7) Calculate duration
-    duration := time.Since(startTime)
+    results = parseResults(doc)
+    duration := time.Since(startTime) // Calculate the duration

     if len(results) == 0 {
         printDebug("No results found from Google Search")
@@ -66,7 +65,7 @@ func buildSearchURL(query, safe, lang string, page, resultsPerPage int) string {
     }

     langParam := ""
-    glParam := ""
+    var glParam string

     if lang != "" {
         // Use lang as the geolocation
@@ -78,16 +77,12 @@ func buildSearchURL(query, safe, lang string, page, resultsPerPage int) string {
     }

     startIndex := (page - 1) * resultsPerPage
-    udmParam := "&udm=14" // Add the required parameter

-    // Build the URL string
-    baseURL := "https://www.google.com/search?q=%s%s%s%s&start=%d%s"
-    fullURL := fmt.Sprintf(baseURL, url.QueryEscape(query), safeParam, langParam, glParam, startIndex, udmParam)
+    printDebug(fmt.Sprintf("https://www.google.com/search?q=%s%s%s%s&start=%d",
+        url.QueryEscape(query), safeParam, langParam, glParam, startIndex))

-    // Debug print
-    printDebug("%s", fullURL)
-    return fullURL
+    return fmt.Sprintf("https://www.google.com/search?q=%s%s%s%s&start=%d",
+        url.QueryEscape(query), safeParam, langParam, glParam, startIndex)
 }

 // func getRandomGeoLocation() (string, string) {
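
To make the paging arithmetic concrete: with page 2 and 10 results per page, `startIndex` is `(2-1)*10 = 10`. A tiny sketch of the URL the old builder produces, assuming the safe, lang, and gl parameters all end up empty (illustrative only):

```go
package main

import (
	"fmt"
	"net/url"
)

func main() {
	query, page, resultsPerPage := "golang", 2, 10
	startIndex := (page - 1) * resultsPerPage // (2-1)*10 = 10
	fmt.Printf("https://www.google.com/search?q=%s&start=%d&udm=14\n",
		url.QueryEscape(query), startIndex)
	// Output: https://www.google.com/search?q=golang&start=10&udm=14
}
```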
@@ -3,11 +3,14 @@ package main
 import (
     "encoding/json"
     "fmt"
+    "log"
     "net/http"
     "net/url"
     "time"
 )

+const LIBREX_DOMAIN = "librex.antopie.org"
+
 type LibreXResult struct {
     Title       string `json:"title"`
     URL         string `json:"url"`
@@ -17,65 +20,65 @@ type LibreXResult struct {
 type LibreXResponse []LibreXResult

 func PerformLibreXTextSearch(query, safe, lang string, page int) ([]TextSearchResult, time.Duration, error) {
-    startTime := time.Now()
-    // LibreX uses offset instead of page (starting at 0)
-    pageOffset := (page - 1) * 10
+    startTime := time.Now() // Start the timer
+    // LibreX/Y uses an offset instead of a page number, starting at 0
+    page--
+    page = page * 10

-    // Generate User-Agent
+    searchURL := fmt.Sprintf("https://%s/api.php?q=%s&p=%d&t=0", LIBREX_DOMAIN, url.QueryEscape(query), page)
+
+    // User Agent generation
     userAgent, err := GetUserAgent("librex-text-search")
     if err != nil {
         return nil, 0, err
     }

-    var allResults []TextSearchResult
-
-    for _, domain := range config.LibreXInstances {
-        searchURL := fmt.Sprintf("https://%s/api.php?q=%s&p=%d&t=0",
-            domain,
-            url.QueryEscape(query),
-            pageOffset,
-        )
-
-        req, err := http.NewRequest("GET", searchURL, nil)
-        if err != nil {
-            printWarn("failed to create request for domain %s: %v", domain, err)
-            continue
-        }
-        req.Header.Set("User-Agent", userAgent)
-
-        resp, err := DoMetaProxyRequest(req)
-        if err != nil {
-            return nil, 0, fmt.Errorf("failed to do meta-request: %v", err)
-        }
-        defer resp.Body.Close()
-
-        if resp.StatusCode != http.StatusOK {
-            printWarn("unexpected status code from %s: %d", domain, resp.StatusCode)
-            continue
-        }
-
-        var librexResp LibreXResponse
-        if err := json.NewDecoder(resp.Body).Decode(&librexResp); err != nil {
-            printWarn("error decoding response from %s: %v", domain, err)
-            continue
-        }
-
-        // Accumulate results from this instance
-        for _, item := range librexResp {
-            allResults = append(allResults, TextSearchResult{
-                URL:         item.URL,
-                Header:      item.Title,
-                Description: item.Description,
-                Source:      "LibreX",
-            })
-        }
+    req, err := http.NewRequest("GET", searchURL, nil)
+    if err != nil {
+        return nil, 0, err
     }
+    req.Header.Set("User-Agent", userAgent)

-    duration := time.Since(startTime)
-    if len(allResults) == 0 {
-        return nil, duration, fmt.Errorf("no results found from any LibreX instance")
+    client := &http.Client{}
+    resp, err := client.Do(req)
+    if err != nil {
+        return nil, 0, logError("error making request to LibreX", err)
     }
+    defer resp.Body.Close()

-    return allResults, duration, nil
+    if resp.StatusCode != http.StatusOK {
+        return nil, 0, logError("unexpected status code", fmt.Errorf("%d", resp.StatusCode))
+    }
+
+    var librexResp LibreXResponse
+    if err := json.NewDecoder(resp.Body).Decode(&librexResp); err != nil {
+        return nil, 0, logError("error decoding LibreX response", err)
+    }
+
+    var results []TextSearchResult
+    for _, item := range librexResp {
+        result := TextSearchResult{
+            URL:         item.URL,
+            Header:      item.Title,
+            Description: item.Description,
+            Source:      "LibreX",
+        }
+        results = append(results, result)
+    }
+
+    duration := time.Since(startTime) // Calculate the duration
+    if len(results) == 0 {
+        return nil, duration, fmt.Errorf("no results found")
+    }
+
+    return results, duration, nil
 }

+// This is just stupid, it will probably lead to printing the error twice
+func logError(message string, err error) error {
+    log.Printf("%s: %v", message, err)
+    return fmt.Errorf("%s: %w", message, err)
+}
@@ -26,11 +26,9 @@ type QwantTextAPIResponse struct {
 }

 // PerformQwantTextSearch contacts the Qwant API and returns a slice of TextSearchResult
-func PerformQwantTextSearch(query, safe, lang string, page int) ([]TextSearchResult, time.Duration, error) {
-    startTime := time.Now()
+func PerformQwantTextSearch(query, safe, lang string) ([]TextSearchResult, error) {
     const resultsPerPage = 10
-    offset := (page - 1) * resultsPerPage
+    const offset = 0

     // Ensure safe search is disabled by default if not specified
     if safe == "" {
@@ -42,44 +40,39 @@ func PerformQwantTextSearch(query, safe, lang string, page int) ([]TextSearchResult, time.Duration, error) {
         lang = "en_CA"
     }

-    apiURL := fmt.Sprintf(
-        "https://api.qwant.com/v3/search/web?q=%s&count=%d&locale=%s&offset=%d&device=desktop",
+    apiURL := fmt.Sprintf("https://api.qwant.com/v3/search/web?q=%s&count=%d&locale=%s&offset=%d&device=desktop",
         url.QueryEscape(query),
         resultsPerPage,
         lang,
-        offset,
-    )
+        offset)
+
+    client := &http.Client{Timeout: 10 * time.Second}

     req, err := http.NewRequest("GET", apiURL, nil)
     if err != nil {
-        // Return three values: nil for the slice, 0 for duration, error for the third.
-        return nil, 0, fmt.Errorf("creating request: %v", err)
+        return nil, fmt.Errorf("creating request: %v", err)
     }

-    userAgent, err := GetUserAgent("Quant-Text-Search")
-    if err != nil {
-        return nil, 0, err
-    }
-    req.Header.Set("User-Agent", userAgent)
+    req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36")

-    resp, err := DoMetaProxyRequest(req)
+    resp, err := client.Do(req)
     if err != nil {
-        return nil, 0, fmt.Errorf("failed to do meta-request: %v", err)
+        return nil, fmt.Errorf("making request: %v", err)
     }
     defer resp.Body.Close()

     if resp.StatusCode != http.StatusOK {
-        return nil, 0, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
+        return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
     }

     var apiResp QwantTextAPIResponse
     if err := json.NewDecoder(resp.Body).Decode(&apiResp); err != nil {
-        return nil, 0, fmt.Errorf("decoding response: %v", err)
+        return nil, fmt.Errorf("decoding response: %v", err)
     }

     // Extracting results from the nested JSON structure
     if len(apiResp.Data.Result.Items.Mainline) == 0 {
-        return nil, 0, fmt.Errorf("no search results found")
+        return nil, fmt.Errorf("no search results found")
     }

     var results []TextSearchResult
@@ -93,8 +86,7 @@ func PerformQwantTextSearch(query, safe, lang string, page int) ([]TextSearchResult, time.Duration, error) {
         })
     }

-    duration := time.Since(startTime)
-    return results, duration, nil
+    return results, nil
 }

 // cleanQwantURL extracts the main part of the URL, removing tracking information
@@ -3,56 +3,31 @@ package main
 import (
     "encoding/json"
     "fmt"
-    "io"
+    "io/ioutil"
+    "log"
     "net/http"
-    "net/url"
-    "strings"
     "time"
 )

 type Instance struct {
-    URL             string                 `json:"-"` // Populated from map key
-    Analytics       bool                   `json:"analytics"`
-    Comments        []string               `json:"comments"`
-    AlternativeUrls map[string]interface{} `json:"alternativeUrls"`
-    Main            bool                   `json:"main"`
-    NetworkType     string                 `json:"network_type"`
-    HTTP            struct {
-        StatusCode int    `json:"status_code"`
-        Error      string `json:"error"`
-    } `json:"http"`
-    Version    string       `json:"version"`
-    Grade      string       `json:"grade"`
-    GradeURL   string       `json:"gradeUrl"`
-    Generator  string       `json:"generator"`
-    ContactURL FlexibleType `json:"contact_url"` // Custom type
-    DocsURL    string       `json:"docs_url"`
-}
-
-type FlexibleType struct {
-    StringValue string
-    BoolValue   bool
-    IsString    bool
+    URL      string `json:"url"`
+    Status   int    `json:"status"`
+    SSLGrade string `json:"ssl_grade"`
 }

 const searxInstancesURL = "https://searx.space/data/instances.json"

-// FetchInstances fetches available SearX instances from the registry.
 func fetchInstances() ([]Instance, error) {
+    client := &http.Client{Timeout: 10 * time.Second}
     req, err := http.NewRequest("GET", searxInstancesURL, nil)
     if err != nil {
-        return nil, fmt.Errorf("creating request: %v", err)
+        return nil, err
     }
+    req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36")

-    XNGUserAgent, err := GetUserAgent("Text-Search-XNG")
+    resp, err := client.Do(req)
     if err != nil {
-        return nil, fmt.Errorf("generating User-Agent: %v", err)
-    }
-    req.Header.Set("User-Agent", XNGUserAgent)
-
-    resp, err := DoMetaProxyRequest(req)
-    if err != nil {
-        return nil, fmt.Errorf("failed to do meta-request: %v", err)
+        return nil, err
     }
     defer resp.Body.Close()
@@ -60,105 +35,44 @@ func fetchInstances() ([]Instance, error) {
         return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
     }

-    body, err := io.ReadAll(resp.Body)
+    body, err := ioutil.ReadAll(resp.Body)
     if err != nil {
-        return nil, fmt.Errorf("reading response body: %v", err)
+        return nil, err
     }

-    // Root structure of the JSON response
-    var root struct {
-        Instances map[string]Instance `json:"instances"`
-    }
-
-    // Unmarshal JSON into the root structure
-    err = json.Unmarshal(body, &root)
-    if err != nil {
-        return nil, fmt.Errorf("parsing response JSON: %v", err)
-    }
-
-    // Collect instances into a slice
     var instances []Instance
-    for url, instance := range root.Instances {
-        instance.URL = url // Assign the URL from the map key
-        instances = append(instances, instance)
+    err = json.Unmarshal(body, &instances)
+    if err != nil {
+        return nil, err
     }

     return instances, nil
 }

-// UnmarshalJSON implements custom unmarshalling for FlexibleType.
-func (f *FlexibleType) UnmarshalJSON(data []byte) error {
-    // Try to unmarshal as a string
-    var str string
-    if err := json.Unmarshal(data, &str); err == nil {
-        f.StringValue = str
-        f.IsString = true
-        return nil
-    }
-
-    // Try to unmarshal as a bool
-    var b bool
-    if err := json.Unmarshal(data, &b); err == nil {
-        f.BoolValue = b
-        f.IsString = false
-        return nil
-    }
-
-    // Return an error if neither works
-    return fmt.Errorf("invalid FlexibleType: %s", string(data))
-}
-
-// String returns the string representation of FlexibleType.
-func (f FlexibleType) String() string {
-    if f.IsString {
-        return f.StringValue
-    }
-    return fmt.Sprintf("%v", f.BoolValue)
-}
-
-// ValidateInstance checks if a SearX instance is valid by performing a test query.
 func validateInstance(instance Instance) bool {
-    // Skip .onion instances
-    if strings.Contains(instance.URL, ".onion") {
-        printDebug("Skipping .onion instance: %s", instance.URL)
-        return false
-    }
-
-    client := &http.Client{
-        Timeout: 10 * time.Second,
-    }
-
-    testURL := fmt.Sprintf("%s/search?q=test&categories=general&language=en&safe_search=1&page=1&format=json", instance.URL)
-    req, err := http.NewRequest("GET", testURL, nil)
+    client := &http.Client{Timeout: 10 * time.Second}
+    req, err := http.NewRequest("GET", fmt.Sprintf("%s/search?q=test&categories=general&language=en&safe_search=1&page=1&format=json", instance.URL), nil)
     if err != nil {
-        printDebug("Error creating SearchXNG request for instance validation: %v", err)
+        log.Printf("Error creating request for URL: %s, Error: %v", instance.URL, err)
         return false
     }

-    XNGUserAgent, err := GetUserAgent("Text-Search-XNG")
-    if err != nil {
-        printWarn("Error generating User-Agent: %v", err)
-        return false
-    }
-    req.Header.Set("User-Agent", XNGUserAgent)
+    req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36")

     resp, err := client.Do(req)
     if err != nil {
-        printDebug("Error performing request for SearchXNG instance validation: %v", err)
+        log.Printf("Error performing request for URL: %s, Error: %v", instance.URL, err)
         return false
     }
     defer resp.Body.Close()

     if resp.StatusCode != http.StatusOK {
-        printDebug("SearchXNG Instance validation failed. StatusCode: %d", resp.StatusCode)
+        log.Printf("Instance validation failed for URL: %s, StatusCode: %d", instance.URL, resp.StatusCode)
         return false
     }

-    // Successful validation
     return true
 }

-// GetValidInstance fetches and validates SearX instances, returning a valid one.
 func getValidInstance() (*Instance, error) {
     instances, err := fetchInstances()
     if err != nil {
@@ -174,81 +88,11 @@ func getValidInstance() (*Instance, error) {
     return nil, fmt.Errorf("no valid SearX instances found")
 }

-// PerformSearXTextSearch performs a text search using a SearX instance.
-func PerformSearXTextSearch(query, categories, language string, page int) ([]TextSearchResult, time.Duration, error) {
-    // Default value for "safe" search
-    safe := "1"
-
-    startTime := time.Now() // Start the timer
-    var results []TextSearchResult
-
-    instance, err := getValidInstance()
-    if err != nil {
-        return nil, 0, fmt.Errorf("failed to get a valid SearX instance: %w", err)
-    }
-
-    searchURL := fmt.Sprintf("%s/search?q=%s&categories=%s&language=%s&safe_search=%s&page=%d&format=json",
-        instance.URL, url.QueryEscape(query), categories, language, safe, page)
-
-    req, err := http.NewRequest("GET", searchURL, nil)
-    if err != nil {
-        return nil, 0, fmt.Errorf("creating request: %v", err)
-    }
-
-    XNGUserAgent, err := GetUserAgent("Text-Search-XNG")
-    if err != nil {
-        return nil, 0, fmt.Errorf("generating User-Agent: %v", err)
-    }
-    req.Header.Set("User-Agent", XNGUserAgent)
-
-    var resp *http.Response
-    if config.MetaProxyEnabled && metaProxyClient != nil {
-        resp, err = metaProxyClient.Do(req)
-    } else {
-        client := &http.Client{Timeout: 10 * time.Second}
-        resp, err = client.Do(req)
-    }
-    if err != nil {
-        return nil, 0, fmt.Errorf("performing request: %v", err)
-    }
-    defer resp.Body.Close()
-
-    body, err := io.ReadAll(resp.Body)
-    if err != nil {
-        return nil, 0, fmt.Errorf("reading response body: %v", err)
-    }
-
-    // Parse the JSON response to extract search results
-    var response map[string]interface{}
-    err = json.Unmarshal(body, &response)
-    if err != nil {
-        return nil, 0, fmt.Errorf("parsing response JSON: %v", err)
-    }
-
-    // Extract search results
-    if items, ok := response["results"].([]interface{}); ok {
-        for _, item := range items {
-            if result, ok := item.(map[string]interface{}); ok {
-                title := strings.TrimSpace(fmt.Sprintf("%v", result["title"]))
-                url := strings.TrimSpace(fmt.Sprintf("%v", result["url"]))
-                description := strings.TrimSpace(fmt.Sprintf("%v", result["content"]))
-                results = append(results, TextSearchResult{
-                    Header:      title,
-                    URL:         url,
-                    Description: description,
-                })
-            }
-        }
-    }
-
-    duration := time.Since(startTime) // Calculate the duration
-
-    if len(results) == 0 {
-        printDebug("No results found for query: %s", query)
-        return nil, duration, fmt.Errorf("no results found")
-    }
-
-    printDebug("Search completed successfully for query: %s, found %d results", query, len(results))
-    return results, duration, nil
-}
+// func main() {
+//     instance, err := getValidInstance()
+//     if err != nil {
+//         log.Fatalf("Failed to get a valid SearX instance: %v", err)
+//     }
+//     log.Printf("Selected SearX instance: %s", instance.URL)
+// }
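
The `FlexibleType` removed above is a string-or-bool JSON shim: the removed `ContactURL` field is decoded as either a string or a bool, depending on what the registry returns. A short usage sketch of the same technique (the sample payloads are illustrative, not taken from searx.space):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// FlexibleType decodes a JSON value that may be either a string or a bool,
// mirroring the type removed in the hunk above.
type FlexibleType struct {
	StringValue string
	BoolValue   bool
	IsString    bool
}

func (f *FlexibleType) UnmarshalJSON(data []byte) error {
	// Try a string first, then fall back to a bool.
	var s string
	if err := json.Unmarshal(data, &s); err == nil {
		f.StringValue, f.IsString = s, true
		return nil
	}
	var b bool
	if err := json.Unmarshal(data, &b); err == nil {
		f.BoolValue = b
		return nil
	}
	return fmt.Errorf("invalid FlexibleType: %s", string(data))
}

func main() {
	type entry struct {
		ContactURL FlexibleType `json:"contact_url"`
	}
	var a, b entry
	json.Unmarshal([]byte(`{"contact_url": "mailto:admin@example.com"}`), &a)
	json.Unmarshal([]byte(`{"contact_url": false}`), &b)
	fmt.Println(a.ContactURL.IsString) // true
	fmt.Println(b.ContactURL.IsString) // false
}
```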
text.go
@@ -1,32 +1,20 @@
 package main

 import (
+    "fmt"
     "net/http"
     "time"
 )

 var textSearchEngines []SearchEngine

-var allTextSearchEngines = []SearchEngine{
-    {Name: "Google", Func: wrapTextSearchFunc(PerformGoogleTextSearch)},
-    {Name: "LibreX", Func: wrapTextSearchFunc(PerformLibreXTextSearch)},
-    {Name: "Brave", Func: wrapTextSearchFunc(PerformBraveTextSearch)},
-    {Name: "DuckDuckGo", Func: wrapTextSearchFunc(PerformDuckDuckGoTextSearch)},
-    {Name: "Quant", Func: wrapTextSearchFunc(PerformQwantTextSearch)}, // Broken !
-    //{Name: "SearXNG", Func: wrapTextSearchFunc(PerformSearXTextSearch)}, // bruh
-}
-
-func initTextEngines() {
-    // textSearchEngines is your final slice (already declared globally)
-    textSearchEngines = nil // or make([]SearchEngine, 0)
-
-    for _, engineName := range config.MetaSearch.Text {
-        for _, candidate := range allTextSearchEngines {
-            if candidate.Name == engineName {
-                textSearchEngines = append(textSearchEngines, candidate)
-                break
-            }
-        }
+func init() {
+    textSearchEngines = []SearchEngine{
+        {Name: "Google", Func: wrapTextSearchFunc(PerformGoogleTextSearch)},
+        {Name: "LibreX", Func: wrapTextSearchFunc(PerformLibreXTextSearch)},
+        {Name: "Brave", Func: wrapTextSearchFunc(PerformBraveTextSearch)},
+        {Name: "DuckDuckGo", Func: wrapTextSearchFunc(PerformDuckDuckGoTextSearch)},
+        // {Name: "SearXNG", Func: wrapTextSearchFunc(PerformSearXNGTextSearch), Weight: 2}, // Uncomment when implemented
     }
 }
@@ -50,7 +38,7 @@ func HandleTextSearch(w http.ResponseWriter, settings UserSettings, query string
     data := map[string]interface{}{
         "Results":     combinedResults,
         "Query":       query,
-        "Fetched":     FormatElapsedTime(elapsedTime),
+        "Fetched":     fmt.Sprintf("%.2f %s", elapsedTime.Seconds(), Translate("seconds")), // Time for fetching results
         "Page":        page,
         "HasPrevPage": page > 1,
         "HasNextPage": len(combinedResults) >= 50,
@@ -91,7 +79,7 @@ func getTextResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string,
             resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
         }
     } else {
-        textResults, _, _, _, _ := convertToSpecificResults(results)
+        textResults, _, _, _ := convertToSpecificResults(results)
         combinedResults = textResults
     }
 case <-time.After(2 * time.Second):
@@ -110,7 +98,7 @@ func prefetchPage(query, safe, lang string, page int) {
     cacheKey := CacheKey{Query: query, Page: page, Safe: safe == "active", Lang: lang, Type: "text"}
     if _, exists := resultsCache.Get(cacheKey); !exists {
         printInfo("Page %d not cached, caching now...", page)
-        if config.MetaSearchEnabled {
+        if config.CrawlerEnabled {
             pageResults := fetchTextResults(query, safe, lang, page)
             if len(pageResults) > 0 {
                 resultsCache.Set(cacheKey, convertToSearchResults(pageResults))
@@ -123,11 +111,10 @@ func prefetchPage(query, safe, lang string, page int) {
     }
 }

-// The logic in this function is rotating search engines instead of running them in order as noted in the wiki
 func fetchTextResults(query, safe, lang string, page int) []TextSearchResult {
     var results []TextSearchResult

-    if !config.MetaSearchEnabled {
+    if !config.CrawlerEnabled {
         printDebug("Crawler is disabled; fetching from local index.")

         // Calculate the starting position based on the page number
@@ -72,9 +72,8 @@ func saveUserSettings(w http.ResponseWriter, settings UserSettings) {
         Value:    cd.GetValue(settings),
         Path:     "/",
         Expires:  expiration,
-        Secure:   true, // Ensure HTTPS is required
-        HttpOnly: true,
-        SameSite: http.SameSiteStrictMode, // Restrict cross-site usage
+        Secure:   true,
+        SameSite: http.SameSiteStrictMode,
     })
 }

Some files were not shown because too many files have changed in this diff.