diff --git a/README.md b/README.md
index 0ce9dd5..5ad3337 100644
--- a/README.md
+++ b/README.md
@@ -47,11 +47,11 @@ A self-hosted private search engine designed to be scalable and more resource-ef
### For Self-Hosting
-- **[Easy to Set Up](https://weforge.xyz/Spitfire/Search/wiki/Setup-Other)** - Quick and straightforward setup process for anyone.
+- **Self-hosted option** - Run on your own server for even more privacy.
- **Lightweight** - Low memory footprint (15-30MiB) even during searches.
- **Decentralized** - No single point of failure.
- **Results caching in RAM** - Faster response times through caching.
-- **[Configurable](https://weforge.xyz/Spitfire/Search/wiki/Config)** - Fully customizable via the `config.ini` file.
+- **Configurable** - Tweak features via `config.ini`.
- **Flexible media support** - Images optionally stored on HDD/SSD for caching and improved response time.
### Results Sources
@@ -73,20 +73,30 @@ A self-hosted private search engine designed to be scalable and more resource-ef
### Running the QGato
+Linux:
+
```bash
git clone https://weforge.xyz/Spitfire/Search.git
cd Search
-go run .
+chmod +x ./run.sh
+./run.sh
+```
+
+Windows:
+
+```powershell
+git clone https://weforge.xyz/Spitfire/Search.git
+cd Search
+.\run.bat
```
*It's that easy!*
### Configuring
-- Configuration is done via the `config.ini` file.
-- On first start, you will be guided through the basic setup.
-- For more advanced configuration options, visit the [Wiki Configuration Page](https://weforge.xyz/Spitfire/Search/wiki/Configuration).
-
+Configuration is done via the `config.ini` file.
+On first start, you will be guided through the basic setup.
+Advanced setup and the full list of options will be documented here later, as this section is still being updated.
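For orientation, here is a minimal sketch of what a `config.ini` might look like. The section and key names are taken from what `saveConfig`/`loadConfig` write and read in `config.go` below; the values are just the defaults from `defaultConfig`, and any keys not visible in this diff are omitted:

```ini
[Features]
Nodes = false
Crawler = true
Indexer = false
Website = true
RamCache = true
DriveCache = false

[Indexer]
ConcurrentStandardCrawlers = 12
ConcurrentChromeCrawlers = 4
CrawlingInterval = 24h0m0s
MaxPagesPerDomain = 10
IndexBatchSize = 50
```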
## License
diff --git a/agent.go b/agent.go
index 90fb669..6333102 100755
--- a/agent.go
+++ b/agent.go
@@ -11,13 +11,11 @@ import (
"time"
)
-// BrowserVersion represents the version & global usage from the caniuse data
type BrowserVersion struct {
Version string `json:"version"`
Global float64 `json:"global"`
}
-// BrowserData holds sets of versions for Firefox and Chromium
type BrowserData struct {
Firefox []BrowserVersion `json:"firefox"`
Chromium []BrowserVersion `json:"chrome"`
@@ -30,7 +28,6 @@ var (
}{
data: make(map[string]string),
}
-
browserCache = struct {
sync.RWMutex
data BrowserData
@@ -40,19 +37,26 @@ var (
}
)
-// fetchLatestBrowserVersions retrieves usage data from caniuse.com’s fulldata JSON.
func fetchLatestBrowserVersions() (BrowserData, error) {
- const urlCaniuse = "https://raw.githubusercontent.com/Fyrd/caniuse/master/fulldata-json/data-2.0.json"
+ url := "https://raw.githubusercontent.com/Fyrd/caniuse/master/fulldata-json/data-2.0.json"
+
+ // // Optional: skip TLS verification to avoid certificate errors
+ // transport := &http.Transport{
+ // TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
+ // }
+
+ // Increase the HTTP client timeout
client := &http.Client{
Timeout: 30 * time.Second,
+ // Transport: transport,
}
- req, err := http.NewRequest("GET", urlCaniuse, nil)
+ // Build the request manually to set headers
+ req, err := http.NewRequest("GET", url, nil)
if err != nil {
return BrowserData{}, err
}
-
- // Set a simple custom User-Agent and language
+ // Custom user agent and English language preference
req.Header.Set("User-Agent", "MyCustomAgent/1.0 (compatible; +https://example.com)")
req.Header.Set("Accept-Language", "en-US,en;q=0.9")
@@ -67,42 +71,36 @@ func fetchLatestBrowserVersions() (BrowserData, error) {
return BrowserData{}, err
}
- var rawData map[string]any
+ var rawData map[string]interface{}
if err := json.Unmarshal(body, &rawData); err != nil {
return BrowserData{}, err
}
- stats, ok := rawData["agents"].(map[string]any)
- if !ok {
- return BrowserData{}, fmt.Errorf("unexpected JSON structure (no 'agents' field)")
- }
+ stats := rawData["agents"].(map[string]interface{})
var data BrowserData
- // Extract Firefox data
- if firefoxData, ok := stats["firefox"].(map[string]any); ok {
- if usageMap, ok := firefoxData["usage_global"].(map[string]any); ok {
- for version, usage := range usageMap {
- val, _ := usage.(float64)
- data.Firefox = append(data.Firefox, BrowserVersion{Version: version, Global: val})
- }
+ if firefoxData, ok := stats["firefox"].(map[string]interface{}); ok {
+ for version, usage := range firefoxData["usage_global"].(map[string]interface{}) {
+ data.Firefox = append(data.Firefox, BrowserVersion{
+ Version: version,
+ Global: usage.(float64),
+ })
}
}
- // Extract Chrome data
- if chromeData, ok := stats["chrome"].(map[string]any); ok {
- if usageMap, ok := chromeData["usage_global"].(map[string]any); ok {
- for version, usage := range usageMap {
- val, _ := usage.(float64)
- data.Chromium = append(data.Chromium, BrowserVersion{Version: version, Global: val})
- }
+ if chromeData, ok := stats["chrome"].(map[string]interface{}); ok {
+ for version, usage := range chromeData["usage_global"].(map[string]interface{}) {
+ data.Chromium = append(data.Chromium, BrowserVersion{
+ Version: version,
+ Global: usage.(float64),
+ })
}
}
return data, nil
}
-// getLatestBrowserVersions checks the cache and fetches new data if expired
func getLatestBrowserVersions() (BrowserData, error) {
browserCache.RLock()
if time.Now().Before(browserCache.expires) {
@@ -119,36 +117,37 @@ func getLatestBrowserVersions() (BrowserData, error) {
browserCache.Lock()
browserCache.data = data
- browserCache.expires = time.Now().Add(24 * time.Hour) // Refresh daily
+ browserCache.expires = time.Now().Add(24 * time.Hour)
browserCache.Unlock()
return data, nil
}
-// randomUserAgent picks a random browser (Firefox/Chromium), selects a version based on usage,
-// picks an OS string, and composes a User-Agent header.
func randomUserAgent() (string, error) {
browsers, err := getLatestBrowserVersions()
if err != nil {
return "", err
}
- r := rand.New(rand.NewSource(time.Now().UnixNano()))
+ rand := rand.New(rand.NewSource(time.Now().UnixNano()))
- // Overall usage: 80% chance for Chromium, 20% for Firefox
+ // Simulated browser usage statistics (in percentages)
usageStats := map[string]float64{
- "Firefox": 20.0,
- "Chromium": 80.0,
+ "Firefox": 30.0,
+ "Chromium": 70.0,
}
- // Weighted random selection of the browser type
+ // Calculate the probabilities for the versions
+ probabilities := []float64{0.5, 0.25, 0.125, 0.0625, 0.03125, 0.015625, 0.0078125, 0.00390625}
+
+ // Select a browser based on usage statistics
browserType := ""
- randVal := r.Float64() * 100
+ randVal := rand.Float64() * 100
cumulative := 0.0
- for bType, usage := range usageStats {
+ for browser, usage := range usageStats {
cumulative += usage
if randVal < cumulative {
- browserType = bType
+ browserType = browser
break
}
}
@@ -165,16 +164,14 @@ func randomUserAgent() (string, error) {
return "", fmt.Errorf("no versions found for browser: %s", browserType)
}
- // Sort by global usage descending
+ // Sort versions by usage (descending order)
sort.Slice(versions, func(i, j int) bool {
return versions[i].Global > versions[j].Global
})
- // Probability distribution for top few versions
- probabilities := []float64{0.5, 0.25, 0.125, 0.0625, 0.03125, 0.015625, 0.0078125, 0.00390625}
-
+ // Select a version based on the probabilities
version := ""
- randVal = r.Float64()
+ randVal = rand.Float64()
cumulative = 0.0
for i, p := range probabilities {
cumulative += p
@@ -184,72 +181,68 @@ func randomUserAgent() (string, error) {
}
}
- // Fallback to the least used version if none matched
+ // Fallback to the last version if none matched
if version == "" {
version = versions[len(versions)-1].Version
}
- userAgent := generateUserAgent(browserType, version, r)
+ // Generate the user agent string
+ userAgent := generateUserAgent(browserType, version)
return userAgent, nil
}
-// generateUserAgent composes the final UA string given the browser, version, and OS.
-func generateUserAgent(browser, version string, r *rand.Rand) string {
+func generateUserAgent(browser, version string) string {
oses := []struct {
os string
probability float64
}{
{"Windows NT 10.0; Win64; x64", 44.0},
- {"X11; Linux x86_64", 2.0},
- {"X11; Ubuntu; Linux x86_64", 2.0},
+ {"Windows NT 11.0; Win64; x64", 44.0},
+ {"X11; Linux x86_64", 1.0},
+ {"X11; Ubuntu; Linux x86_64", 1.0},
{"Macintosh; Intel Mac OS X 10_15_7", 10.0},
}
- // Weighted random selection for OS
- randVal := r.Float64() * 100
+ // Select an OS based on probabilities
+ randVal := rand.Float64() * 100
cumulative := 0.0
- selectedOS := oses[0].os // Default in case distribution is off
- for _, entry := range oses {
- cumulative += entry.probability
+ selectedOS := ""
+ for _, os := range oses {
+ cumulative += os.probability
if randVal < cumulative {
- selectedOS = entry.os
+ selectedOS = os.os
break
}
}
switch browser {
case "Firefox":
- // Example: Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:117.0) Gecko/20100101 Firefox/117.0
return fmt.Sprintf("Mozilla/5.0 (%s; rv:%s) Gecko/20100101 Firefox/%s", selectedOS, version, version)
case "Chromium":
- // Example: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.5938.132 Safari/537.36
return fmt.Sprintf("Mozilla/5.0 (%s) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36", selectedOS, version)
- default:
- return ""
}
+ return ""
}
-// updateCachedUserAgents randomly updates half of the cached UAs to new versions
func updateCachedUserAgents(newVersions BrowserData) {
cache.Lock()
defer cache.Unlock()
-
- r := rand.New(rand.NewSource(time.Now().UnixNano()))
for key, userAgent := range cache.data {
- if r.Float64() < 0.5 {
- updatedUserAgent := updateUserAgentVersion(userAgent, newVersions, r)
+ randVal := rand.Float64()
+ if randVal < 0.5 {
+ updatedUserAgent := updateUserAgentVersion(userAgent, newVersions)
cache.data[key] = updatedUserAgent
}
}
}
-// updateUserAgentVersion tries to parse the old UA, detect its browser, and update the version
-func updateUserAgentVersion(userAgent string, newVersions BrowserData, r *rand.Rand) string {
+func updateUserAgentVersion(userAgent string, newVersions BrowserData) string {
+ // Parse the current user agent to extract browser and version
var browserType, version string
-
- // Attempt to detect old UA patterns (Chromium or Firefox)
if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36", &version); err == nil {
browserType = "Chromium"
+ } else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (Windows NT 11.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36", &version); err == nil {
+ browserType = "Chromium"
} else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36", &version); err == nil {
browserType = "Chromium"
} else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (X11; Ubuntu; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36", &version); err == nil {
@@ -258,6 +251,8 @@ func updateUserAgentVersion(userAgent string, newVersions BrowserData, r *rand.R
browserType = "Chromium"
} else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:%s) Gecko/20100101 Firefox/%s", &version, &version); err == nil {
browserType = "Firefox"
+ } else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (Windows NT 11.0; Win64; x64; rv:%s) Gecko/20100101 Firefox/%s", &version, &version); err == nil {
+ browserType = "Firefox"
} else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (X11; Linux x86_64; rv:%s) Gecko/20100101 Firefox/%s", &version, &version); err == nil {
browserType = "Firefox"
} else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:%s) Gecko/20100101 Firefox/%s", &version, &version); err == nil {
@@ -266,37 +261,22 @@ func updateUserAgentVersion(userAgent string, newVersions BrowserData, r *rand.R
browserType = "Firefox"
}
- // Grab the newest version from the fetched data
+ // Get the latest version for that browser
var latestVersion string
if browserType == "Firefox" && len(newVersions.Firefox) > 0 {
- // Sort by usage descending
- sort.Slice(newVersions.Firefox, func(i, j int) bool {
- return newVersions.Firefox[i].Global > newVersions.Firefox[j].Global
- })
latestVersion = newVersions.Firefox[0].Version
} else if browserType == "Chromium" && len(newVersions.Chromium) > 0 {
- // Sort by usage descending
- sort.Slice(newVersions.Chromium, func(i, j int) bool {
- return newVersions.Chromium[i].Global > newVersions.Chromium[j].Global
- })
latestVersion = newVersions.Chromium[0].Version
}
- // If we failed to detect the browser or have no data, just return the old UA
- if browserType == "" || latestVersion == "" {
- return userAgent
- }
-
- // Create a new random OS-based UA string with the latest version
- return generateUserAgent(browserType, latestVersion, r)
+ // Update the user agent string with the new version
+ return generateUserAgent(browserType, latestVersion)
}
-// periodicAgentUpdate periodically refreshes browser data and user agents
func periodicAgentUpdate() {
for {
- // Sleep a random interval between 1 and 2 days
- r := rand.New(rand.NewSource(time.Now().UnixNano()))
- time.Sleep(time.Duration(24+r.Intn(24)) * time.Hour)
+ // Sleep for a random interval between 1 and 2 days
+ time.Sleep(time.Duration(24+rand.Intn(24)) * time.Hour)
// Fetch the latest browser versions
newVersions, err := fetchLatestBrowserVersions()
@@ -316,7 +296,6 @@ func periodicAgentUpdate() {
}
}
-// GetUserAgent returns a cached UA for the given key or creates one if none exists.
func GetUserAgent(cacheKey string) (string, error) {
cache.RLock()
userAgent, found := cache.data[cacheKey]
@@ -335,11 +314,9 @@ func GetUserAgent(cacheKey string) (string, error) {
cache.data[cacheKey] = userAgent
cache.Unlock()
- printDebug("Generated (cached or new) user agent: %s", userAgent)
return userAgent, nil
}
-// GetNewUserAgent always returns a newly generated UA, overwriting the cache.
func GetNewUserAgent(cacheKey string) (string, error) {
userAgent, err := randomUserAgent()
if err != nil {
@@ -350,7 +327,6 @@ func GetNewUserAgent(cacheKey string) (string, error) {
cache.data[cacheKey] = userAgent
cache.Unlock()
- printDebug("Generated new user agent: %s", userAgent)
return userAgent, nil
}
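Both `randomUserAgent` and `generateUserAgent` above pick from weighted options the same way: walk a cumulative sum of the weights until a uniform random draw falls below it. A minimal, self-contained sketch of that technique (names here are illustrative, not part of the codebase):

```go
package main

import (
	"fmt"
	"math/rand"
)

// pickWeighted returns one key, chosen with probability proportional
// to its weight, by walking a cumulative sum until the draw falls below it.
func pickWeighted(weights map[string]float64) string {
	total := 0.0
	for _, w := range weights {
		total += w
	}
	draw := rand.Float64() * total
	cumulative := 0.0
	var last string
	for name, w := range weights {
		cumulative += w
		last = name
		if draw < cumulative {
			return name
		}
	}
	return last // guard against floating-point edge cases
}

func main() {
	// Roughly 70% of draws should print "Chromium".
	fmt.Println(pickWeighted(map[string]float64{"Firefox": 30.0, "Chromium": 70.0}))
}
```

Go's map iteration order is randomized, but the cumulative walk still selects each option with probability proportional to its weight regardless of the order visited.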
diff --git a/cache.go b/cache.go
index f769066..ac2902d 100644
--- a/cache.go
+++ b/cache.go
@@ -62,18 +62,6 @@ type ForumSearchResult struct {
ThumbnailSrc string `json:"thumbnailSrc,omitempty"`
}
-type MusicResult struct {
- URL string
- Title string
- Artist string
- Description string
- PublishedDate string
- Thumbnail string
- // AudioURL string
- Source string
- Duration string
-}
-
// GeocodeCachedItem represents a geocoding result stored in the cache.
type GeocodeCachedItem struct {
Latitude string
@@ -135,11 +123,6 @@ func NewGeocodeCache() *GeocodeCache {
// Get retrieves the results for a given key from the cache.
func (rc *ResultsCache) Get(key CacheKey) ([]SearchResult, bool) {
- // Skip if RAM caching is disabled
- if !config.RamCacheEnabled {
- return nil, false
- }
-
rc.mu.Lock()
defer rc.mu.Unlock()
@@ -160,11 +143,6 @@ func (rc *ResultsCache) Get(key CacheKey) ([]SearchResult, bool) {
// Set stores the results for a given key in the cache.
func (rc *ResultsCache) Set(key CacheKey, results []SearchResult) {
- // Skip if RAM caching is disabled
- if !config.RamCacheEnabled {
- return
- }
-
rc.mu.Lock()
defer rc.mu.Unlock()
@@ -184,11 +162,6 @@ func (rc *ResultsCache) keyToString(key CacheKey) string {
// checkAndCleanCache removes items if memory usage exceeds the limit.
func (rc *ResultsCache) checkAndCleanCache() {
- // Skip if RAM caching is disabled
- if !config.RamCacheEnabled {
- return
- }
-
if rc.currentMemoryUsage() > config.RamCache.MaxUsageBytes {
rc.cleanOldestItems()
}
@@ -206,11 +179,6 @@ func (rc *ResultsCache) currentMemoryUsage() uint64 {
// Get retrieves the geocoding result for a given query from the cache.
func (gc *GeocodeCache) Get(query string) (latitude, longitude string, found bool, exists bool) {
- // Skip if RAM caching is disabled
- if !config.RamCacheEnabled {
- return "", "", false, false
- }
-
gc.mu.Lock()
defer gc.mu.Unlock()
@@ -230,11 +198,6 @@ func (gc *GeocodeCache) Get(query string) (latitude, longitude string, found boo
}
func (gc *GeocodeCache) Set(query, latitude, longitude string, found bool) {
- // Skip if RAM caching is disabled
- if !config.RamCacheEnabled {
- return
- }
-
gc.mu.Lock()
defer gc.mu.Unlock()
@@ -296,23 +259,15 @@ func convertToSearchResults(results interface{}) []SearchResult {
genericResults[i] = r
}
return genericResults
- case []MusicResult:
- genericResults := make([]SearchResult, len(res))
- for i, r := range res {
- genericResults[i] = r
- }
- return genericResults
}
return nil
}
-func convertToSpecificResults(results []SearchResult) ([]TextSearchResult, []TorrentResult, []ImageSearchResult, []ForumSearchResult, []MusicResult) {
+func convertToSpecificResults(results []SearchResult) ([]TextSearchResult, []TorrentResult, []ImageSearchResult, []ForumSearchResult) {
var textResults []TextSearchResult
var torrentResults []TorrentResult
var imageResults []ImageSearchResult
var forumResults []ForumSearchResult
- var musicResults []MusicResult
-
for _, r := range results {
switch res := r.(type) {
case TextSearchResult:
@@ -323,9 +278,7 @@ func convertToSpecificResults(results []SearchResult) ([]TextSearchResult, []Tor
imageResults = append(imageResults, res)
case ForumSearchResult:
forumResults = append(forumResults, res)
- case MusicResult:
- musicResults = append(musicResults, res)
}
}
- return textResults, torrentResults, imageResults, forumResults, musicResults
+ return textResults, torrentResults, imageResults, forumResults
}
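With `MusicResult` gone, the converter pair now round-trips four result kinds instead of five. A hedged sketch of the contract as it would be exercised from inside this package (the empty `TorrentResult{}` literal is just a placeholder):

```go
// One typed slice goes in, a generic []SearchResult comes out, and
// convertToSpecificResults splits it back into the four remaining kinds.
torrents := []TorrentResult{{}}
generic := convertToSearchResults(torrents)
text, files, images, forums := convertToSpecificResults(generic)
fmt.Println(len(text), len(files), len(images), len(forums)) // prints: 0 1 0 0
```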
diff --git a/config.go b/config.go
index 0cda67d..18d83cf 100644
--- a/config.go
+++ b/config.go
@@ -22,43 +22,24 @@ type CacheConfig struct {
Path string
}
-type MetaSearchConfig struct {
- Text []string
- Image []string
- Files []string
- Video []string
-}
-
type Config struct {
- Port int
- AuthCode string
- PeerID string
- Peers []string
- Domain string
- NodesEnabled bool
- MetaSearchEnabled bool
- IndexerEnabled bool
- WebsiteEnabled bool
- RamCacheEnabled bool
- DriveCacheEnabled bool
- MetaProxyEnabled bool
- MetaProxyStrict bool
- MetaProxyRetry int
- MetaProxies []string
- CrawlerProxyEnabled bool
- CrawlerProxyStrict bool
- CrawlerProxies []string
- CrawlerProxyRetry int
- // Maybye add Proxy support for Image Extraction?
- LogLevel int
+ Port int // Added
+ AuthCode string // Added
+ PeerID string // Added
+ Peers []string
+ Domain string // Added
+ NodesEnabled bool // Added
+ CrawlerEnabled bool // Added
+ IndexerEnabled bool // Added
+ WebsiteEnabled bool // Added
+ RamCacheEnabled bool
+ DriveCacheEnabled bool // Added
+ LogLevel int // Added
ConcurrentStandardCrawlers int
ConcurrentChromeCrawlers int
	CrawlingInterval time.Duration // Refresh crawled results in...
MaxPagesPerDomain int // Max pages to crawl per domain
IndexBatchSize int
- LibreXInstances []string
-
- MetaSearch MetaSearchConfig
DriveCache CacheConfig
RamCache CacheConfig
@@ -70,52 +51,17 @@ var defaultConfig = Config{
Peers: []string{},
AuthCode: generateStrongRandomString(64),
NodesEnabled: false,
- MetaSearchEnabled: true,
+ CrawlerEnabled: true,
IndexerEnabled: false,
WebsiteEnabled: true,
RamCacheEnabled: true,
DriveCacheEnabled: false,
- MetaProxyEnabled: false,
- MetaProxyStrict: true,
- MetaProxies: []string{},
- MetaProxyRetry: 3,
- CrawlerProxyEnabled: false,
- CrawlerProxyStrict: true,
- CrawlerProxies: []string{},
- CrawlerProxyRetry: 1,
ConcurrentStandardCrawlers: 12,
ConcurrentChromeCrawlers: 4,
CrawlingInterval: 24 * time.Hour,
MaxPagesPerDomain: 10,
IndexBatchSize: 50,
LogLevel: 1,
- LibreXInstances: []string{"librex.antopie.org"},
- MetaSearch: MetaSearchConfig{
- // For Text search (skip SearXNG and LibreX by default, as that would be mega stupid)
- Text: []string{"Google", "Brave", "DuckDuckGo"},
-
- // For Image search
- Image: []string{"Qwant", "Bing", "DeviantArt"},
-
- // For Files search
- Files: []string{"TorrentGalaxy", "ThePirateBay"},
-
- // For Video (piped instances)
- Video: []string{
- "api.piped.yt",
- "pipedapi.moomoo.me",
- "pipedapi.darkness.services",
- "pipedapi.kavin.rocks",
- "piped-api.hostux.net",
- "pipedapi.syncpundit.io",
- "piped-api.cfe.re",
- "pipedapi.in.projectsegfau.lt",
- "piapi.ggtyler.dev",
- "piped-api.codespace.cz",
- "pipedapi.coldforge.xyz",
- "pipedapi.osphost.fi",
- },
- },
DriveCache: CacheConfig{
Duration: 48 * time.Hour, // Added
Path: "./cache", // Added
@@ -299,33 +245,14 @@ func saveConfig(config Config) {
// Features section
featuresSec := cfg.Section("Features")
featuresSec.Key("Nodes").SetValue(strconv.FormatBool(config.NodesEnabled))
- featuresSec.Key("Crawler").SetValue(strconv.FormatBool(config.MetaSearchEnabled))
+ featuresSec.Key("Crawler").SetValue(strconv.FormatBool(config.CrawlerEnabled))
featuresSec.Key("Indexer").SetValue(strconv.FormatBool(config.IndexerEnabled))
featuresSec.Key("Website").SetValue(strconv.FormatBool(config.WebsiteEnabled))
- featuresSec.Key("MetaProxy").SetValue(strconv.FormatBool(config.MetaProxyEnabled))
- featuresSec.Key("CrawlerProxy").SetValue(strconv.FormatBool(config.CrawlerProxyEnabled))
-
- // Proxies section
- proxiesSec := cfg.Section("Proxies")
- proxiesSec.Key("MetaProxyStrict").SetValue(strconv.FormatBool(config.MetaProxyStrict))
- proxiesSec.Key("MetaProxies").SetValue(strings.Join(config.MetaProxies, ","))
- proxiesSec.Key("CrawlerProxyStrict").SetValue(strconv.FormatBool(config.CrawlerProxyStrict))
- proxiesSec.Key("CrawlerProxies").SetValue(strings.Join(config.CrawlerProxies, ","))
- proxiesSec.Key("MetaProxyRetry").SetValue(strconv.Itoa(config.MetaProxyRetry))
- proxiesSec.Key("CrawlerProxyRetry").SetValue(strconv.Itoa(config.CrawlerProxyRetry))
-
- // MetaSearch section
- metaSec := cfg.Section("MetaSearches")
- metaSec.Key("LibreXInstances").SetValue(strings.Join(config.LibreXInstances, ","))
- metaSec.Key("Text").SetValue(strings.Join(config.MetaSearch.Text, ","))
- metaSec.Key("Image").SetValue(strings.Join(config.MetaSearch.Image, ","))
- metaSec.Key("Files").SetValue(strings.Join(config.MetaSearch.Files, ","))
- metaSec.Key("Video").SetValue(strings.Join(config.MetaSearch.Video, ","))
// Indexer section
indexerSec := cfg.Section("Indexer")
indexerSec.Key("ConcurrentStandardCrawlers").SetValue(strconv.Itoa(config.ConcurrentStandardCrawlers))
- indexerSec.Key("ConcurrentChromeCrawlers").SetValue(strconv.Itoa(config.ConcurrentChromeCrawlers))
+	indexerSec.Key("ConcurrentChromeCrawlers").SetValue(strconv.Itoa(config.ConcurrentChromeCrawlers))
indexerSec.Key("CrawlingInterval").SetValue(config.CrawlingInterval.String())
indexerSec.Key("MaxPagesPerDomain").SetValue(strconv.Itoa(config.MaxPagesPerDomain))
indexerSec.Key("IndexBatchSize").SetValue(strconv.Itoa(config.IndexBatchSize))
@@ -365,28 +292,11 @@ func loadConfig() Config {
// Features
nodesEnabled := getConfigValueBool(cfg.Section("Features").Key("Nodes"), defaultConfig.NodesEnabled)
- metaSearchEnabled := getConfigValueBool(cfg.Section("Features").Key("Crawler"), defaultConfig.MetaSearchEnabled)
+ crawlerEnabled := getConfigValueBool(cfg.Section("Features").Key("Crawler"), defaultConfig.CrawlerEnabled)
indexerEnabled := getConfigValueBool(cfg.Section("Features").Key("Indexer"), defaultConfig.IndexerEnabled)
websiteEnabled := getConfigValueBool(cfg.Section("Features").Key("Website"), defaultConfig.WebsiteEnabled)
ramCacheEnabled := getConfigValueBool(cfg.Section("Features").Key("RamCache"), defaultConfig.RamCacheEnabled)
driveCacheEnabled := getConfigValueBool(cfg.Section("Features").Key("DriveCache"), defaultConfig.DriveCacheEnabled)
- metaProxyEnabled := getConfigValueBool(cfg.Section("Features").Key("MetaProxy"), defaultConfig.MetaProxyEnabled)
- crawlerProxyEnabled := getConfigValueBool(cfg.Section("Features").Key("CrawlerProxy"), defaultConfig.CrawlerProxyEnabled)
-
- // Proxies
- metaProxyStrict := getConfigValueBool(cfg.Section("Proxies").Key("MetaProxyStrict"), defaultConfig.MetaProxyStrict)
- metaProxies := strings.Split(getConfigValueString(cfg.Section("Proxies").Key("MetaProxies"), ""), ",")
- crawlerProxyStrict := getConfigValueBool(cfg.Section("Proxies").Key("CrawlerProxyStrict"), defaultConfig.CrawlerProxyStrict)
- crawlerProxies := strings.Split(getConfigValueString(cfg.Section("Proxies").Key("CrawlerProxies"), ""), ",")
- metaProxyRetry := getConfigValue(cfg.Section("Proxies").Key("MetaProxyRetry"), defaultConfig.MetaProxyRetry, strconv.Atoi)
- crawlerProxyRetry := getConfigValue(cfg.Section("Proxies").Key("CrawlerProxyRetry"), defaultConfig.CrawlerProxyRetry, strconv.Atoi)
-
- // MetaSearch
- searchXInstances := strings.Split(getConfigValueString(cfg.Section("MetaSearches").Key("LibreXInstances"), strings.Join(defaultConfig.LibreXInstances, ",")), ",")
- textList := strings.Split(getConfigValueString(cfg.Section("MetaSearch").Key("Text"), strings.Join(defaultConfig.MetaSearch.Text, ",")), ",")
- imageList := strings.Split(getConfigValueString(cfg.Section("MetaSearch").Key("Image"), strings.Join(defaultConfig.MetaSearch.Image, ",")), ",")
- filesList := strings.Split(getConfigValueString(cfg.Section("MetaSearch").Key("Files"), strings.Join(defaultConfig.MetaSearch.Files, ",")), ",")
- videoList := strings.Split(getConfigValueString(cfg.Section("MetaSearch").Key("Video"), strings.Join(defaultConfig.MetaSearch.Video, ",")), ",")
// Indexing
concurrentStandardCrawlers := getConfigValue(cfg.Section("Indexer").Key("ConcurrentStandardCrawlers"), defaultConfig.ConcurrentStandardCrawlers, strconv.Atoi)
@@ -415,31 +325,16 @@ func loadConfig() Config {
AuthCode: authCode,
Peers: peers,
NodesEnabled: nodesEnabled,
- MetaSearchEnabled: metaSearchEnabled,
+ CrawlerEnabled: crawlerEnabled,
IndexerEnabled: indexerEnabled,
WebsiteEnabled: websiteEnabled,
RamCacheEnabled: ramCacheEnabled,
DriveCacheEnabled: driveCacheEnabled,
- MetaProxyEnabled: metaProxyEnabled,
- MetaProxyStrict: metaProxyStrict,
- MetaProxies: metaProxies,
- MetaProxyRetry: metaProxyRetry,
- CrawlerProxyEnabled: crawlerProxyEnabled,
- CrawlerProxyStrict: crawlerProxyStrict,
- CrawlerProxies: crawlerProxies,
- CrawlerProxyRetry: crawlerProxyRetry,
ConcurrentStandardCrawlers: concurrentStandardCrawlers,
ConcurrentChromeCrawlers: concurrentChromeCrawlers,
CrawlingInterval: crawlingInterval,
MaxPagesPerDomain: maxPagesPerDomain,
IndexBatchSize: indexBatchSize,
- LibreXInstances: searchXInstances,
- MetaSearch: MetaSearchConfig{
- Text: textList,
- Image: imageList,
- Files: filesList,
- Video: videoList,
- },
DriveCache: CacheConfig{
Duration: driveDuration,
MaxUsageBytes: driveMaxUsage,
diff --git a/crawler-extraction.go b/crawler-extraction.go
index 7fe2591..4ce8b9d 100644
--- a/crawler-extraction.go
+++ b/crawler-extraction.go
@@ -32,12 +32,8 @@ func fetchPageMetadataStandard(pageURL, userAgent string) (string, string, strin
// fetchPageMetadataChrome uses Chromedp to handle JavaScript-rendered pages.
func fetchPageMetadataChrome(pageURL, userAgent string) (string, string, string) {
- // Create a custom allocator context for Chromedp with proxy support if enabled
- allocCtx, cancelAlloc := chromedp.NewExecAllocator(context.Background(), configureChromeOptions()...)
- defer cancelAlloc()
-
- // Create a browser context
- ctx, cancel := chromedp.NewContext(allocCtx)
+ // Create context
+ ctx, cancel := chromedp.NewContext(context.Background())
defer cancel()
var renderedHTML string
@@ -61,36 +57,9 @@ func fetchPageMetadataChrome(pageURL, userAgent string) (string, string, string)
return extractParsedDOM(doc)
}
-// configureChromeOptions sets up Chrome options and proxy if CrawlerProxy is enabled.
-func configureChromeOptions() []chromedp.ExecAllocatorOption {
- options := chromedp.DefaultExecAllocatorOptions[:]
-
- // This code is not using config.CrawlerProxyRetry
- if config.CrawlerProxyEnabled && crawlerProxyClient != nil {
- // Retrieve proxy settings from CrawlerProxy
- proxy := crawlerProxyClient.GetProxy() // Ensure a `GetProxy` method is implemented for your proxy client
- if proxy != "" {
- options = append(options, chromedp.ProxyServer(proxy))
- printDebug("Using CrawlerProxy for Chromedp: %s", proxy)
- } else {
- printWarn("CrawlerProxy is enabled but no valid proxy is available")
- }
- }
-
- // // Add additional Chrome
- // options = append(options,
- // chromedp.Flag("headless", true),
- // chromedp.Flag("disable-gpu", true),
- // chromedp.Flag("no-sandbox", true),
- // chromedp.Flag("disable-setuid-sandbox", true),
- // )
-
- return options
-}
-
// extractStandard does the normal HTML parse with OG, Twitter, etc.
func extractStandard(pageURL, userAgent string) (title, desc, keywords string) {
-
+ client := &http.Client{Timeout: 15 * time.Second}
req, err := http.NewRequest("GET", pageURL, nil)
if err != nil {
printDebug("Failed to create request for %s: %v", pageURL, err)
@@ -99,8 +68,7 @@ func extractStandard(pageURL, userAgent string) (title, desc, keywords string) {
req.Header.Set("User-Agent", userAgent)
req.Header.Set("Accept-Language", "en-US,en;q=0.9")
- // Use CrawlerProxy if enabled
- resp, err := DoCrawlerProxyRequest(req)
+ resp, err := client.Do(req)
if err != nil {
printDebug("Failed to GET %s: %v", pageURL, err)
return
@@ -208,6 +176,7 @@ func fallbackReadability(pageURL, userAgent, title, desc, keywords string) (stri
return title, desc, keywords
}
+ client := &http.Client{Timeout: 15 * time.Second}
readReq, err := http.NewRequest("GET", pageURL, nil)
if err != nil {
printDebug("Failed to create fallbackReadability request: %v", err)
@@ -216,16 +185,14 @@ func fallbackReadability(pageURL, userAgent, title, desc, keywords string) (stri
readReq.Header.Set("User-Agent", userAgent)
readReq.Header.Set("Accept-Language", "en-US,en;q=0.9")
- // Use CrawlerProxy if enabled
- readResp, err := DoCrawlerProxyRequest(readReq)
- if err != nil {
- printDebug("go-readability GET error for %s: %v", pageURL, err)
- return title, desc, keywords
- }
-
- if readResp.StatusCode < 200 || readResp.StatusCode >= 300 {
- printDebug("go-readability GET returned status %d for %s", readResp.StatusCode, pageURL)
- readResp.Body.Close() // Safely close body
+ readResp, err := client.Do(readReq)
+ if err != nil || readResp.StatusCode < 200 || readResp.StatusCode >= 300 {
+ if err != nil {
+ printDebug("go-readability GET error for %s: %v", pageURL, err)
+ }
+ if readResp != nil {
+ readResp.Body.Close()
+ }
return title, desc, keywords
}
defer readResp.Body.Close()
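The Chrome path above now builds its browser context straight from `context.Background()` instead of going through a custom exec allocator. A minimal standalone sketch of that simplified flow, assuming the `chromedp` package and an illustrative target URL:

```go
package main

import (
	"context"
	"fmt"
	"time"

	"github.com/chromedp/chromedp"
)

func main() {
	// One browser context straight from context.Background(),
	// mirroring the simplified fetchPageMetadataChrome above.
	ctx, cancel := chromedp.NewContext(context.Background())
	defer cancel()

	// Bound the whole render so a stuck page cannot hang the caller.
	ctx, cancel = context.WithTimeout(ctx, 30*time.Second)
	defer cancel()

	var renderedHTML string
	err := chromedp.Run(ctx,
		chromedp.Navigate("https://example.com"), // illustrative URL
		chromedp.OuterHTML("html", &renderedHTML),
	)
	if err != nil {
		fmt.Println("chromedp error:", err)
		return
	}
	fmt.Printf("got %d bytes of rendered HTML\n", len(renderedHTML))
}
```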
diff --git a/files-thepiratebay.go b/files-thepiratebay.go
index 3045bf6..b98ee27 100644
--- a/files-thepiratebay.go
+++ b/files-thepiratebay.go
@@ -57,34 +57,31 @@ func (t *ThePirateBay) Search(query string, category string) ([]TorrentResult, e
return []TorrentResult{}, nil
}
- searchURL := fmt.Sprintf("https://%s/q.php?q=%s&cat=%s", PIRATEBAY_DOMAIN, url.QueryEscape(query), categoryCode)
+ url := fmt.Sprintf("https://%s/q.php?q=%s&cat=%s", PIRATEBAY_DOMAIN, url.QueryEscape(query), categoryCode)
// User Agent generation
userAgent, err := GetUserAgent("files-tpb")
if err != nil {
- return nil, fmt.Errorf("error generating User-Agent: %w", err)
+ fmt.Println("Error:", err)
+ return nil, err
}
- req, err := http.NewRequest("GET", searchURL, nil)
+ req, err := http.NewRequest("GET", url, nil)
if err != nil {
- return nil, fmt.Errorf("error creating request: %w", err)
+ return nil, err
}
req.Header.Set("User-Agent", userAgent)
- // Perform the request using MetaProxy if enabled
- resp, err := DoMetaProxyRequest(req)
+ client := &http.Client{}
+ response, err := client.Do(req)
if err != nil {
- return nil, fmt.Errorf("error making request to The Pirate Bay: %w", err)
- }
- defer resp.Body.Close()
-
- if resp.StatusCode != http.StatusOK {
- return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
+ return nil, err
}
+ defer response.Body.Close()
var torrentData []map[string]interface{}
- if err := json.NewDecoder(resp.Body).Decode(&torrentData); err != nil {
- return nil, fmt.Errorf("error decoding response JSON: %w", err)
+ if err := json.NewDecoder(response.Body).Decode(&torrentData); err != nil {
+ return nil, err
}
var results []TorrentResult
diff --git a/files-torrentgalaxy.go b/files-torrentgalaxy.go
index 5bcd05e..51f51ca 100644
--- a/files-torrentgalaxy.go
+++ b/files-torrentgalaxy.go
@@ -62,17 +62,18 @@ func (tg *TorrentGalaxy) Search(query string, category string) ([]TorrentResult,
// User Agent generation
userAgent, err := GetUserAgent("files-torrentgalaxy")
if err != nil {
- return nil, fmt.Errorf("error generating User-Agent: %w", err)
+ fmt.Println("Error:", err)
+ return nil, err
}
req, err := http.NewRequest("GET", searchURL, nil)
if err != nil {
- return nil, fmt.Errorf("error creating request: %w", err)
+ return nil, err
}
req.Header.Set("User-Agent", userAgent)
- // Perform the request using MetaProxy if enabled
- resp, err := DoMetaProxyRequest(req)
+ client := &http.Client{}
+ resp, err := client.Do(req)
if err != nil {
return nil, fmt.Errorf("error making request to TorrentGalaxy: %w", err)
}
diff --git a/files.go b/files.go
index a93710d..d0c1ff1 100755
--- a/files.go
+++ b/files.go
@@ -30,25 +30,11 @@ var (
var fileResultsChan = make(chan []TorrentResult)
-func initFileEngines() {
-
- torrentGalaxy = nil
- thePirateBay = nil
- // nyaa = nil
- // rutor = nil
-
- for _, engineName := range config.MetaSearch.Files {
- switch engineName {
- case "TorrentGalaxy":
- torrentGalaxy = NewTorrentGalaxy()
- case "ThePirateBay":
- thePirateBay = NewThePirateBay()
- // case "Nyaa":
- // nyaa = NewNyaa()
- // case "Rutor":
- // rutor = NewRutor()
- }
- }
+func init() {
+ torrentGalaxy = NewTorrentGalaxy()
+ // nyaa = NewNyaa()
+ thePirateBay = NewThePirateBay()
+ // rutor = NewRutor()
}
func handleFileSearch(w http.ResponseWriter, settings UserSettings, query string, page int) {
@@ -102,7 +88,7 @@ func getFileResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string,
case results := <-cacheChan:
if results == nil {
// Fetch only if the cache miss occurs and Crawler is enabled
- if config.MetaSearchEnabled {
+ if config.CrawlerEnabled {
combinedResults = fetchFileResults(query, safe, lang, page)
if len(combinedResults) > 0 {
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
@@ -111,12 +97,12 @@ func getFileResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string,
printDebug("Crawler disabled; skipping fetching.")
}
} else {
- _, torrentResults, _, _, _ := convertToSpecificResults(results)
+ _, torrentResults, _, _ := convertToSpecificResults(results)
combinedResults = torrentResults
}
case <-time.After(2 * time.Second):
printDebug("Cache check timeout")
- if config.MetaSearchEnabled {
+ if config.CrawlerEnabled {
combinedResults = fetchFileResults(query, safe, lang, page)
if len(combinedResults) > 0 {
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
@@ -131,13 +117,13 @@ func getFileResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string,
func fetchFileResults(query, safe, lang string, page int) []TorrentResult {
// If Crawler is disabled, skip fetching from torrent sites
- if !config.MetaSearchEnabled {
+ if !config.CrawlerEnabled {
printInfo("Crawler is disabled; skipping torrent site fetching.")
return []TorrentResult{}
}
sites := []TorrentSite{torrentGalaxy, nyaa, thePirateBay, rutor}
- var results []TorrentResult
+ results := []TorrentResult{}
for _, site := range sites {
if site == nil {
@@ -154,12 +140,9 @@ func fetchFileResults(query, safe, lang string, page int) []TorrentResult {
}
}
- // If no results, try from other nodes
if len(results) == 0 {
- if config.NodesEnabled {
- printWarn("No file results found for query: %s, trying other nodes", query)
- results = tryOtherNodesForFileSearch(query, safe, lang, page, []string{hostID})
- }
+ printWarn("No file results found for query: %s, trying other nodes", query)
+ results = tryOtherNodesForFileSearch(query, safe, lang, page, []string{hostID})
}
return results
diff --git a/forums.go b/forums.go
index a5ddebe..bd57e55 100755
--- a/forums.go
+++ b/forums.go
@@ -3,57 +3,54 @@ package main
import (
"encoding/json"
"fmt"
+ "math"
"net/http"
"net/url"
"time"
)
func PerformRedditSearch(query string, safe string, page int) ([]ForumSearchResult, error) {
- if !config.MetaSearchEnabled {
+ if !config.CrawlerEnabled {
printDebug("Crawler is disabled; skipping forum search.")
return []ForumSearchResult{}, nil
}
const (
- pageSize = 25
- baseURL = "https://www.reddit.com"
+ pageSize = 25
+ baseURL = "https://www.reddit.com"
+ maxRetries = 5
+ initialBackoff = 2 * time.Second
)
-
var results []ForumSearchResult
- offset := page * pageSize
- searchURL := fmt.Sprintf("%s/search.json?q=%s&limit=%d&start=%d",
- baseURL,
- url.QueryEscape(query),
- pageSize,
- offset,
- )
- // Create request
- req, err := http.NewRequest("GET", searchURL, nil)
- if err != nil {
- return nil, fmt.Errorf("creating request: %v", err)
+ searchURL := fmt.Sprintf("%s/search.json?q=%s&limit=%d&start=%d", baseURL, url.QueryEscape(query), pageSize, page*pageSize)
+ var resp *http.Response
+ var err error
+
+ // Retry logic with exponential backoff
+ for i := 0; i <= maxRetries; i++ {
+ resp, err = http.Get(searchURL)
+ if err != nil {
+ return nil, fmt.Errorf("making request: %v", err)
+ }
+ if resp.StatusCode != http.StatusTooManyRequests {
+ break
+ }
+
+		// Close the throttled response and wait before retrying
+		resp.Body.Close()
+		backoff := time.Duration(math.Pow(2, float64(i))) * initialBackoff
+		time.Sleep(backoff)
}
- // Set User-Agent
- userAgent, uaErr := GetUserAgent("Reddit-Forum-Search")
- if uaErr != nil {
- return nil, fmt.Errorf("getting user agent: %v", uaErr)
- }
- req.Header.Set("User-Agent", userAgent)
-
- // Make request using MetaProxy logic
- resp, err := DoMetaProxyRequest(req)
if err != nil {
return nil, fmt.Errorf("making request: %v", err)
}
defer resp.Body.Close()
- // Validate response status
if resp.StatusCode != http.StatusOK {
return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
}
- // Parse JSON response
var searchResults map[string]interface{}
if err := json.NewDecoder(resp.Body).Decode(&searchResults); err != nil {
return nil, fmt.Errorf("decoding response: %v", err)
@@ -69,9 +66,9 @@ func PerformRedditSearch(query string, safe string, page int) ([]ForumSearchResu
return nil, fmt.Errorf("no children field in data")
}
- // Extract search results
for _, post := range posts {
postData := post.(map[string]interface{})["data"].(map[string]interface{})
+
if safe == "active" && postData["over_18"].(bool) {
continue
}
@@ -81,7 +78,6 @@ func PerformRedditSearch(query string, safe string, page int) ([]ForumSearchResu
if len(description) > 500 {
description = description[:500] + "..."
}
-
publishedDate := time.Unix(int64(postData["created_utc"].(float64)), 0)
permalink := postData["permalink"].(string)
resultURL := fmt.Sprintf("%s%s", baseURL, permalink)
@@ -154,7 +150,7 @@ func getForumResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
case results := <-cacheChan:
if results == nil {
// Fetch only if the cache miss occurs and Crawler is enabled
- if config.MetaSearchEnabled {
+ if config.CrawlerEnabled {
combinedResults = fetchForumResults(query, safe, lang, page)
if len(combinedResults) > 0 {
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
@@ -168,7 +164,7 @@ func getForumResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
}
case <-time.After(2 * time.Second):
printDebug("Cache check timeout")
- if config.MetaSearchEnabled {
+ if config.CrawlerEnabled {
combinedResults = fetchForumResults(query, safe, lang, page)
if len(combinedResults) > 0 {
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
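`PerformRedditSearch` above now retries on HTTP 429 with exponential backoff. A generalized, self-contained sketch of that pattern (the helper name and parameters are illustrative, not part of the codebase):

```go
package main

import (
	"fmt"
	"math"
	"net/http"
	"time"
)

// getWithBackoff retries a GET while the server answers 429 Too Many
// Requests, doubling the wait each attempt, and returns the first
// non-throttled response.
func getWithBackoff(url string, maxRetries int, initialBackoff time.Duration) (*http.Response, error) {
	for i := 0; ; i++ {
		resp, err := http.Get(url)
		if err != nil {
			return nil, err
		}
		if resp.StatusCode != http.StatusTooManyRequests || i >= maxRetries {
			return resp, nil
		}
		resp.Body.Close() // drop the throttled response before retrying
		time.Sleep(time.Duration(math.Pow(2, float64(i))) * initialBackoff)
	}
}

func main() {
	resp, err := getWithBackoff("https://www.reddit.com/search.json?q=go&limit=25", 5, 2*time.Second)
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}
```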
diff --git a/images-bing.go b/images-bing.go
index f057ac5..b6a6aa6 100644
--- a/images-bing.go
+++ b/images-bing.go
@@ -18,21 +18,8 @@ func PerformBingImageSearch(query, safe, lang string, page int) ([]ImageSearchRe
// Build the search URL
searchURL := buildBingSearchURL(query, page)
- // Create the HTTP request
- req, err := http.NewRequest("GET", searchURL, nil)
- if err != nil {
- return nil, 0, fmt.Errorf("creating request: %v", err)
- }
-
- // Set User-Agent
- ImageUserAgent, err := GetUserAgent("Image-Search-Bing")
- if err != nil {
- return nil, 0, fmt.Errorf("generating User-Agent: %v", err)
- }
- req.Header.Set("User-Agent", ImageUserAgent)
-
- // Use MetaProxy if enabled
- resp, err := DoMetaProxyRequest(req)
+ // Make the HTTP request
+ resp, err := http.Get(searchURL)
if err != nil {
return nil, 0, fmt.Errorf("making request: %v", err)
}
diff --git a/images-deviantart.go b/images-deviantart.go
index 171ac1a..3077640 100644
--- a/images-deviantart.go
+++ b/images-deviantart.go
@@ -87,15 +87,15 @@ func PerformDeviantArtImageSearch(query, safe, lang string, page int) ([]ImageSe
return nil, 0, err
}
- // Create the HTTP request
+ // Make the HTTP request with User-Agent header
+ client := &http.Client{}
req, err := http.NewRequest("GET", searchURL, nil)
if err != nil {
return nil, 0, fmt.Errorf("creating request: %v", err)
}
req.Header.Set("User-Agent", DeviantArtImageUserAgent)
- // Perform the request using MetaProxy if enabled
- resp, err := DoMetaProxyRequest(req)
+ resp, err := client.Do(req)
if err != nil {
return nil, 0, fmt.Errorf("making request: %v", err)
}
@@ -182,7 +182,7 @@ func PerformDeviantArtImageSearch(query, safe, lang string, page int) ([]ImageSe
duration := time.Since(startTime)
- // Check if the number of results is zero
+	// Check if no results were found
if len(results) == 0 {
return nil, duration, fmt.Errorf("no images found")
}
diff --git a/images-imgur.go b/images-imgur.go
index e085371..641f645 100644
--- a/images-imgur.go
+++ b/images-imgur.go
@@ -18,21 +18,7 @@ func PerformImgurImageSearch(query, safe, lang string, page int) ([]ImageSearchR
var results []ImageSearchResult
searchURL := buildImgurSearchURL(query, page)
- // Create the HTTP request
- req, err := http.NewRequest("GET", searchURL, nil)
- if err != nil {
- return nil, 0, fmt.Errorf("creating request: %v", err)
- }
-
- // Get the User-Agent string
- imgurUserAgent, err := GetUserAgent("Image-Search-Imgur")
- if err != nil {
- return nil, 0, fmt.Errorf("getting user-agent: %v", err)
- }
- req.Header.Set("User-Agent", imgurUserAgent)
-
- // Perform the HTTP request with MetaProxy if enabled
- resp, err := DoMetaProxyRequest(req)
+ resp, err := http.Get(searchURL)
if err != nil {
return nil, 0, fmt.Errorf("making request: %v", err)
}
@@ -42,7 +28,6 @@ func PerformImgurImageSearch(query, safe, lang string, page int) ([]ImageSearchR
return nil, 0, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
}
- // Parse the HTML document
doc, err := goquery.NewDocumentFromReader(resp.Body)
if err != nil {
return nil, 0, fmt.Errorf("loading HTML document: %v", err)
@@ -91,35 +76,12 @@ func PerformImgurImageSearch(query, safe, lang string, page int) ([]ImageSearchR
duration := time.Since(startTime) // Calculate the duration
- if len(results) == 0 {
- return nil, duration, fmt.Errorf("no images found")
- }
-
return results, duration, nil
}
// scrapeImageFromImgurPage scrapes the image source from the Imgur page
func scrapeImageFromImgurPage(pageURL string) string {
- req, err := http.NewRequest("GET", pageURL, nil)
- if err != nil {
- fmt.Printf("Error creating request for page: %v\n", err)
- return ""
- }
-
- // Get the User-Agent string
- imgurUserAgent, err := GetUserAgent("Image-Search-Imgur")
- if err == nil {
- req.Header.Set("User-Agent", imgurUserAgent)
- }
-
- // Perform the request using MetaProxy if enabled
- var resp *http.Response
- if config.MetaProxyEnabled && metaProxyClient != nil {
- resp, err = metaProxyClient.Do(req)
- } else {
- client := &http.Client{}
- resp, err = client.Do(req)
- }
+ resp, err := http.Get(pageURL)
if err != nil {
fmt.Printf("Error fetching page: %v\n", err)
return ""
diff --git a/images-quant.go b/images-quant.go
index ab5d677..d85d0f9 100644
--- a/images-quant.go
+++ b/images-quant.go
@@ -97,7 +97,7 @@ func PerformQwantImageSearch(query, safe, lang string, page int) ([]ImageSearchR
// Ensure count + offset is within acceptable limits
if offset+resultsPerPage > 250 {
- return nil, 0, fmt.Errorf("count + offset must be lower than 250 for Qwant")
+		return nil, 0, fmt.Errorf("count + offset must be lower than 250 for Qwant")
}
if safe == "" {
@@ -113,21 +113,21 @@ func PerformQwantImageSearch(query, safe, lang string, page int) ([]ImageSearchR
offset,
safe)
- // Create the HTTP request
+ client := &http.Client{Timeout: 10 * time.Second}
+
req, err := http.NewRequest("GET", apiURL, nil)
if err != nil {
return nil, 0, fmt.Errorf("creating request: %v", err)
}
- // Get the User-Agent string
ImageUserAgent, err := GetUserAgent("Image-Search-Quant")
if err != nil {
- return nil, 0, fmt.Errorf("getting user-agent: %v", err)
+ return nil, 0, err
}
- req.Header.Set("User-Agent", ImageUserAgent)
- // Perform the request with MetaProxy if enabled
- resp, err := DoMetaProxyRequest(req)
+	req.Header.Set("User-Agent", ImageUserAgent) // Qwant seems to reject some User-Agent strings
+
+ resp, err := client.Do(req)
if err != nil {
return nil, 0, fmt.Errorf("making request: %v", err)
}
@@ -137,13 +137,11 @@ func PerformQwantImageSearch(query, safe, lang string, page int) ([]ImageSearchR
return nil, 0, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
}
- // Parse the API response
var apiResp QwantAPIResponse
if err := json.NewDecoder(resp.Body).Decode(&apiResp); err != nil {
return nil, 0, fmt.Errorf("decoding response: %v", err)
}
- // Process the results
var wg sync.WaitGroup
results := make([]ImageSearchResult, len(apiResp.Data.Result.Items))
@@ -176,9 +174,5 @@ func PerformQwantImageSearch(query, safe, lang string, page int) ([]ImageSearchR
duration := time.Since(startTime) // Calculate the duration
- if len(results) == 0 {
- return nil, duration, fmt.Errorf("no images found")
- }
-
return results, duration, nil
}
diff --git a/images.go b/images.go
index cc4d77e..a044013 100755
--- a/images.go
+++ b/images.go
@@ -10,23 +10,12 @@ import (
var imageSearchEngines []SearchEngine
-var allImageSearchEngines = []SearchEngine{
- {Name: "Qwant", Func: wrapImageSearchFunc(PerformQwantImageSearch)},
- {Name: "Bing", Func: wrapImageSearchFunc(PerformBingImageSearch)},
- {Name: "DeviantArt", Func: wrapImageSearchFunc(PerformDeviantArtImageSearch)},
- // {Name: "Imgur", Func: wrapImageSearchFunc(PerformImgurImageSearch), Weight: 4}, // example
-}
-
-func initImageEngines() {
- imageSearchEngines = nil
-
- for _, engineName := range config.MetaSearch.Image {
- for _, candidate := range allImageSearchEngines {
- if candidate.Name == engineName {
- imageSearchEngines = append(imageSearchEngines, candidate)
- break
- }
- }
+func init() {
+ imageSearchEngines = []SearchEngine{
+ {Name: "Qwant", Func: wrapImageSearchFunc(PerformQwantImageSearch)},
+ {Name: "Bing", Func: wrapImageSearchFunc(PerformBingImageSearch)},
+ {Name: "DeviantArt", Func: wrapImageSearchFunc(PerformDeviantArtImageSearch)},
+ //{Name: "Imgur", Func: wrapImageSearchFunc(PerformImgurImageSearch), Weight: 4}, // Image proxy not working
}
}
@@ -97,7 +86,7 @@ func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
select {
case results := <-cacheChan:
if results == nil {
- if config.MetaSearchEnabled {
+ if config.CrawlerEnabled {
combinedResults = fetchImageResults(query, safe, lang, page, synchronous)
if len(combinedResults) > 0 {
combinedResults = filterValidImages(combinedResults)
@@ -107,12 +96,12 @@ func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
printDebug("Crawler disabled; skipping fetching from image search engines.")
}
} else {
- _, _, imageResults, _, _ := convertToSpecificResults(results)
+ _, _, imageResults, _ := convertToSpecificResults(results)
combinedResults = filterValidImages(imageResults)
}
case <-time.After(2 * time.Second):
printDebug("Cache check timeout")
- if config.MetaSearchEnabled {
+ if config.CrawlerEnabled {
combinedResults = fetchImageResults(query, safe, lang, page, synchronous)
if len(combinedResults) > 0 {
combinedResults = filterValidImages(combinedResults)
@@ -129,8 +118,8 @@ func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
func fetchImageResults(query, safe, lang string, page int, synchronous bool) []ImageSearchResult {
var results []ImageSearchResult
- // Check if MetaSearchEnabled is false
- if !config.MetaSearchEnabled {
+ // Check if CrawlerEnabled is false
+ if !config.CrawlerEnabled {
printDebug("Crawler is disabled; skipping image search engine fetching.")
return results
}
diff --git a/init.go b/init.go
index 87dc0ce..bf0d220 100644
--- a/init.go
+++ b/init.go
@@ -13,16 +13,10 @@ func main() {
portFlag := flag.Int("port", 0, "Port number to run the application (overrides config)")
domainFlag := flag.String("domain", "", "Domain address for the application (overrides config)")
skipConfigFlag := flag.Bool("skip-config-check", false, "Skip interactive prompts and load config.ini")
- configFlag := flag.String("config", "", "Path to configuration file (overrides default)")
// Parse command-line flags
flag.Parse()
- // Override global configFilePath if --config flag is provided
- if *configFlag != "" {
- configFilePath = *configFlag
- }
-
if *skipConfigFlag {
// Skip interactive configuration
if _, err := os.Stat(configFilePath); err == nil {
@@ -66,24 +60,11 @@ func main() {
}
config.PeerID = hostID
- if config.CrawlerProxyEnabled || config.MetaProxyEnabled {
- InitProxies()
- }
-
// Initiate Browser Agent updater
- if config.MetaSearchEnabled || config.IndexerEnabled {
+ if config.CrawlerEnabled || config.IndexerEnabled {
go periodicAgentUpdate()
}
- // Load List of Meta Search Engines
- if config.MetaSearchEnabled {
- initTextEngines()
- initImageEngines()
- initFileEngines()
- initPipedInstances()
- initMusicEngines()
- }
-
InitializeLanguage("en") // Initialize language before generating OpenSearch
generateOpenSearchXML(config)
@@ -143,6 +124,11 @@ func main() {
webCrawlerInit()
+	// No longer needed as crawled data is indexed immediately
+ // // Start periodic indexing (every 2 minutes)
+ // dataFilePath := filepath.Join(config.DriveCache.Path, "data_to_index.txt")
+ // startPeriodicIndexing(dataFilePath, 2*time.Minute)
+
printInfo("Indexer is enabled.")
} else {
printInfo("Indexer is disabled.")
diff --git a/lang/en/LC_MESSAGES/default.po b/lang/en/LC_MESSAGES/default.po
index c146dac..eb0843d 100644
--- a/lang/en/LC_MESSAGES/default.po
+++ b/lang/en/LC_MESSAGES/default.po
@@ -88,9 +88,6 @@ msgstr "Video"
msgid "videos"
msgstr "Videos"
-msgid "music"
-msgstr "Music"
-
msgid "forum"
msgstr "Forum"
diff --git a/main.go b/main.go
index 5038b6c..12c2381 100755
--- a/main.go
+++ b/main.go
@@ -164,8 +164,6 @@ func handleSearch(w http.ResponseWriter, r *http.Request) {
handleImageSearch(w, r, settings, query, page)
case "video":
handleVideoSearch(w, settings, query, page)
- case "music":
- handleMusicSearch(w, settings, query, page)
case "map":
handleMapSearch(w, settings, query)
case "forum":
@@ -228,7 +226,7 @@ func runServer() {
w.Header().Set("Content-Type", "application/opensearchdescription+xml")
http.ServeFile(w, r, "static/opensearch.xml")
})
- printInfo("Website is enabled.")
+ printInfo("Website functionality enabled.")
} else {
// Redirect all website routes to a "service disabled" handler
http.HandleFunc("/static/", handleWebsiteDisabled)
@@ -240,7 +238,7 @@ func runServer() {
http.HandleFunc("/image_status", handleWebsiteDisabled)
http.HandleFunc("/privacy", handleWebsiteDisabled)
http.HandleFunc("/opensearch.xml", handleWebsiteDisabled)
- printInfo("Website is disabled.")
+ printInfo("Website functionality disabled.")
}
if config.NodesEnabled {
@@ -254,7 +252,7 @@ func runServer() {
func handleWebsiteDisabled(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "text/plain")
w.WriteHeader(http.StatusServiceUnavailable)
- _, _ = w.Write([]byte("The website is currently disabled."))
+ _, _ = w.Write([]byte("The website functionality is currently disabled."))
}
func handlePrivacyPage(w http.ResponseWriter, r *http.Request) {
diff --git a/music-bandcamp.go b/music-bandcamp.go
deleted file mode 100644
index 2c3210f..0000000
--- a/music-bandcamp.go
+++ /dev/null
@@ -1,72 +0,0 @@
-// music-bandcamp.go - Bandcamp specific implementation
-package main
-
-import (
- "fmt"
- "net/http"
- "net/url"
- "strings"
-
- "github.com/PuerkitoBio/goquery"
-)
-
-func SearchBandcamp(query string, page int) ([]MusicResult, error) {
- baseURL := "https://bandcamp.com/search?"
- params := url.Values{
- "q": []string{query},
- "page": []string{fmt.Sprintf("%d", page)},
- }
-
- resp, err := http.Get(baseURL + params.Encode())
- if err != nil {
- return nil, fmt.Errorf("request failed: %v", err)
- }
- defer resp.Body.Close()
-
- doc, err := goquery.NewDocumentFromReader(resp.Body)
- if err != nil {
- return nil, fmt.Errorf("failed to parse HTML: %v", err)
- }
-
- var results []MusicResult
-
- doc.Find("li.searchresult").Each(func(i int, s *goquery.Selection) {
- result := MusicResult{Source: "Bandcamp"}
-
- // URL extraction
- if urlSel := s.Find("div.itemurl a"); urlSel.Length() > 0 {
- result.URL = strings.TrimSpace(urlSel.Text())
- }
-
- // Title extraction
- if titleSel := s.Find("div.heading a"); titleSel.Length() > 0 {
- result.Title = strings.TrimSpace(titleSel.Text())
- }
-
- // Artist extraction
- if artistSel := s.Find("div.subhead"); artistSel.Length() > 0 {
- result.Artist = strings.TrimSpace(artistSel.Text())
- }
-
- // Thumbnail extraction
- if thumbSel := s.Find("div.art img"); thumbSel.Length() > 0 {
- result.Thumbnail, _ = thumbSel.Attr("src")
- }
-
- // // Iframe URL construction
- // if linkHref, exists := s.Find("div.itemurl a").Attr("href"); exists {
- // if itemID := extractSearchItemID(linkHref); itemID != "" {
- // itemType := strings.ToLower(strings.TrimSpace(s.Find("div.itemtype").Text()))
- // result.IframeSrc = fmt.Sprintf(
- // "https://bandcamp.com/EmbeddedPlayer/%s=%s/size=large/bgcol=000/linkcol=fff/artwork=small",
- // itemType,
- // itemID,
- // )
- // }
- // }
-
- results = append(results, result)
- })
-
- return results, nil
-}
diff --git a/music-soundcloud.go b/music-soundcloud.go
deleted file mode 100644
index f8a7221..0000000
--- a/music-soundcloud.go
+++ /dev/null
@@ -1,198 +0,0 @@
-package main
-
-import (
- "encoding/json"
- "fmt"
- "io"
- "net/http"
- "net/url"
- "regexp"
- "strings"
-
- "github.com/PuerkitoBio/goquery"
-)
-
-type SoundCloudTrack struct {
- ID int `json:"id"`
- Title string `json:"title"`
- Permalink string `json:"permalink"`
- ArtworkURL string `json:"artwork_url"`
- Duration int `json:"duration"`
- User struct {
- Username string `json:"username"`
- Permalink string `json:"permalink"`
- } `json:"user"`
- Streams struct {
- HTTPMP3128URL string `json:"http_mp3_128_url"`
- } `json:"streams"`
-}
-
-func SearchSoundCloud(query string, page int) ([]MusicResult, error) {
- clientID, err := extractClientID()
- if err != nil {
- return searchSoundCloudViaScraping(query, page)
- }
-
- apiResults, err := searchSoundCloudViaAPI(query, clientID, page)
- if err == nil && len(apiResults) > 0 {
- return convertSoundCloudResults(apiResults), nil
- }
-
- return searchSoundCloudViaScraping(query, page)
-}
-
-func searchSoundCloudViaAPI(query, clientID string, page int) ([]SoundCloudTrack, error) {
- const limit = 10
- offset := (page - 1) * limit
-
- apiUrl := fmt.Sprintf(
- "https://api-v2.soundcloud.com/search/tracks?q=%s&client_id=%s&limit=%d&offset=%d",
- url.QueryEscape(query),
- clientID,
- limit,
- offset,
- )
-
- resp, err := http.Get(apiUrl)
- if err != nil {
- return nil, err
- }
- defer resp.Body.Close()
-
- if resp.StatusCode != http.StatusOK {
- return nil, fmt.Errorf("API request failed with status: %d", resp.StatusCode)
- }
-
- var response struct {
- Collection []SoundCloudTrack `json:"collection"`
- }
-
- if err := json.NewDecoder(resp.Body).Decode(&response); err != nil {
- return nil, err
- }
-
- return response.Collection, nil
-}
-
-func convertSoundCloudResults(tracks []SoundCloudTrack) []MusicResult {
- var results []MusicResult
-
- for _, track := range tracks {
- thumbnail := strings.Replace(track.ArtworkURL, "large", "t500x500", 1)
- trackURL := fmt.Sprintf("https://soundcloud.com/%s/%s",
- track.User.Permalink,
- track.Permalink,
- )
-
- results = append(results, MusicResult{
- Title: track.Title,
- Artist: track.User.Username,
- URL: trackURL,
- Thumbnail: thumbnail,
- //AudioURL: track.Streams.HTTPMP3128URL,
- Source: "SoundCloud",
- Duration: fmt.Sprintf("%d", track.Duration/1000),
- })
- }
- return results
-}
-
-func searchSoundCloudViaScraping(query string, page int) ([]MusicResult, error) {
- searchUrl := fmt.Sprintf("https://soundcloud.com/search/sounds?q=%s", url.QueryEscape(query))
- resp, err := http.Get(searchUrl)
- if err != nil {
- return nil, err
- }
- defer resp.Body.Close()
-
- doc, err := goquery.NewDocumentFromReader(resp.Body)
- if err != nil {
- return nil, err
- }
-
- var results []MusicResult
- doc.Find("li.searchList__item").Each(func(i int, s *goquery.Selection) {
- titleElem := s.Find("a.soundTitle__title")
- artistElem := s.Find("a.soundTitle__username")
- artworkElem := s.Find(".sound__coverArt")
-
- title := strings.TrimSpace(titleElem.Text())
- artist := strings.TrimSpace(artistElem.Text())
- href, _ := titleElem.Attr("href")
- thumbnail, _ := artworkElem.Find("span.sc-artwork").Attr("style")
-
- if thumbnail != "" {
- if matches := regexp.MustCompile(`url\((.*?)\)`).FindStringSubmatch(thumbnail); len(matches) > 1 {
- thumbnail = strings.Trim(matches[1], `"`)
- }
- }
-
- if title == "" || href == "" {
- return
- }
-
- trackURL, err := url.Parse(href)
- if err != nil {
- return
- }
-
- if trackURL.Host == "" {
- trackURL.Scheme = "https"
- trackURL.Host = "soundcloud.com"
- }
-
- trackURL.Path = strings.ReplaceAll(trackURL.Path, "//", "/")
- fullURL := trackURL.String()
-
- results = append(results, MusicResult{
- Title: title,
- Artist: artist,
- URL: fullURL,
- Thumbnail: thumbnail,
- Source: "SoundCloud",
- })
- })
-
- return results, nil
-}
-
-func extractClientID() (string, error) {
- resp, err := http.Get("https://soundcloud.com/")
- if err != nil {
- return "", err
- }
- defer resp.Body.Close()
-
- doc, err := goquery.NewDocumentFromReader(resp.Body)
- if err != nil {
- return "", err
- }
-
- var clientID string
- doc.Find("script[src]").Each(func(i int, s *goquery.Selection) {
- if clientID != "" {
- return
- }
-
- src, _ := s.Attr("src")
- if strings.Contains(src, "sndcdn.com/assets/") {
- resp, err := http.Get(src)
- if err != nil {
- return
- }
- defer resp.Body.Close()
-
- body, _ := io.ReadAll(resp.Body)
- re := regexp.MustCompile(`client_id:"([^"]+)"`)
- matches := re.FindSubmatch(body)
- if len(matches) > 1 {
- clientID = string(matches[1])
- }
- }
- })
-
- if clientID == "" {
- return "", fmt.Errorf("client_id not found")
- }
- return clientID, nil
-}
diff --git a/music-spotify.go b/music-spotify.go
deleted file mode 100644
index d33e6a3..0000000
--- a/music-spotify.go
+++ /dev/null
@@ -1,81 +0,0 @@
-package main
-
-import (
- "fmt"
- "net/http"
- "net/url"
- "strings"
- "time"
-
- "github.com/PuerkitoBio/goquery"
-)
-
-func SearchSpotify(query string, page int) ([]MusicResult, error) {
- searchUrl := fmt.Sprintf("https://open.spotify.com/search/%s", url.PathEscape(query))
-
- client := &http.Client{
- Timeout: 10 * time.Second,
- CheckRedirect: func(req *http.Request, via []*http.Request) error {
- return http.ErrUseLastResponse
- },
- }
-
- req, err := http.NewRequest("GET", searchUrl, nil)
- if err != nil {
- return nil, fmt.Errorf("failed to create request: %v", err)
- }
-
- // Set user agent ?
-
- resp, err := client.Do(req)
- if err != nil {
- return nil, fmt.Errorf("request failed: %v", err)
- }
- defer resp.Body.Close()
-
- if resp.StatusCode != http.StatusOK {
- return nil, fmt.Errorf("received non-200 status code: %d", resp.StatusCode)
- }
-
- doc, err := goquery.NewDocumentFromReader(resp.Body)
- if err != nil {
- return nil, fmt.Errorf("failed to parse document: %v", err)
- }
-
- var results []MusicResult
-
- // Find track elements
- doc.Find(`div[data-testid="tracklist-row"]`).Each(func(i int, s *goquery.Selection) {
- // Extract title
- title := s.Find(`div[data-testid="tracklist-row__title"] a`).Text()
- title = strings.TrimSpace(title)
-
- // Extract artist
- artist := s.Find(`div[data-testid="tracklist-row__artist"] a`).First().Text()
- artist = strings.TrimSpace(artist)
-
- // Extract duration
- duration := s.Find(`div[data-testid="tracklist-row__duration"]`).First().Text()
- duration = strings.TrimSpace(duration)
-
- // Extract URL
- path, _ := s.Find(`div[data-testid="tracklist-row__title"] a`).Attr("href")
- fullUrl := fmt.Sprintf("https://open.spotify.com%s", path)
-
- // Extract thumbnail
- thumbnail, _ := s.Find(`img[aria-hidden="false"]`).Attr("src")
-
- if title != "" && artist != "" {
- results = append(results, MusicResult{
- Title: title,
- Artist: artist,
- URL: fullUrl,
- Duration: duration,
- Thumbnail: thumbnail,
- Source: "Spotify",
- })
- }
- })
-
- return results, nil
-}
diff --git a/music-youtube.go b/music-youtube.go
deleted file mode 100644
index 698dc71..0000000
--- a/music-youtube.go
+++ /dev/null
@@ -1,85 +0,0 @@
-package main
-
-import (
- "encoding/json"
- "fmt"
- "net/http"
- "net/url"
-)
-
-type MusicAPIResponse struct {
- Items []struct {
- Title string `json:"title"`
- UploaderName string `json:"uploaderName"`
- Duration int `json:"duration"`
- Thumbnail string `json:"thumbnail"`
- URL string `json:"url"`
- } `json:"items"` // Removed VideoID since we'll parse from URL
-}
-
-func SearchMusicViaPiped(query string, page int) ([]MusicResult, error) {
- var lastError error
- mu.Lock()
- defer mu.Unlock()
-
- for _, instance := range pipedInstances {
- if disabledInstances[instance] {
- continue
- }
-
- url := fmt.Sprintf(
- "https://%s/search?q=%s&filter=music_songs&page=%d",
- instance,
- url.QueryEscape(query),
- page,
- )
-
- resp, err := http.Get(url)
- if err != nil || resp.StatusCode != http.StatusOK {
- printInfo("Disabling instance %s due to error: %v", instance, err)
- disabledInstances[instance] = true
- lastError = fmt.Errorf("request to %s failed: %w", instance, err)
- continue
- }
-
- defer resp.Body.Close()
- var apiResp MusicAPIResponse
- if err := json.NewDecoder(resp.Body).Decode(&apiResp); err != nil {
- lastError = fmt.Errorf("failed to decode response from %s: %w", instance, err)
- continue
- }
-
- return convertPipedToMusicResults(instance, apiResp), nil
- }
-
- return nil, fmt.Errorf("all Piped instances failed, last error: %v", lastError)
-}
-
-func convertPipedToMusicResults(instance string, resp MusicAPIResponse) []MusicResult {
- seen := make(map[string]bool)
- var results []MusicResult
-
- for _, item := range resp.Items {
- // Extract video ID from URL
- u, err := url.Parse(item.URL)
- if err != nil {
- continue
- }
- videoID := u.Query().Get("v")
- if videoID == "" || seen[videoID] {
- continue
- }
- seen[videoID] = true
-
- results = append(results, MusicResult{
- Title: item.Title,
- Artist: item.UploaderName,
- URL: fmt.Sprintf("https://music.youtube.com%s", item.URL),
- Duration: formatDuration(item.Duration),
- Thumbnail: item.Thumbnail,
- Source: "YouTube Music",
- //AudioURL: fmt.Sprintf("https://%s/stream/%s", instance, videoID),
- })
- }
- return results
-}
diff --git a/music.go b/music.go
deleted file mode 100644
index 34dd70d..0000000
--- a/music.go
+++ /dev/null
@@ -1,178 +0,0 @@
-// music.go - Central music search handler
-package main
-
-import (
- "fmt"
- "net/http"
- "sync"
- "time"
-)
-
-type MusicSearchEngine struct {
- Name string
- Func func(query string, page int) ([]MusicResult, error)
-}
-
-var (
- musicSearchEngines []MusicSearchEngine
- cacheMutex = &sync.Mutex{}
-)
-
-var allMusicSearchEngines = []MusicSearchEngine{
- {Name: "SoundCloud", Func: SearchSoundCloud},
- {Name: "YouTube", Func: SearchMusicViaPiped},
- {Name: "Bandcamp", Func: SearchBandcamp},
- //{Name: "Spotify", Func: SearchSpotify},
-}
-
-func initMusicEngines() {
- // Initialize with all engines if no specific config
- musicSearchEngines = allMusicSearchEngines
-}
-
-func handleMusicSearch(w http.ResponseWriter, settings UserSettings, query string, page int) {
- start := time.Now()
-
- cacheKey := CacheKey{
- Query: query,
- Page: page,
- Type: "music",
- Lang: settings.SearchLanguage,
- Safe: settings.SafeSearch == "active",
- }
-
- var results []MusicResult
-
- if cached, found := resultsCache.Get(cacheKey); found {
- if musicResults, ok := convertCacheToMusicResults(cached); ok {
- results = musicResults
- }
- }
-
- if len(results) == 0 {
- results = fetchMusicResults(query, page)
- if len(results) > 0 {
- resultsCache.Set(cacheKey, convertMusicResultsToCache(results))
- }
- }
-
- go prefetchMusicPages(query, page)
-
- elapsed := time.Since(start) // Calculate duration
- fetched := fmt.Sprintf("%.2f %s", elapsed.Seconds(), Translate("seconds"))
-
- data := map[string]interface{}{
- "Results": results,
- "Query": query,
- "Page": page,
- "HasPrevPage": page > 1,
- "HasNextPage": len(results) >= 10, // Default page size
- "MusicServices": getMusicServiceNames(),
- "CurrentService": "all", // Default service
- "Theme": settings.Theme,
- "IsThemeDark": settings.IsThemeDark,
- "Trans": Translate,
- "Fetched": fetched,
- }
-
- renderTemplate(w, "music.html", data)
-}
-
-// Helper to get music service names
-func getMusicServiceNames() []string {
- names := make([]string, len(allMusicSearchEngines))
- for i, engine := range allMusicSearchEngines {
- names[i] = engine.Name
- }
- return names
-}
-
-func convertMusicResultsToCache(results []MusicResult) []SearchResult {
- cacheResults := make([]SearchResult, len(results))
- for i, r := range results {
- cacheResults[i] = r
- }
- return cacheResults
-}
-
-func convertCacheToMusicResults(cached []SearchResult) ([]MusicResult, bool) {
- results := make([]MusicResult, 0, len(cached))
- for _, item := range cached {
- if musicResult, ok := item.(MusicResult); ok {
- results = append(results, musicResult)
- } else {
- return nil, false
- }
- }
- return results, true
-}
-
-func fetchMusicResults(query string, page int) []MusicResult {
- var results []MusicResult
- resultsChan := make(chan []MusicResult, len(musicSearchEngines))
- var wg sync.WaitGroup
-
- for _, engine := range musicSearchEngines {
- wg.Add(1)
- go func(e MusicSearchEngine) {
- defer wg.Done()
- res, err := e.Func(query, page)
- if err == nil && len(res) > 0 {
- resultsChan <- res
- }
- }(engine)
- }
-
- go func() {
- wg.Wait()
- close(resultsChan)
- }()
-
- for res := range resultsChan {
- results = append(results, res...)
- if len(results) >= 50 { // Default max results
- break
- }
- }
-
- return deduplicateResults(results)
-}
-
-func prefetchMusicPages(query string, currentPage int) {
- for _, page := range []int{currentPage - 1, currentPage + 1} {
- if page < 1 {
- continue
- }
- cacheKey := CacheKey{
- Query: query,
- Page: page,
- Type: "music",
- }
- if _, found := resultsCache.Get(cacheKey); !found {
- go fetchMusicResults(query, page)
- }
- }
-}
-
-func deduplicateResults(results []MusicResult) []MusicResult {
- seen := make(map[string]bool)
- var unique []MusicResult
-
- for _, res := range results {
- if !seen[res.URL] {
- seen[res.URL] = true
- unique = append(unique, res)
- }
- }
- return unique
-}
-
-// func generatePlayerHTML(result MusicResult) template.HTML {
-// if result.IframeSrc != "" {
-// return template.HTML(fmt.Sprintf(
-// ``,
-// result.IframeSrc,
-// ))
-// }
-// return template.HTML("")
-// }
diff --git a/node.go b/node.go
index aac0804..5fd247a 100644
--- a/node.go
+++ b/node.go
@@ -5,7 +5,7 @@ import (
"crypto/rand"
"encoding/json"
"fmt"
- "io"
+ "io/ioutil"
"net/http"
"time"
)
@@ -65,10 +65,7 @@ func sendMessage(serverAddr string, msg Message) error {
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
- body, err := io.ReadAll(resp.Body)
- if err != nil {
- return fmt.Errorf("failed to read response body: %v", err)
- }
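+	// The read error is ignored here; on failure the error message below simply carries an empty body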
+ body, _ := ioutil.ReadAll(resp.Body)
return fmt.Errorf("server error: %s", body)
}
diff --git a/proxy.go b/proxy.go
deleted file mode 100644
index 0f2a26a..0000000
--- a/proxy.go
+++ /dev/null
@@ -1,270 +0,0 @@
-package main
-
-import (
- "fmt"
- "net/http"
- "strings"
- "sync"
- "time"
-
- "golang.org/x/net/proxy"
-)
-
-// ProxyConfig holds configuration for a single proxy.
-type ProxyConfig struct {
- Address string
- Username string
- Password string
-}
-
-// ProxyClient provides an HTTP client pool for proxies.
-type ProxyClient struct {
- clients []*http.Client
- lock sync.Mutex
- index int
-}
-
-// Package-level proxy clients
-var (
- metaProxyClient *ProxyClient
- crawlerProxyClient *ProxyClient
-)
-
-// NewProxyClientPool creates a pool of HTTP clients with SOCKS5 proxies.
-func NewProxyClientPool(proxies []ProxyConfig, timeout time.Duration) (*ProxyClient, error) {
- if len(proxies) == 0 {
- return nil, fmt.Errorf("no proxies provided")
- }
-
- clients := make([]*http.Client, len(proxies))
-
- for i, pc := range proxies {
- var auth *proxy.Auth
- if pc.Username != "" || pc.Password != "" {
- auth = &proxy.Auth{
- User: pc.Username,
- Password: pc.Password,
- }
- }
- dialer, err := proxy.SOCKS5("tcp", pc.Address, auth, proxy.Direct)
- if err != nil {
- return nil, fmt.Errorf("failed to create SOCKS5 dialer for %s: %w", pc.Address, err)
- }
-
- transport := &http.Transport{Dial: dialer.Dial}
- clients[i] = &http.Client{
- Transport: transport,
- Timeout: timeout,
- }
- }
-
- return &ProxyClient{clients: clients}, nil
-}
-
-// Do sends an HTTP request using the next proxy in the pool.
-func (p *ProxyClient) Do(req *http.Request) (*http.Response, error) {
- p.lock.Lock()
- client := p.clients[p.index]
- p.index = (p.index + 1) % len(p.clients)
- p.lock.Unlock()
- return client.Do(req)
-}
-
-func (p *ProxyClient) GetProxy() string {
- p.lock.Lock()
- defer p.lock.Unlock()
-
- if len(p.clients) == 0 {
- return ""
- }
-
- // Round-robin proxy retrieval
- client := p.clients[p.index]
- p.index = (p.index + 1) % len(p.clients)
-
- // Assume each client has a proxy string saved
- // Example implementation depends on how your proxies are configured
- proxyTransport, ok := client.Transport.(*http.Transport)
- if ok && proxyTransport.Proxy != nil {
- proxyURL, _ := proxyTransport.Proxy(nil)
- if proxyURL != nil {
- return proxyURL.String()
- }
- }
-
- return ""
-}
-
-// ParseProxies parses the proxy strings in the format ADDRESS:PORT or ADDRESS:PORT:USER:PASSWORD.
-func ParseProxies(proxyStrings []string) []ProxyConfig {
- var proxies []ProxyConfig
- for _, proxyStr := range proxyStrings {
- parts := strings.Split(proxyStr, ":")
- switch len(parts) {
- case 2: // ADDRESS:PORT
- proxies = append(proxies, ProxyConfig{
- Address: fmt.Sprintf("%s:%s", parts[0], parts[1]),
- })
- case 4: // ADDRESS:PORT:USER:PASSWORD
- proxies = append(proxies, ProxyConfig{
- Address: fmt.Sprintf("%s:%s", parts[0], parts[1]),
- Username: parts[2],
- Password: parts[3],
- })
- default:
- fmt.Printf("Invalid proxy format: %s\n", proxyStr)
- }
- }
- return proxies
-}
-
-// InitProxies initializes the proxy clients for Meta and Crawler proxies.
-func InitProxies() {
- // Initialize Meta Proxy Client
- if config.MetaProxyEnabled {
- metaProxies := ParseProxies(config.MetaProxies)
- client, err := NewProxyClientPool(metaProxies, 30*time.Second)
- if err != nil {
- if config.MetaProxyStrict {
- panic(fmt.Sprintf("Failed to initialize Meta proxies: %v", err))
- }
- fmt.Printf("Warning: Meta proxy initialization failed: %v\n", err)
- }
- metaProxyClient = client
- }
-
- // Initialize Crawler Proxy Client
- if config.CrawlerProxyEnabled {
- crawlerProxies := ParseProxies(config.CrawlerProxies)
- client, err := NewProxyClientPool(crawlerProxies, 30*time.Second)
- if err != nil {
- if config.CrawlerProxyStrict {
- panic(fmt.Sprintf("Failed to initialize Crawler proxies: %v", err))
- }
- fmt.Printf("Warning: Crawler proxy initialization failed: %v\n", err)
- }
- crawlerProxyClient = client
- }
-}
-
-// Doer is an interface so we can accept *http.Client or *ProxyClient for requests.
-type Doer interface {
- Do(*http.Request) (*http.Response, error)
-}
-
-// DoProxyRequest handles “try direct, then proxy if needed,” with retries if proxy is used.
-//
-// - strict: if true, always try proxy first if enabled; if not available, do one direct attempt
-// - enabled: whether this type of proxy is turned on
-// - retryCount: how many times to retry with the proxy
-// - proxyClient: the pool of proxy connections
-func DoProxyRequest(req *http.Request, strict bool, enabled bool, retryCount int, proxyClient *ProxyClient) (*http.Response, error) {
- // 1) If !strict => try direct once first
- if !strict {
- resp, err := tryRequestOnce(req, http.DefaultClient)
- if isSuccessful(resp, err) {
- return resp, nil
- }
- // If direct fails => if proxy is enabled, retry
- if enabled && proxyClient != nil {
- resp, err = tryRequestWithRetry(req, proxyClient, retryCount)
- if isSuccessful(resp, err) {
- return resp, nil
- }
- return nil, fmt.Errorf("failed after direct & proxy attempts: %v", err)
- }
- return nil, fmt.Errorf("request failed direct, no valid proxy: %v", err)
- }
-
- // 2) If strict => if proxy is enabled, try it up to “retryCount”
- if enabled && proxyClient != nil {
- resp, err := tryRequestWithRetry(req, proxyClient, retryCount)
- if isSuccessful(resp, err) {
- return resp, nil
- }
- return nil, fmt.Errorf("failed after %d proxy attempts: %v", retryCount, err)
- }
-
- // If strict but no proxy => direct once
- resp, err := tryRequestOnce(req, http.DefaultClient)
- if isSuccessful(resp, err) {
- return resp, nil
- }
- return nil, fmt.Errorf("direct request failed in strict mode, no proxy: %v", err)
-}
-
-// Helper Wrapper functions for DoProxyRequest()
-func DoMetaProxyRequest(req *http.Request) (*http.Response, error) {
- return DoProxyRequest(
- req,
- config.MetaProxyStrict,
- config.MetaProxyEnabled,
- config.MetaProxyRetry,
- metaProxyClient,
- )
-}
-func DoCrawlerProxyRequest(req *http.Request) (*http.Response, error) {
- return DoProxyRequest(
- req,
- config.CrawlerProxyStrict,
- config.CrawlerProxyEnabled,
- config.CrawlerProxyRetry,
- metaProxyClient,
- )
-}
-
-// tryRequestWithRetry tries the request up to "retries" times, waiting 200ms between attempts.
-func tryRequestWithRetry(req *http.Request, client Doer, retries int) (*http.Response, error) {
- var resp *http.Response
- var err error
- for i := 1; i <= retries; i++ {
- if resp != nil {
- resp.Body.Close()
- }
- printDebug("Attempt %d of %d with proxy/client...", i, retries)
- resp, err = tryRequestOnce(req, client)
- if isSuccessful(resp, err) {
- return resp, nil
- }
- time.Sleep(200 * time.Millisecond)
- }
- return resp, err
-}
-
-// tryRequestOnce sends a single request with the given client. If client is nil, uses default client.
-func tryRequestOnce(req *http.Request, client Doer) (*http.Response, error) {
- if client == nil {
- client = http.DefaultClient
- }
- resp, err := client.Do(req)
- return resp, err
-}
-
-// isSuccessful checks if err==nil & resp != nil & resp.StatusCode in [200..299].
-func isSuccessful(resp *http.Response, err error) bool {
- if err != nil || resp == nil {
- return false
- }
- return resp.StatusCode >= 200 && resp.StatusCode < 300
-}
-
-// func main() {
-// config := loadConfig()
-
-// // Initialize proxies if enabled
-// if config.CrawlerProxyEnabled || config.MetaProxyEnabled {
-// InitProxies()
-// }
-
-// // Example usage
-// if metaProxyClient != nil {
-// req, _ := http.NewRequest("GET", "https://example.com", nil)
-// resp, err := metaProxyClient.Do(req)
-// if err != nil {
-// fmt.Printf("Error using MetaProxyClient: %v\n", err)
-// } else {
-// fmt.Printf("Meta Proxy Response Status: %s\n", resp.Status)
-// resp.Body.Close()
-// }
-// }
-// }
diff --git a/run.bat b/run.bat
index dd485f9..eb3919d 100755
--- a/run.bat
+++ b/run.bat
@@ -5,7 +5,7 @@ rem Initialize variables
set SKIP_CONFIG=""
set PORT=""
set DOMAIN=""
-set CONFIG_FILE=""
+set BUILD_MODE=false
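+rem BUILD_MODE=true compiles the binary and exits instead of running it (enabled via --build)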
set BUILD_OUTPUT=qgato.exe
rem Parse arguments
@@ -23,14 +23,13 @@ if "%~1"=="--domain" (
shift
goto parse_args
)
-if "%~1"=="--config" (
- set CONFIG_FILE=%~2
- shift
+if "%~1"=="--skip-config-check" (
+ set SKIP_CONFIG=--skip-config-check
shift
goto parse_args
)
-if "%~1"=="--skip-config-check" (
- set SKIP_CONFIG=--skip-config-check
+if "%~1"=="--build" (
+ set BUILD_MODE=true
shift
goto parse_args
)
@@ -51,29 +50,37 @@ for %%f in (*.go) do (
)
)
-rem Always delete and rebuild the binary
-echo Cleaning previous build...
-if exist "%BUILD_OUTPUT%" del "%BUILD_OUTPUT%"
+if "%BUILD_MODE%"=="true" (
+ rem Build mode
+ echo Building application...
+ go build -o "%BUILD_OUTPUT%" !GO_FILES!
+ if errorlevel 1 (
+ echo Build failed!
+ exit /b 1
+ )
+ echo Build successful! Output: %CD%\%BUILD_OUTPUT%
+) else (
+ rem Check if the executable exists
+ if not exist "%BUILD_OUTPUT%" (
+ echo Executable not found. Building it first...
+ go build -o "%BUILD_OUTPUT%" !GO_FILES!
+ if errorlevel 1 (
+ echo Build failed! Unable to run the application.
+ exit /b 1
+ )
+ )
-echo Building application...
-go build -o "%BUILD_OUTPUT%" !GO_FILES!
-if errorlevel 1 (
- echo Build failed!
- exit /b 1
+ rem Construct the command
+ set CMD=%BUILD_OUTPUT% !SKIP_CONFIG!
+ if not "%PORT%"=="" set CMD=!CMD! --port %PORT%
+ if not "%DOMAIN%"=="" set CMD=!CMD! --domain %DOMAIN%
+
+ rem Informative output
+ echo Starting application with command: !CMD!
+
+ rem Run the application
+ call !CMD!
)
-echo Build successful! Output: %CD%\%BUILD_OUTPUT%
-
-rem Construct the command
-set CMD=%BUILD_OUTPUT% !SKIP_CONFIG!
-if not "%PORT%"=="" set CMD=!CMD! --port %PORT%
-if not "%DOMAIN%"=="" set CMD=!CMD! --domain %DOMAIN%
-if not "%CONFIG_FILE%"=="" set CMD=!CMD! --config %CONFIG_FILE%
-
-rem Informative output
-echo Starting application with command: !CMD!
-
-rem Run the built executable
-call !CMD!
rem Return to the original directory
popd
diff --git a/run.sh b/run.sh
index 089e11f..2aeefad 100755
--- a/run.sh
+++ b/run.sh
@@ -4,7 +4,7 @@
SKIP_CONFIG=""
PORT=""
DOMAIN=""
-CONFIG_FILE=""
+BUILD_MODE=false
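+# BUILD_MODE=true compiles the binary and exits instead of running it (enabled via --build)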
BUILD_OUTPUT="qgato"
# Parse arguments
@@ -18,14 +18,14 @@ while [ $# -gt 0 ]; do
DOMAIN=$2
shift 2
;;
- --config)
- CONFIG_FILE=$2
- shift 2
- ;;
--skip-config-check)
SKIP_CONFIG="--skip-config-check"
shift
;;
+ --build)
+ BUILD_MODE=true
+ shift
+ ;;
*)
echo "Unknown argument: $1"
exit 1
@@ -39,26 +39,33 @@ SCRIPT_DIR=$(dirname "$0")
# List all Go files in the script directory (excluding test files)
GO_FILES=$(find "$SCRIPT_DIR" -name '*.go' ! -name '*_test.go' -print)
-# Always delete and rebuild the binary
-echo "Cleaning previous build..."
-rm -f "$SCRIPT_DIR/$BUILD_OUTPUT"
-
-echo "Building application..."
-go build -o "$SCRIPT_DIR/$BUILD_OUTPUT" $GO_FILES
-if [ $? -eq 0 ]; then
- echo "Build successful! Output: $SCRIPT_DIR/$BUILD_OUTPUT"
+if $BUILD_MODE; then
+ # Build mode
+ echo "Building application..."
+ go build -o "$SCRIPT_DIR/$BUILD_OUTPUT" $GO_FILES
+ if [ $? -eq 0 ]; then
+ echo "Build successful! Output: $SCRIPT_DIR/$BUILD_OUTPUT"
+ else
+ echo "Build failed!"
+ exit 1
+ fi
else
- echo "Build failed!"
- exit 1
+ # Run mode
+ CMD="./$BUILD_OUTPUT $SKIP_CONFIG"
+ [ -n "$PORT" ] && CMD="$CMD --port $PORT"
+ [ -n "$DOMAIN" ] && CMD="$CMD --domain $DOMAIN"
+
+ if [ ! -f "$SCRIPT_DIR/$BUILD_OUTPUT" ]; then
+ echo "Executable not found. Building it first..."
+ go build -o "$SCRIPT_DIR/$BUILD_OUTPUT" $GO_FILES
+ if [ $? -ne 0 ]; then
+ echo "Build failed! Unable to run the application."
+ exit 1
+ fi
+ fi
+
+ echo "Starting application with command: $CMD"
+
+ # Run the executable
+ eval $CMD
fi
-
-# Construct the run command
-CMD="$SCRIPT_DIR/$BUILD_OUTPUT $SKIP_CONFIG"
-[ -n "$PORT" ] && CMD="$CMD --port $PORT"
-[ -n "$DOMAIN" ] && CMD="$CMD --domain $DOMAIN"
-[ -n "$CONFIG_FILE" ] && CMD="$CMD --config $CONFIG_FILE"
-
-echo "Starting application with command: $CMD"
-
-# Run the built executable
-eval $CMD
diff --git a/static/css/style-music.css b/static/css/style-music.css
deleted file mode 100644
index fccc9cd..0000000
--- a/static/css/style-music.css
+++ /dev/null
@@ -1,119 +0,0 @@
-/* Music Results Styling */
-.result-item.music-item {
- display: flex;
- gap: 16px;
- margin-bottom: 24px;
- align-items: flex-start;
-}
-
-.music-thumbnail {
- position: relative;
- flex: 0 0 160px;
- aspect-ratio: 1;
- border-radius: 8px;
- overflow: hidden;
- background: var(--placeholder-bg);
-}
-
-.music-thumbnail img {
- width: 100%;
- height: 100%;
- object-fit: cover;
- transition: transform 0.2s ease;
-}
-
-.music-thumbnail:hover img {
- transform: scale(1.03);
-}
-
-.thumbnail-placeholder {
- width: 100%;
- height: 100%;
- display: flex;
- align-items: center;
- justify-content: center;
- background: var(--placeholder-bg);
- color: var(--placeholder-icon);
-}
-
-.thumbnail-placeholder .material-icons-round {
- font-size: 2.5rem;
-}
-
-.duration-overlay {
- position: absolute;
- bottom: 8px;
- right: 8px;
- background: rgba(0, 0, 0, 0.8);
- color: white;
- padding: 4px 8px;
- border-radius: 4px;
- font-size: 12px;
- font-weight: 500;
- backdrop-filter: blur(2px);
-}
-
-.music-info {
- flex: 1;
- min-width: 0;
- padding-top: 4px;
-}
-
-.music-title {
- margin: 0 0 8px 0;
- font-size: 18px;
- line-height: 1.3;
- font-weight: 500;
- color: var(--text-primary);
-}
-
-.music-title:hover {
- text-decoration: underline;
-}
-
-.music-meta {
- display: flex;
- align-items: center;
- gap: 8px;
- font-size: 14px;
- color: var(--text-secondary);
-}
-
-.artist {
- color: var(--accent-color);
- font-weight: 500;
-}
-
-.meta-separator {
- color: var(--border-color);
- font-size: 12px;
-}
-
-/* Responsive Design */
-@media (max-width: 768px) {
- .music-thumbnail {
- flex-basis: 120px;
- }
-
- .music-title {
- font-size: 16px;
- }
-
- .music-meta {
- font-size: 13px;
- gap: 6px;
- }
-}
-
-@media (max-width: 480px) {
- .music-thumbnail {
- flex-basis: 100px;
- }
-
- .duration-overlay {
- font-size: 11px;
- padding: 3px 6px;
- bottom: 6px;
- right: 6px;
- }
-}
\ No newline at end of file
diff --git a/static/css/style.css b/static/css/style.css
index a09cc6d..e4b1cd6 100644
--- a/static/css/style.css
+++ b/static/css/style.css
@@ -60,17 +60,8 @@
visibility: hidden;
}
-.fetched_dif_videos {
- margin-top: 110px !important;
-}
-
-.fetched_dif_files{
- margin-top: 10px !important;
-}
-
-
-.fetched_dif_images {
- margin-top: 10px ;
+.fetched_dif {
+ margin-top: 110px !important;
}
.fetched_img {
@@ -632,10 +623,6 @@ hr {
text-align: left;
}
-.torrent-cat {
- margin-top: 110px;
-}
-
.torrent-cat:hover,
.torrent-settings:hover,
.torrent-sort-save:hover {
@@ -1602,27 +1589,15 @@ body, h1, p, a, input, button {
}
.fetched_img {
- margin-top: 25px !important;
+ margin-top: 135px !important;
margin-left: 1.2% !important;
left: 0px !important;
}
.fetched_vid {
- margin-top: 25px !important;
+ margin-top: 135px !important;
}
- .fetched_dif_videos {
- margin-top: 135px !important;
- }
-
- .fetched_dif_files{
- margin-top: 25px !important;
- }
-
- .fetched_dif_images {
- margin-top: 25px;
- }
-
.results_settings {
left: 20px;
font-size: 13px;
@@ -1634,7 +1609,6 @@ body, h1, p, a, input, button {
}
form.torrent-sort {
- margin-top: 35px;
left: 20px;
}
diff --git a/static/fonts/MaterialIcons-Round.woff2 b/static/fonts/MaterialIcons-Round.woff2
deleted file mode 100644
index f94dba5..0000000
Binary files a/static/fonts/MaterialIcons-Round.woff2 and /dev/null differ
diff --git a/static/fonts/material-icons-round-v108-latin-regular.woff2 b/static/fonts/material-icons-round-v108-latin-regular.woff2
index c143837..6f6a973 100644
Binary files a/static/fonts/material-icons-round-v108-latin-regular.woff2 and b/static/fonts/material-icons-round-v108-latin-regular.woff2 differ
diff --git a/static/js/dynamicscrollingimages.js b/static/js/dynamicscrollingimages.js
index d6eb3e7..6969a53 100644
--- a/static/js/dynamicscrollingimages.js
+++ b/static/js/dynamicscrollingimages.js
@@ -1,152 +1,197 @@
(function() {
// Configuration
- const imageStatusInterval = 500;
- const scrollThreshold = 500;
+ const imageStatusInterval = 500; // Interval in milliseconds to check image status
+ const scrollThreshold = 500; // Distance from bottom of the page to trigger loading
const loadingIndicator = document.getElementById('message-bottom-left');
let loadingTimer;
let isFetching = false;
let page = parseInt(document.getElementById('template-data').getAttribute('data-page')) || 1;
let query = document.getElementById('template-data').getAttribute('data-query');
let hardCacheEnabled = document.getElementById('template-data').getAttribute('data-hard-cache-enabled') === 'true';
- let noMoreImages = false;
+ let noMoreImages = false; // Flag to indicate if there are no more images to load
let imageElements = [];
let imageIds = [];
- let imageStatusTimer;
+ /**
+ * Function to handle image load errors with retry logic
+ * @param {HTMLElement} imgElement - The image element that failed to load
+ * @param {number} retryCount - Number of retries left
+ * @param {number} retryDelay - Delay between retries in milliseconds
+ */
function handleImageError(imgElement, retryCount = 3, retryDelay = 1000) {
if (retryCount > 0) {
setTimeout(() => {
imgElement.src = imgElement.getAttribute('data-full');
- imgElement.onerror = () => handleImageError(imgElement, retryCount - 1, retryDelay);
+ imgElement.onerror = function() {
+ handleImageError(imgElement, retryCount - 1, retryDelay);
+ };
}, retryDelay);
} else {
- console.warn('Image failed to load:', imgElement.getAttribute('data-full'));
- imgElement.parentElement.style.display = 'none';
+ // After retries, hide the image container or set a fallback image
+ console.warn('Image failed to load after retries:', imgElement.getAttribute('data-full'));
+ imgElement.parentElement.style.display = 'none'; // Hide the image container
+ // Alternatively, set a fallback image:
+ // imgElement.src = '/static/images/fallback.svg';
}
}
+ /**
+ * Function to ensure the page is scrollable by loading more images if necessary
+ */
function ensureScrollable() {
- if (noMoreImages) return;
+ if (noMoreImages) return; // Do not attempt if no more images are available
+ // Check if the page is not scrollable
if (document.body.scrollHeight <= window.innerHeight) {
+ // If not scrollable, fetch the next page
fetchNextPage();
}
}
+ /**
+ * Function to fetch the next page of images
+ */
function fetchNextPage() {
if (isFetching || noMoreImages) return;
+
+ // Start the timer for loading indicator
loadingTimer = setTimeout(() => {
loadingIndicator.style.display = 'flex';
}, 150);
+
isFetching = true;
page += 1;
-
+
fetch(`/search?q=${encodeURIComponent(query)}&t=image&p=${page}&ajax=true`)
.then(response => response.text())
.then(html => {
- clearTimeout(loadingTimer);
- loadingIndicator.style.display = 'none';
-
- let tempDiv = document.createElement('div');
- tempDiv.innerHTML = html;
- let newImages = tempDiv.querySelectorAll('.image');
-
+ clearTimeout(loadingTimer); // Clear the timer if fetch is successful
+ loadingIndicator.style.display = 'none'; // Hide the loading indicator
+
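+ // Parse the fetched HTML into a detached document; its nodes stay inert until appended below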
+ let parser = new DOMParser();
+ let doc = parser.parseFromString(html, 'text/html');
+ let newImages = doc.querySelectorAll('.image');
+
if (newImages.length > 0) {
let resultsContainer = document.querySelector('.images');
newImages.forEach(imageDiv => {
- let clonedImageDiv = imageDiv.cloneNode(true);
- resultsContainer.appendChild(clonedImageDiv);
+ // Append new images to the container
+ resultsContainer.appendChild(imageDiv);
- let img = clonedImageDiv.querySelector('img');
- if (img && img.getAttribute('data-id')) {
- if (hardCacheEnabled) {
- img.src = '/static/images/placeholder.svg';
- img.onerror = () => handleImageError(img);
+ // Get the img element
+ let img = imageDiv.querySelector('img');
+ if (img) {
+ let id = img.getAttribute('data-id');
+ if (id) {
imageElements.push(img);
- imageIds.push(img.getAttribute('data-id'));
+ imageIds.push(id);
+ }
+ if (hardCacheEnabled) {
+ // Replace image with placeholder
+ img.src = '/static/images/placeholder.svg';
+ img.onerror = function() {
+ handleImageError(img);
+ };
} else {
+ // HardCacheEnabled is false; load images immediately
img.src = img.getAttribute('data-full');
- img.onerror = () => handleImageError(img);
+ img.onerror = function() {
+ handleImageError(img);
+ };
}
}
});
-
if (hardCacheEnabled) {
- checkImageStatus(); // Immediately check status for new images
+ checkImageStatus();
}
+ // After appending new images, ensure the page is scrollable
ensureScrollable();
} else {
+ // No more images to load
noMoreImages = true;
}
isFetching = false;
})
.catch(error => {
- clearTimeout(loadingTimer);
- loadingIndicator.style.display = 'none';
- console.error('Fetch error:', error);
+ clearTimeout(loadingTimer); // Clear the timer if fetch fails
+ loadingIndicator.style.display = 'none'; // Hide the loading indicator
+ console.error('Error fetching next page:', error);
isFetching = false;
});
}
+ /**
+ * Function to check image status via AJAX
+ */
function checkImageStatus() {
- if (!hardCacheEnabled || imageIds.length === 0) return;
+ if (!hardCacheEnabled) return;
+ if (imageIds.length === 0) {
+ // No images to check, do nothing
+ return;
+ }
+ // Send AJAX request to check image status
fetch(`/image_status?image_ids=${imageIds.join(',')}`)
.then(response => response.json())
.then(statusMap => {
- const pendingImages = [];
- const pendingIds = [];
-
- imageElements.forEach(img => {
- const id = img.getAttribute('data-id');
+ imageElements = imageElements.filter(img => {
+ let id = img.getAttribute('data-id');
if (statusMap[id]) {
+ // Image is ready, update src
img.src = statusMap[id];
- img.onerror = () => handleImageError(img);
- } else {
- pendingImages.push(img);
- pendingIds.push(id);
+ img.onerror = function() {
+ handleImageError(img);
+ };
+ // Remove the image id from the list
+ imageIds = imageIds.filter(imageId => imageId !== id);
+ return false; // Remove img from imageElements
}
+ return true; // Keep img in imageElements
});
-
- imageElements = pendingImages;
- imageIds = pendingIds;
+ // After updating images, ensure the page is scrollable
ensureScrollable();
})
.catch(error => {
- console.error('Status check error:', error);
+ console.error('Error checking image status:', error);
});
}
- // Initialize
- document.querySelectorAll('img[data-id]').forEach(img => {
- const id = img.getAttribute('data-id');
- if (id) {
- imageElements.push(img);
- imageIds.push(id);
- if (hardCacheEnabled) {
- img.src = '/static/images/placeholder.svg';
- } else {
- img.src = img.getAttribute('data-full');
- }
- img.onerror = () => handleImageError(img);
- }
- });
+ // Initialize imageElements and imageIds
+ imageElements = Array.from(document.querySelectorAll('img[data-id]'));
+ imageIds = imageElements
+ .map(img => img.getAttribute('data-id'))
+ .filter(id => id); // Exclude empty IDs
if (hardCacheEnabled) {
- imageStatusTimer = setInterval(checkImageStatus, imageStatusInterval);
- checkImageStatus();
+ // Replace images with placeholders
+ imageElements.forEach(img => {
+ img.src = '/static/images/placeholder.svg';
+ });
+
+ // Start checking image status
+ let imageStatusTimer = setInterval(checkImageStatus, imageStatusInterval);
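+ // The status poll runs for the lifetime of the page; no cleanup handler is registered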
+ checkImageStatus(); // Initial check
+ } else {
+ // HardCacheEnabled is false; load images immediately
+ imageElements.forEach(img => {
+ img.src = img.getAttribute('data-full');
+ img.onerror = function() {
+ handleImageError(img);
+ };
+ });
}
+ // After initial images are loaded, ensure the page is scrollable
window.addEventListener('load', ensureScrollable);
- window.addEventListener('scroll', () => {
+
+ // Infinite scrolling
+ window.addEventListener('scroll', function() {
if (isFetching || noMoreImages) return;
+
if (window.innerHeight + window.scrollY >= document.body.offsetHeight - scrollThreshold) {
+ // User scrolled near the bottom
fetchNextPage();
}
});
- // Cleanup
- window.addEventListener('beforeunload', () => {
- if (imageStatusTimer) clearInterval(imageStatusTimer);
- });
})();
\ No newline at end of file
diff --git a/templates/files.html b/templates/files.html
index 94fc0c6..ff35355 100755
--- a/templates/files.html
+++ b/templates/files.html
@@ -103,10 +103,6 @@
-
-
- {{ translate "music" }}
-
@@ -124,6 +120,8 @@
+ {{ translate "fetched_in" .Fetched }}
+
{{ if .Results }}
- {{ translate "fetched_in" .Fetched }}
-
{{ range .Results }}
diff --git a/templates/forums.html b/templates/forums.html
index c8400bb..1476537 100755
--- a/templates/forums.html
+++ b/templates/forums.html
@@ -103,10 +103,6 @@
-
-
- {{ translate "music" }}
-
@@ -136,8 +132,6 @@
-
{{ translate "fetched_in" .Fetched }}
-
{{if .Results}}
{{range .Results}}
diff --git a/templates/images.html b/templates/images.html
index 5a44b0b..fa6df07 100755
--- a/templates/images.html
+++ b/templates/images.html
@@ -113,10 +113,6 @@
-
-
- {{ translate "music" }}
-
@@ -150,8 +146,7 @@
-
-
{{ translate "fetched_in" .Fetched }}
+
{{ if .Results }}
diff --git a/templates/map.html b/templates/map.html
index 3d6e5a3..054f910 100644
--- a/templates/map.html
+++ b/templates/map.html
@@ -118,10 +118,6 @@
-
-
- {{ translate "music" }}
-
diff --git a/templates/music.html b/templates/music.html
deleted file mode 100644
index 2ac7198..0000000
--- a/templates/music.html
+++ /dev/null
@@ -1,191 +0,0 @@
- {{ if .IsThemeDark }}
- {{ end }}
- {{ .Query }} - Music Search - {{ translate "site_name" }}
- QGato
- An open-source private search engine.
- {{ translate "fetched_in" .Fetched }}
- {{if .Results}}
- {{range .Results}}
- {{.Title}}
- {{.Artist}}
- |
- {{.Source}}
- {{end}}
- {{else if .NoResults}}
- {{ translate "no_results_found" .Query }}
- {{ translate "suggest_rephrase" }}
- {{else}}
- {{ translate "no_more_results" }}
- {{end}}
- {{ translate "searching_for_new_results" }}
\ No newline at end of file
diff --git a/templates/search.html b/templates/search.html
index a6e0b12..44445fe 100755
--- a/templates/search.html
+++ b/templates/search.html
@@ -121,13 +121,6 @@
{{ translate "videos" }}
-
-
-
-
- {{ translate "music" }}
-
{{ translate "forums" }}
@@ -136,9 +132,7 @@
{{end}}
{{ translate "save_settings" }}
-
-
{{ translate "fetched_in" .Fetched }}
-
+
{{if .Results}}
{{range .Results}}
diff --git a/templates/videos.html b/templates/videos.html
index a011b2c..8cc21de 100644
--- a/templates/videos.html
+++ b/templates/videos.html
@@ -103,10 +103,6 @@
{{ translate "videos" }}
-
-
- {{ translate "music" }}
-
{{ translate "forums" }}
@@ -124,7 +120,7 @@
-
{{ translate "fetched_in" .Fetched }}
+
{{ translate "fetched_in" .Fetched }}
{{ if .Results }}
{{ range .Results }}
diff --git a/tests/integration_test.go b/tests/integration_test.go
index f462e30..89d5fc7 100644
--- a/tests/integration_test.go
+++ b/tests/integration_test.go
@@ -2,6 +2,7 @@ package tests
import (
"bufio"
+ "context"
"crypto/rand"
"encoding/json"
"fmt"
@@ -9,7 +10,9 @@ import (
"math/big"
"net/http"
"net/url"
+ "os"
"os/exec"
+ "path/filepath"
"sync"
"syscall"
"testing"
@@ -53,27 +56,50 @@ func TestApplication(t *testing.T) {
// Ensure the test runs from the root directory
rootDir := "../" // Path to the root directory of the repository
- // Run the application using `run.sh`
- runCmd := exec.Command("sh", "./run.sh", "--skip-config-check")
- runCmd.Dir = rootDir
+ // Build the application using `run.sh --build`
+ buildCmd := exec.Command("sh", "./run.sh", "--build")
+ buildCmd.Dir = rootDir
+
+ buildOutput, err := buildCmd.CombinedOutput()
+ if err != nil {
+ t.Fatalf("Failed to build application: %v\nOutput:\n%s", err, string(buildOutput))
+ }
+ t.Log("Application built successfully")
+
+ // Path to the built executable relative to rootDir
+ executablePath := "./qgato" // Since cmd.Dir is rootDir, this path is relative to rootDir
+
+ // Ensure the executable has execute permissions
+ execFullPath := filepath.Join(rootDir, "qgato")
+ if err := os.Chmod(execFullPath, 0755); err != nil {
+ t.Fatalf("Failed to set execute permissions on the executable: %v", err)
+ }
+
+ // Create a context with cancellation
+ ctx, cancel := context.WithCancel(context.Background())
+ defer cancel() // Ensure resources are cleaned up
+
+ // Start the application using the built executable
+ cmd := exec.CommandContext(ctx, executablePath, "--skip-config-check")
+ cmd.Dir = rootDir // Set the working directory to the root directory
// Set process group ID so we can kill it and its children
- runCmd.SysProcAttr = &syscall.SysProcAttr{
+ cmd.SysProcAttr = &syscall.SysProcAttr{
Setpgid: true,
}
// Capture application output for logging
- appStdout, err := runCmd.StdoutPipe()
+ appStdout, err := cmd.StdoutPipe()
if err != nil {
t.Fatalf("Failed to capture stdout: %v", err)
}
- appStderr, err := runCmd.StderrPipe()
+ appStderr, err := cmd.StderrPipe()
if err != nil {
t.Fatalf("Failed to capture stderr: %v", err)
}
// Start the application
- if err := runCmd.Start(); err != nil {
+ if err := cmd.Start(); err != nil {
t.Fatalf("Failed to start application: %v", err)
}
@@ -94,28 +120,28 @@ func TestApplication(t *testing.T) {
// Defer cleanup to ensure process is killed after the test
defer func() {
// Kill the process group
- pgid, err := syscall.Getpgid(runCmd.Process.Pid)
+ pgid, err := syscall.Getpgid(cmd.Process.Pid)
if err == nil {
syscall.Kill(-pgid, syscall.SIGKILL)
} else {
t.Logf("Failed to get process group ID: %v", err)
- runCmd.Process.Kill()
+ cmd.Process.Kill()
}
- runCmd.Wait()
+ cmd.Wait()
// Print summary
printSummary(summary, t)
}()
// Wait for the server to start
- if !waitForServer("http://localhost:5000", 600*time.Second) {
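+	// The prebuilt binary starts much faster than a go-run compile, so a short timeout suffices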
+ if !waitForServer("http://localhost:5000", 15*time.Second) {
t.Fatalf("Server did not start within the expected time")
}
t.Log("Application is running")
// Create a process instance for the application
- appProcess, err := process.NewProcess(int32(runCmd.Process.Pid))
+ appProcess, err := process.NewProcess(int32(cmd.Process.Pid))
if err != nil {
t.Fatalf("Failed to create process instance: %v", err)
}
diff --git a/text-brave.go b/text-brave.go
index 09b9dbe..43e752e 100644
--- a/text-brave.go
+++ b/text-brave.go
@@ -2,7 +2,7 @@ package main
import (
"fmt"
- "io"
+ "io/ioutil"
"net/http"
"net/url"
"strings"
@@ -17,83 +17,62 @@ func PerformBraveTextSearch(query, safe, lang string, offset int) ([]TextSearchR
var results []TextSearchResult
// Build the search URL
- searchURL := fmt.Sprintf("https://search.brave.com/search?q=%s", url.QueryEscape(query))
- if offset > 1 {
- searchURL += fmt.Sprintf("&offset=%d&spellcheck=0", offset-1)
- }
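+	// The caller's offset is now passed straight through in the offset query parameter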
+ searchURL := fmt.Sprintf("https://search.brave.com/search?q=%s&offset=%d", url.QueryEscape(query), offset)
req, err := http.NewRequest("GET", searchURL, nil)
if err != nil {
- printWarn("Error creating request: %v", err)
return nil, 0, fmt.Errorf("creating request: %v", err)
}
- TextUserAgent, err := GetUserAgent("Text-Search-Brave")
+ // Set headers including User-Agent
+ TextUserAgent, err := GetUserAgent("Text-Search")
if err != nil {
- printWarn("Error generating User-Agent: %v", err)
return nil, 0, err
}
req.Header.Set("User-Agent", TextUserAgent)
- // Single call to DoMetaProxyRequest:
- resp, err := DoMetaProxyRequest(req)
+ // Perform the HTTP request
+ client := &http.Client{}
+ resp, err := client.Do(req)
if err != nil {
- printWarn("Error performing request: %v", err)
- return nil, 0, fmt.Errorf("performing meta-request: %v", err)
+ return nil, 0, fmt.Errorf("performing request: %v", err)
}
defer resp.Body.Close()
// Read the response body
- body, err := io.ReadAll(resp.Body)
+ body, err := ioutil.ReadAll(resp.Body)
if err != nil {
- printWarn("Error reading response body: %v", err)
return nil, 0, fmt.Errorf("reading response body: %v", err)
}
// Parse the response body
doc, err := goquery.NewDocumentFromReader(strings.NewReader(string(body)))
if err != nil {
- return nil, 0, fmt.Errorf("parsing HTML: %v", err)
+ return nil, 0, fmt.Errorf("parsing response body: %v", err)
}
- // Only grab .snippet blocks that have data-type="web"
- doc.Find(`.snippet[data-type="web"]`).Each(func(i int, s *goquery.Selection) {
+ // Extract search results
+ doc.Find(".snippet").Each(func(i int, s *goquery.Selection) {
+ title := s.Find(".title").Text()
+ description := s.Find(".snippet-description").Text()
+ url, exists := s.Find("a").Attr("href")
- // The main clickable link is the anchor with class "heading-serpresult"
- anchor := s.Find("a.heading-serpresult").First()
- link, ok := anchor.Attr("href")
- if !ok || link == "" {
- return
- }
-
- // Title is inside the ".title" element within that anchor
- title := strings.TrimSpace(anchor.Find(".title").Text())
- if title == "" {
- // fallback if the .title is slightly off in the DOM
- title = strings.TrimSpace(s.Find(".title").Text())
- }
-
- // Description is inside ".snippet-description"
- desc := strings.TrimSpace(s.Find(".snippet-description").Text())
-
- // Add only if everything is non-empty
- if title != "" && desc != "" {
+ // Add to results only if all components are present
+ if title != "" && description != "" && exists && url != "" {
results = append(results, TextSearchResult{
Header: title,
- URL: link,
- Description: desc,
+ URL: url,
+ Description: description,
})
}
})
- duration := time.Since(startTime)
+ duration := time.Since(startTime) // Calculate the duration
// Return an error if no results are found
if len(results) == 0 {
- printDebug("No results found for query")
return nil, duration, fmt.Errorf("no results found")
}
- printDebug("Search completed successfully found %d results", len(results))
return results, duration, nil
}
diff --git a/text-duckduckgo.go b/text-duckduckgo.go
index 6fc836a..b4033c1 100644
--- a/text-duckduckgo.go
+++ b/text-duckduckgo.go
@@ -16,36 +16,21 @@ func PerformDuckDuckGoTextSearch(query, safe, lang string, page int) ([]TextSear
var results []TextSearchResult
searchURL := buildDuckDuckGoSearchURL(query, page)
- // Create a request
- req, err := http.NewRequest("GET", searchURL, nil)
+ resp, err := http.Get(searchURL)
if err != nil {
- return nil, 0, fmt.Errorf("creating request: %v", err)
- }
-
- userAgent, err := GetUserAgent("duckduck-text-search")
- if err != nil {
- return nil, 0, err
- }
- req.Header.Set("User-Agent", userAgent)
-
- resp, err := DoMetaProxyRequest(req)
- if err != nil {
- return nil, 0, fmt.Errorf("failed to do meta-request: %v", err)
+ return nil, 0, fmt.Errorf("making request: %v", err)
}
defer resp.Body.Close()
- // Check for HTTP status code
if resp.StatusCode != http.StatusOK {
return nil, 0, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
}
- // Parse HTML response
doc, err := goquery.NewDocumentFromReader(resp.Body)
if err != nil {
return nil, 0, fmt.Errorf("loading HTML document: %v", err)
}
- // Extract results from the page
doc.Find(".result__body").Each(func(i int, s *goquery.Selection) {
header := s.Find(".result__a").Text()
description := s.Find(".result__snippet").Text()
diff --git a/text-google.go b/text-google.go
index 346c76c..a706aff 100644
--- a/text-google.go
+++ b/text-google.go
@@ -11,46 +11,45 @@ import (
)
func PerformGoogleTextSearch(query, safe, lang string, page int) ([]TextSearchResult, time.Duration, error) {
- startTime := time.Now()
const resultsPerPage = 10
+ var results []TextSearchResult
- // 1) Build the search URL
+ startTime := time.Now() // Start the timer
+
+ client := &http.Client{}
searchURL := buildSearchURL(query, safe, lang, page, resultsPerPage)
- // 2) Create a new request
req, err := http.NewRequest("GET", searchURL, nil)
if err != nil {
return nil, 0, fmt.Errorf("failed to create request: %v", err)
}
- // 3) Generate and set a User-Agent header
- userAgent, err := GetUserAgent("Google-Text-Search")
+ // User Agent generation
+ TextUserAgent, err := GetUserAgent("Text-Search")
if err != nil {
return nil, 0, err
}
- req.Header.Set("User-Agent", userAgent)
- // 4) Use the meta-proxy wrapper
- resp, err := DoMetaProxyRequest(req)
+ req.Header.Set("User-Agent", TextUserAgent)
+
+ resp, err := client.Do(req)
if err != nil {
- return nil, 0, fmt.Errorf("failed to do meta-request: %v", err)
+ return nil, 0, fmt.Errorf("making request: %v", err)
}
defer resp.Body.Close()
- // 5) Check HTTP status
if resp.StatusCode != http.StatusOK {
return nil, 0, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
}
- // 6) Parse the HTML response
doc, err := goquery.NewDocumentFromReader(resp.Body)
if err != nil {
return nil, 0, fmt.Errorf("loading HTML document: %v", err)
}
- results := parseResults(doc)
- // 7) Calculate duration
- duration := time.Since(startTime)
+ results = parseResults(doc)
+
+ duration := time.Since(startTime) // Calculate the duration
if len(results) == 0 {
printDebug("No results found from Google Search")
@@ -66,7 +65,7 @@ func buildSearchURL(query, safe, lang string, page, resultsPerPage int) string {
}
langParam := ""
- glParam := ""
+ var glParam string
if lang != "" {
// Use lang as the geolocation
@@ -78,16 +77,12 @@ func buildSearchURL(query, safe, lang string, page, resultsPerPage int) string {
}
startIndex := (page - 1) * resultsPerPage
- udmParam := "&udm=14" // Add the required parameter
- // Build the URL string
- baseURL := "https://www.google.com/search?q=%s%s%s%s&start=%d%s"
- fullURL := fmt.Sprintf(baseURL, url.QueryEscape(query), safeParam, langParam, glParam, startIndex, udmParam)
+ printDebug(fmt.Sprintf("https://www.google.com/search?q=%s%s%s%s&start=%d",
+ url.QueryEscape(query), safeParam, langParam, glParam, startIndex))
- // Debug print
- printDebug("%s", fullURL)
-
- return fullURL
+ return fmt.Sprintf("https://www.google.com/search?q=%s%s%s%s&start=%d",
+ url.QueryEscape(query), safeParam, langParam, glParam, startIndex)
}
// func getRandomGeoLocation() (string, string) {
diff --git a/text-librex.go b/text-librex.go
index 622d343..55eeb91 100644
--- a/text-librex.go
+++ b/text-librex.go
@@ -3,11 +3,14 @@ package main
import (
"encoding/json"
"fmt"
+ "log"
"net/http"
"net/url"
"time"
)
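+// All queries go to this single, hardcoded LibreX instance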
+const LIBREX_DOMAIN = "librex.antopie.org"
+
type LibreXResult struct {
Title string `json:"title"`
URL string `json:"url"`
@@ -17,65 +20,65 @@ type LibreXResult struct {
type LibreXResponse []LibreXResult
func PerformLibreXTextSearch(query, safe, lang string, page int) ([]TextSearchResult, time.Duration, error) {
- startTime := time.Now()
+ startTime := time.Now() // Start the timer
- // LibreX uses offset instead of page (starting at 0)
- pageOffset := (page - 1) * 10
+ // LibreX/Y paginates with a zero-based offset rather than a page number
+ page--
+ page = page * 10
- // Generate User-Agent
+ searchURL := fmt.Sprintf("https://%s/api.php?q=%s&p=%d&t=0", LIBREX_DOMAIN, url.QueryEscape(query), page)
+
+ // User Agent generation
userAgent, err := GetUserAgent("librex-text-search")
if err != nil {
return nil, 0, err
}
- var allResults []TextSearchResult
+ req, err := http.NewRequest("GET", searchURL, nil)
+ if err != nil {
+ return nil, 0, err
+ }
+ req.Header.Set("User-Agent", userAgent)
- for _, domain := range config.LibreXInstances {
- searchURL := fmt.Sprintf("https://%s/api.php?q=%s&p=%d&t=0",
- domain,
- url.QueryEscape(query),
- pageOffset,
- )
+ client := &http.Client{}
+ resp, err := client.Do(req)
+ if err != nil {
+ return nil, 0, logError("error making request to LibreX", err)
+ }
+ defer resp.Body.Close()
- req, err := http.NewRequest("GET", searchURL, nil)
- if err != nil {
- printWarn("failed to create request for domain %s: %v", domain, err)
- continue
- }
- req.Header.Set("User-Agent", userAgent)
-
- resp, err := DoMetaProxyRequest(req)
- if err != nil {
- return nil, 0, fmt.Errorf("failed to do meta-request: %v", err)
- }
- defer resp.Body.Close()
-
- if resp.StatusCode != http.StatusOK {
- printWarn("unexpected status code from %s: %d", domain, resp.StatusCode)
- continue
- }
-
- var librexResp LibreXResponse
- if err := json.NewDecoder(resp.Body).Decode(&librexResp); err != nil {
- printWarn("error decoding response from %s: %v", domain, err)
- continue
- }
-
- // Accumulate results from this instance
- for _, item := range librexResp {
- allResults = append(allResults, TextSearchResult{
- URL: item.URL,
- Header: item.Title,
- Description: item.Description,
- Source: "LibreX",
- })
- }
+ if resp.StatusCode != http.StatusOK {
+ return nil, 0, logError("unexpected status code", fmt.Errorf("%d", resp.StatusCode))
}
- duration := time.Since(startTime)
- if len(allResults) == 0 {
- return nil, duration, fmt.Errorf("no results found from any LibreX instance")
+ var librexResp LibreXResponse
+ if err := json.NewDecoder(resp.Body).Decode(&librexResp); err != nil {
+ return nil, 0, logError("error decoding LibreX response", err)
}
- return allResults, duration, nil
+ var results []TextSearchResult
+ for _, item := range librexResp {
+ result := TextSearchResult{
+ URL: item.URL,
+ Header: item.Title,
+ Description: item.Description,
+ Source: "LibreX",
+ }
+
+ results = append(results, result)
+ }
+
+ duration := time.Since(startTime) // Calculate the duration
+
+ if len(results) == 0 {
+ return nil, duration, fmt.Errorf("no results found")
+ }
+
+ return results, duration, nil
+}
+
+// Not ideal: logging here and also returning the error will probably print it twice
+func logError(message string, err error) error {
+ log.Printf("%s: %v", message, err)
+ return fmt.Errorf("%s: %w", message, err)
}
diff --git a/text-quant.go b/text-quant.go
index 959879d..de8b03a 100644
--- a/text-quant.go
+++ b/text-quant.go
@@ -26,11 +26,9 @@ type QwantTextAPIResponse struct {
}
// PerformQwantTextSearch contacts the Qwant API and returns a slice of TextSearchResult
-func PerformQwantTextSearch(query, safe, lang string, page int) ([]TextSearchResult, time.Duration, error) {
- startTime := time.Now()
-
+func PerformQwantTextSearch(query, safe, lang string) ([]TextSearchResult, error) {
const resultsPerPage = 10
- offset := (page - 1) * resultsPerPage
+ const offset = 0
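+	// Pagination was removed from this variant; only the first page of results is fetched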
// Ensure safe search is disabled by default if not specified
if safe == "" {
@@ -42,44 +40,39 @@ func PerformQwantTextSearch(query, safe, lang string, page int) ([]TextSearchRes
lang = "en_CA"
}
- apiURL := fmt.Sprintf(
- "https://api.qwant.com/v3/search/web?q=%s&count=%d&locale=%s&offset=%d&device=desktop",
+ apiURL := fmt.Sprintf("https://api.qwant.com/v3/search/web?q=%s&count=%d&locale=%s&offset=%d&device=desktop",
url.QueryEscape(query),
resultsPerPage,
lang,
- offset,
- )
+ offset)
+
+ client := &http.Client{Timeout: 10 * time.Second}
req, err := http.NewRequest("GET", apiURL, nil)
if err != nil {
- // Return three values: nil for the slice, 0 for duration, error for the third.
- return nil, 0, fmt.Errorf("creating request: %v", err)
+ return nil, fmt.Errorf("creating request: %v", err)
}
- userAgent, err := GetUserAgent("Quant-Text-Search")
- if err != nil {
- return nil, 0, err
- }
- req.Header.Set("User-Agent", userAgent)
+ req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36")
- resp, err := DoMetaProxyRequest(req)
+ resp, err := client.Do(req)
if err != nil {
- return nil, 0, fmt.Errorf("failed to do meta-request: %v", err)
+ return nil, fmt.Errorf("making request: %v", err)
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
- return nil, 0, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
+ return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
}
var apiResp QwantTextAPIResponse
if err := json.NewDecoder(resp.Body).Decode(&apiResp); err != nil {
- return nil, 0, fmt.Errorf("decoding response: %v", err)
+ return nil, fmt.Errorf("decoding response: %v", err)
}
// Extracting results from the nested JSON structure
if len(apiResp.Data.Result.Items.Mainline) == 0 {
- return nil, 0, fmt.Errorf("no search results found")
+ return nil, fmt.Errorf("no search results found")
}
var results []TextSearchResult
@@ -93,8 +86,7 @@ func PerformQwantTextSearch(query, safe, lang string, page int) ([]TextSearchRes
})
}
- duration := time.Since(startTime)
- return results, duration, nil
+ return results, nil
}
// cleanQwantURL extracts the main part of the URL, removing tracking information
diff --git a/text-searchxng.go b/text-searchxng.go
index 68cd315..3ed118f 100644
--- a/text-searchxng.go
+++ b/text-searchxng.go
@@ -3,56 +3,31 @@ package main
import (
"encoding/json"
"fmt"
- "io"
+ "io/ioutil"
+ "log"
"net/http"
- "net/url"
- "strings"
"time"
)
type Instance struct {
- URL string `json:"-"` // Populated from map key
- Analytics bool `json:"analytics"`
- Comments []string `json:"comments"`
- AlternativeUrls map[string]interface{} `json:"alternativeUrls"`
- Main bool `json:"main"`
- NetworkType string `json:"network_type"`
- HTTP struct {
- StatusCode int `json:"status_code"`
- Error string `json:"error"`
- } `json:"http"`
- Version string `json:"version"`
- Grade string `json:"grade"`
- GradeURL string `json:"gradeUrl"`
- Generator string `json:"generator"`
- ContactURL FlexibleType `json:"contact_url"` // Custom type
- DocsURL string `json:"docs_url"`
-}
-
-type FlexibleType struct {
- StringValue string
- BoolValue bool
- IsString bool
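+	// Trimmed view of a searx.space registry entry; these JSON field names are assumptions, not verified against the live schema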
+ URL string `json:"url"`
+ Status int `json:"status"`
+ SSLGrade string `json:"ssl_grade"`
}
const searxInstancesURL = "https://searx.space/data/instances.json"
-// FetchInstances fetches available SearX instances from the registry.
func fetchInstances() ([]Instance, error) {
+ client := &http.Client{Timeout: 10 * time.Second}
req, err := http.NewRequest("GET", searxInstancesURL, nil)
if err != nil {
- return nil, fmt.Errorf("creating request: %v", err)
+ return nil, err
}
+ req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36")
- XNGUserAgent, err := GetUserAgent("Text-Search-XNG")
+ resp, err := client.Do(req)
if err != nil {
- return nil, fmt.Errorf("generating User-Agent: %v", err)
- }
- req.Header.Set("User-Agent", XNGUserAgent)
-
- resp, err := DoMetaProxyRequest(req)
- if err != nil {
- return nil, fmt.Errorf("failed to do meta-request: %v", err)
+ return nil, err
}
defer resp.Body.Close()
@@ -60,105 +35,44 @@ func fetchInstances() ([]Instance, error) {
return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
}
- body, err := io.ReadAll(resp.Body)
+ body, err := ioutil.ReadAll(resp.Body)
if err != nil {
- return nil, fmt.Errorf("reading response body: %v", err)
+ return nil, err
}
- // Root structure of the JSON response
- var root struct {
- Instances map[string]Instance `json:"instances"`
- }
-
- // Unmarshal JSON into the root structure
- err = json.Unmarshal(body, &root)
- if err != nil {
- return nil, fmt.Errorf("parsing response JSON: %v", err)
- }
-
- // Collect instances into a slice
var instances []Instance
- for url, instance := range root.Instances {
- instance.URL = url // Assign the URL from the map key
- instances = append(instances, instance)
+ // searx.space serves a JSON object keyed by instance URL, not an
+ // array, so decode the wrapper object and flatten it into a slice.
+ var root struct {
+ Instances map[string]Instance `json:"instances"`
+ }
+ if err := json.Unmarshal(body, &root); err != nil {
+ return nil, err
+ }
+ for instanceURL, inst := range root.Instances {
+ inst.URL = instanceURL
+ instances = append(instances, inst)
+ }
return instances, nil
}
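A quick usage sketch for `fetchInstances` (hypothetical driver code, not part of this diff, assuming `log` and `fmt` are imported):

```go
// Hypothetical usage: fetch the registry and print every instance URL.
instances, err := fetchInstances()
if err != nil {
	log.Fatalf("fetching instances: %v", err)
}
log.Printf("fetched %d instances", len(instances))
for _, inst := range instances {
	fmt.Println(inst.URL)
}
```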
-// UnmarshalJSON implements custom unmarshalling for FlexibleType.
-func (f *FlexibleType) UnmarshalJSON(data []byte) error {
- // Try to unmarshal as a string
- var str string
- if err := json.Unmarshal(data, &str); err == nil {
- f.StringValue = str
- f.IsString = true
- return nil
- }
-
- // Try to unmarshal as a bool
- var b bool
- if err := json.Unmarshal(data, &b); err == nil {
- f.BoolValue = b
- f.IsString = false
- return nil
- }
-
- // Return an error if neither works
- return fmt.Errorf("invalid FlexibleType: %s", string(data))
-}
-
-// String returns the string representation of FlexibleType.
-func (f FlexibleType) String() string {
- if f.IsString {
- return f.StringValue
- }
- return fmt.Sprintf("%v", f.BoolValue)
-}
-
-// ValidateInstance checks if a SearX instance is valid by performing a test query.
func validateInstance(instance Instance) bool {
- // Skip .onion instances
- if strings.Contains(instance.URL, ".onion") {
- printDebug("Skipping .onion instance: %s", instance.URL)
- return false
- }
-
- client := &http.Client{
- Timeout: 10 * time.Second,
- }
-
- testURL := fmt.Sprintf("%s/search?q=test&categories=general&language=en&safe_search=1&page=1&format=json", instance.URL)
- req, err := http.NewRequest("GET", testURL, nil)
+ client := &http.Client{Timeout: 10 * time.Second}
+ req, err := http.NewRequest("GET", fmt.Sprintf("%s/search?q=test&categories=general&language=en&safe_search=1&page=1&format=json", instance.URL), nil)
if err != nil {
- printDebug("Error creating SearchXNG request for instance validation: %v", err)
+ log.Printf("Error creating request for URL: %s, Error: %v", instance.URL, err)
return false
}
-
- XNGUserAgent, err := GetUserAgent("Text-Search-XNG")
- if err != nil {
- printWarn("Error generating User-Agent: %v", err)
- return false
- }
- req.Header.Set("User-Agent", XNGUserAgent)
+ req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36")
resp, err := client.Do(req)
if err != nil {
- printDebug("Error performing request for SearchXNG instance validation: %v", err)
+ log.Printf("Error performing request for URL: %s, Error: %v", instance.URL, err)
return false
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
- printDebug("SearchXNG Instance validation failed. StatusCode: %d", resp.StatusCode)
+ log.Printf("Instance validation failed for URL: %s, StatusCode: %d", instance.URL, resp.StatusCode)
return false
}
- // Successful validation
return true
}
-// GetValidInstance fetches and validates SearX instances, returning a valid one.
func getValidInstance() (*Instance, error) {
instances, err := fetchInstances()
if err != nil {
@@ -174,81 +88,11 @@ func getValidInstance() (*Instance, error) {
return nil, fmt.Errorf("no valid SearX instances found")
}
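The middle of `getValidInstance` is elided by this hunk. Based on its visible tail, the selection loop presumably looks something like this sketch:

```go
// Assumed shape of the elided loop: return the first instance that
// passes validateInstance, falling through to the error below.
for _, inst := range instances {
	if validateInstance(inst) {
		inst := inst // copy before taking the address
		return &inst, nil
	}
}
```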
-// PerformSearXTextSearch performs a text search using a SearX instance.
-func PerformSearXTextSearch(query, categories, language string, page int) ([]TextSearchResult, time.Duration, error) {
- // Default value for "safe" search
- safe := "1"
+// func main() {
+// instance, err := getValidInstance()
+// if err != nil {
+// log.Fatalf("Failed to get a valid SearX instance: %v", err)
+// }
- startTime := time.Now() // Start the timer
- var results []TextSearchResult
-
- instance, err := getValidInstance()
- if err != nil {
- return nil, 0, fmt.Errorf("failed to get a valid SearX instance: %w", err)
- }
-
- searchURL := fmt.Sprintf("%s/search?q=%s&categories=%s&language=%s&safe_search=%s&page=%d&format=json",
- instance.URL, url.QueryEscape(query), categories, language, safe, page)
-
- req, err := http.NewRequest("GET", searchURL, nil)
- if err != nil {
- return nil, 0, fmt.Errorf("creating request: %v", err)
- }
-
- XNGUserAgent, err := GetUserAgent("Text-Search-XNG")
- if err != nil {
- return nil, 0, fmt.Errorf("generating User-Agent: %v", err)
- }
- req.Header.Set("User-Agent", XNGUserAgent)
-
- var resp *http.Response
- if config.MetaProxyEnabled && metaProxyClient != nil {
- resp, err = metaProxyClient.Do(req)
- } else {
- client := &http.Client{Timeout: 10 * time.Second}
- resp, err = client.Do(req)
- }
- if err != nil {
- return nil, 0, fmt.Errorf("performing request: %v", err)
- }
- defer resp.Body.Close()
-
- body, err := io.ReadAll(resp.Body)
- if err != nil {
- return nil, 0, fmt.Errorf("reading response body: %v", err)
- }
-
- // Parse the JSON response to extract search results
- var response map[string]interface{}
- err = json.Unmarshal(body, &response)
- if err != nil {
- return nil, 0, fmt.Errorf("parsing response JSON: %v", err)
- }
-
- // Extract search results
- if items, ok := response["results"].([]interface{}); ok {
- for _, item := range items {
- if result, ok := item.(map[string]interface{}); ok {
- title := strings.TrimSpace(fmt.Sprintf("%v", result["title"]))
- url := strings.TrimSpace(fmt.Sprintf("%v", result["url"]))
- description := strings.TrimSpace(fmt.Sprintf("%v", result["content"]))
-
- results = append(results, TextSearchResult{
- Header: title,
- URL: url,
- Description: description,
- })
- }
- }
- }
-
- duration := time.Since(startTime) // Calculate the duration
-
- if len(results) == 0 {
- printDebug("No results found for query: %s", query)
- return nil, duration, fmt.Errorf("no results found")
- }
-
- printDebug("Search completed successfully for query: %s, found %d results", query, len(results))
- return results, duration, nil
-}
+// log.Printf("Selected SearX instance: %s", instance.URL)
+// }
diff --git a/text.go b/text.go
index 3422f84..d6e3212 100755
--- a/text.go
+++ b/text.go
@@ -8,26 +8,13 @@ import (
var textSearchEngines []SearchEngine
-var allTextSearchEngines = []SearchEngine{
- {Name: "Google", Func: wrapTextSearchFunc(PerformGoogleTextSearch)},
- {Name: "LibreX", Func: wrapTextSearchFunc(PerformLibreXTextSearch)},
- {Name: "Brave", Func: wrapTextSearchFunc(PerformBraveTextSearch)},
- {Name: "DuckDuckGo", Func: wrapTextSearchFunc(PerformDuckDuckGoTextSearch)},
- {Name: "Quant", Func: wrapTextSearchFunc(PerformQwantTextSearch)}, // Broken !
- //{Name: "SearXNG", Func: wrapTextSearchFunc(PerformSearXTextSearch)}, // bruh
-}
-
-func initTextEngines() {
- // textSearchEngines is your final slice (already declared globally)
- textSearchEngines = nil // or make([]SearchEngine, 0)
-
- for _, engineName := range config.MetaSearch.Text {
- for _, candidate := range allTextSearchEngines {
- if candidate.Name == engineName {
- textSearchEngines = append(textSearchEngines, candidate)
- break
- }
- }
+func init() {
+ textSearchEngines = []SearchEngine{
+ {Name: "Google", Func: wrapTextSearchFunc(PerformGoogleTextSearch)},
+ {Name: "LibreX", Func: wrapTextSearchFunc(PerformLibreXTextSearch)},
+ {Name: "Brave", Func: wrapTextSearchFunc(PerformBraveTextSearch)},
+ {Name: "DuckDuckGo", Func: wrapTextSearchFunc(PerformDuckDuckGoTextSearch)},
+ // {Name: "SearXNG", Func: wrapTextSearchFunc(PerformSearXNGTextSearch), Weight: 2}, // Uncomment when implemented
}
}
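For context, the `SearchEngine` values registered here presumably look like the sketch below. The field list is inferred from the literals above and from the `Weight` field in the commented-out SearXNG entry; the `Func` signature is purely an assumption:

```go
// Assumed shape of SearchEngine; the real definition lives elsewhere
// in the repository, and the Func signature here is a guess.
type SearchEngine struct {
	Name   string
	Func   func(query, safe, lang string, page int) ([]TextSearchResult, error)
	Weight int
}
```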
@@ -92,7 +79,7 @@ func getTextResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string,
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
}
} else {
- textResults, _, _, _, _ := convertToSpecificResults(results)
+ textResults, _, _, _ := convertToSpecificResults(results)
combinedResults = textResults
}
case <-time.After(2 * time.Second):
@@ -111,7 +98,7 @@ func prefetchPage(query, safe, lang string, page int) {
cacheKey := CacheKey{Query: query, Page: page, Safe: safe == "active", Lang: lang, Type: "text"}
if _, exists := resultsCache.Get(cacheKey); !exists {
printInfo("Page %d not cached, caching now...", page)
- if config.MetaSearchEnabled {
+ if config.CrawlerEnabled {
pageResults := fetchTextResults(query, safe, lang, page)
if len(pageResults) > 0 {
resultsCache.Set(cacheKey, convertToSearchResults(pageResults))
@@ -124,11 +111,10 @@ func prefetchPage(query, safe, lang string, page int) {
}
}
-// The logic in this function is rotating search engines instead of running them in order as noted in the wiki
func fetchTextResults(query, safe, lang string, page int) []TextSearchResult {
var results []TextSearchResult
- if !config.MetaSearchEnabled {
+ if !config.CrawlerEnabled {
printDebug("Crawler is disabled; fetching from local index.")
// Calculate the starting position based on the page number
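The rest of the local-index path is cut off by this hunk. The starting position is presumably derived from the 1-based page number, as in this sketch (assuming 10 results per page, matching the Qwant constant above):

```go
// Hypothetical continuation of the local-index path: compute the
// slice offset from the 1-based page number (assumes 10 per page).
const resultsPerPage = 10
start := (page - 1) * resultsPerPage
```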
diff --git a/user-settings.go b/user-settings.go
index 68c3a39..a872f11 100755
--- a/user-settings.go
+++ b/user-settings.go
@@ -72,9 +72,8 @@ func saveUserSettings(w http.ResponseWriter, settings UserSettings) {
Value: cd.GetValue(settings),
Path: "/",
Expires: expiration,
- Secure: true, // Ensure HTTPS is required
- HttpOnly: true,
- SameSite: http.SameSiteStrictMode, // Restrict cross-site usage
+ Secure: true,
+ SameSite: http.SameSiteStrictMode,
})
}
diff --git a/video.go b/video.go
index 193773c..3120367 100644
--- a/video.go
+++ b/video.go
@@ -12,16 +12,25 @@ import (
const retryDuration = 12 * time.Hour // Retry duration for unresponsive Piped instances
var (
- pipedInstances = []string{}
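+ // Hardcoded list of public Piped API hosts; instances that stop responding are disabled for retryDuration.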
+ pipedInstances = []string{
+ "api.piped.yt",
+ "pipedapi.moomoo.me",
+ "pipedapi.darkness.services",
+ "pipedapi.kavin.rocks",
+ "piped-api.hostux.net",
+ "pipedapi.syncpundit.io",
+ "piped-api.cfe.re",
+ "pipedapi.in.projectsegfau.lt",
+ "piapi.ggtyler.dev",
+ "piped-api.codespace.cz",
+ "pipedapi.coldforge.xyz",
+ "pipedapi.osphost.fi",
+ }
disabledInstances = make(map[string]bool)
mu sync.Mutex
videoResultsChan = make(chan []VideoResult) // Channel to receive video results from other nodes
)
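`makeHTMLRequest` (used further down) presumably queries one of these hosts. A minimal sketch of hitting a single Piped instance's search endpoint follows; the `/search` path and its parameters are assumptions based on the public Piped API, not taken from this diff:

```go
// Hypothetical sketch: query one Piped instance's search API and
// decode the response into VideoAPIResponse. Endpoint and parameters
// are assumptions, not taken from this diff.
func searchPipedInstance(instance, query string) (*VideoAPIResponse, error) {
	client := &http.Client{Timeout: 10 * time.Second}
	reqURL := fmt.Sprintf("https://%s/search?q=%s&filter=videos", instance, url.QueryEscape(query))
	resp, err := client.Get(reqURL)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
	}
	var apiResp VideoAPIResponse
	if err := json.NewDecoder(resp.Body).Decode(&apiResp); err != nil {
		return nil, err
	}
	return &apiResp, nil
}
```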
-func initPipedInstances() {
- pipedInstances = config.MetaSearch.Video
-}
-
// VideoAPIResponse matches the structure of the JSON response from the Piped API
type VideoAPIResponse struct {
Items []struct {
@@ -142,7 +151,7 @@ func handleVideoSearch(w http.ResponseWriter, settings UserSettings, query strin
start := time.Now()
var results []VideoResult
- if config.MetaSearchEnabled {
+ if config.CrawlerEnabled {
results = fetchVideoResults(query, settings.SafeSearch, settings.SearchLanguage, page)
}
@@ -175,12 +184,12 @@ func handleVideoSearch(w http.ResponseWriter, settings UserSettings, query strin
func fetchVideoResults(query, safe, lang string, page int) []VideoResult {
// Check if the crawler is enabled
- if !config.MetaSearchEnabled {
+ if !config.CrawlerEnabled {
printDebug("Crawler is disabled; skipping video search.")
return []VideoResult{}
}
- // Proceed with Piped API request if MetaSearchEnabled
+ // Proceed with Piped API request if CrawlerEnabled
apiResp, err := makeHTMLRequest(query, safe, lang, page)
if err != nil {
printWarn("Error fetching video results: %v", err)