Compare commits
10 commits
Commits: 614ce8903e, 234f1dd3be, 24c7a09479, 851b93bed5, dfb8c35bc6, 27c29c185a, 0851e9e9f2, 3d893ad94d, 7886a3e60f, 8bb6fdc03d
27 changed files with 748 additions and 244 deletions
README.md (24 lines changed)
@@ -47,11 +47,11 @@ A self-hosted private search engine designed to be scalable and more resource-ef
 ### For Self-Hosting
 
-- **Self-hosted option** - Run on your own server for even more privacy.
+- **[Easy to Set Up](https://weforge.xyz/Spitfire/Search#running-the-qgato)** - Quick and straightforward setup process for anyone.
 - **Lightweight** - Low memory footprint (15-30MiB) even during searches.
 - **Decentralized** - No single point of failure.
 - **Results caching in RAM** - Faster response times through caching.
-- **Configurable** - Tweak features via `config.ini`.
+- **[Configurable](https://weforge.xyz/Spitfire/Search/wiki/Configuration)** - Fully customizable via the `config.ini` file.
 - **Flexible media support** - Images optionally stored on HDD/SSD for caching and improved response time.
 
 ### Results Sources

@@ -73,30 +73,20 @@ A self-hosted private search engine designed to be scalable and more resource-ef
 ### Running the QGato
 
-Linux:
-
 ```bash
 git clone https://weforge.xyz/Spitfire/Search.git
 cd Search
-chmod +x ./run.sh
+go run .
-./run.sh
-```
-
-Windows:
-
-```powershell
-git clone https://weforge.xyz/Spitfire/Search.git
-cd Search
-.\run.bat
 ```
 
 *Its that easy!*
 
 ### Configuring
 
-Configuration is done via the ``config.ini`` file.
+- Configuration is done via the `config.ini` file.
-On first start, you will be guided through the basic setup.
+- On first start, you will be guided through the basic setup.
-More advanced setup and all options will be listed here later, as this is still being updated.
+- For more advanced configuration options, visit the [Wiki Configuration Page](https://weforge.xyz/Spitfire/Search/wiki/Configuration).
 
 
 ## License
config.go (48 lines changed)
@@ -29,11 +29,18 @@ type Config struct {
     Peers []string
     Domain string // Added
     NodesEnabled bool // Added
-    CrawlerEnabled bool // Added
+    MetaSearchEnabled bool // Added
     IndexerEnabled bool // Added
     WebsiteEnabled bool // Added
     RamCacheEnabled bool
     DriveCacheEnabled bool // Added
+    MetaProxyEnabled bool // Added
+    MetaProxyStrict bool // Added
+    MetaProxies []string // Added
+    CrawlerProxyEnabled bool // Added
+    CrawlerProxyStrict bool // Added
+    CrawlerProxies []string // Added
+    // Maybye add Proxy support for Image Extraction?
     LogLevel int // Added
     ConcurrentStandardCrawlers int
     ConcurrentChromeCrawlers int

@@ -51,11 +58,17 @@ var defaultConfig = Config{
     Peers: []string{},
     AuthCode: generateStrongRandomString(64),
     NodesEnabled: false,
-    CrawlerEnabled: true,
+    MetaSearchEnabled: true,
     IndexerEnabled: false,
     WebsiteEnabled: true,
     RamCacheEnabled: true,
     DriveCacheEnabled: false,
+    MetaProxyEnabled: false,
+    MetaProxyStrict: true,
+    MetaProxies: []string{},
+    CrawlerProxyEnabled: false,
+    CrawlerProxyStrict: true,
+    CrawlerProxies: []string{},
    ConcurrentStandardCrawlers: 12,
    ConcurrentChromeCrawlers: 4,
    CrawlingInterval: 24 * time.Hour,

@@ -245,14 +258,23 @@ func saveConfig(config Config) {
     // Features section
     featuresSec := cfg.Section("Features")
     featuresSec.Key("Nodes").SetValue(strconv.FormatBool(config.NodesEnabled))
-    featuresSec.Key("Crawler").SetValue(strconv.FormatBool(config.CrawlerEnabled))
+    featuresSec.Key("Crawler").SetValue(strconv.FormatBool(config.MetaSearchEnabled))
     featuresSec.Key("Indexer").SetValue(strconv.FormatBool(config.IndexerEnabled))
     featuresSec.Key("Website").SetValue(strconv.FormatBool(config.WebsiteEnabled))
+    featuresSec.Key("MetaProxy").SetValue(strconv.FormatBool(config.MetaProxyEnabled))
+    featuresSec.Key("CrawlerProxy").SetValue(strconv.FormatBool(config.CrawlerProxyEnabled))
+
+    // Proxies section
+    proxiesSec := cfg.Section("Proxies")
+    proxiesSec.Key("MetaProxyStrict").SetValue(strconv.FormatBool(config.MetaProxyStrict))
+    proxiesSec.Key("MetaProxies").SetValue(strings.Join(config.MetaProxies, ","))
+    proxiesSec.Key("CrawlerProxyStrict").SetValue(strconv.FormatBool(config.CrawlerProxyStrict))
+    proxiesSec.Key("CrawlerProxies").SetValue(strings.Join(config.CrawlerProxies, ","))
 
     // Indexer section
     indexerSec := cfg.Section("Indexer")
     indexerSec.Key("ConcurrentStandardCrawlers").SetValue(strconv.Itoa(config.ConcurrentStandardCrawlers))
-    indexerSec.Key("ConcurrentChromeCrawlers").SetValue(strconv.Itoa(config.ConcurrentStandardCrawlers))
+    indexerSec.Key("ConcurrentChromeCrawlers").SetValue(strconv.Itoa(config.ConcurrentChromeCrawlers))
     indexerSec.Key("CrawlingInterval").SetValue(config.CrawlingInterval.String())
     indexerSec.Key("MaxPagesPerDomain").SetValue(strconv.Itoa(config.MaxPagesPerDomain))
     indexerSec.Key("IndexBatchSize").SetValue(strconv.Itoa(config.IndexBatchSize))

@@ -292,11 +314,19 @@ func loadConfig() Config {
 
     // Features
     nodesEnabled := getConfigValueBool(cfg.Section("Features").Key("Nodes"), defaultConfig.NodesEnabled)
-    crawlerEnabled := getConfigValueBool(cfg.Section("Features").Key("Crawler"), defaultConfig.CrawlerEnabled)
+    metaSearchEnabled := getConfigValueBool(cfg.Section("Features").Key("Crawler"), defaultConfig.MetaSearchEnabled)
     indexerEnabled := getConfigValueBool(cfg.Section("Features").Key("Indexer"), defaultConfig.IndexerEnabled)
     websiteEnabled := getConfigValueBool(cfg.Section("Features").Key("Website"), defaultConfig.WebsiteEnabled)
     ramCacheEnabled := getConfigValueBool(cfg.Section("Features").Key("RamCache"), defaultConfig.RamCacheEnabled)
     driveCacheEnabled := getConfigValueBool(cfg.Section("Features").Key("DriveCache"), defaultConfig.DriveCacheEnabled)
+    metaProxyEnabled := getConfigValueBool(cfg.Section("Features").Key("MetaProxy"), defaultConfig.MetaProxyEnabled)
+    crawlerProxyEnabled := getConfigValueBool(cfg.Section("Features").Key("CrawlerProxy"), defaultConfig.CrawlerProxyEnabled)
+
+    // Proxies
+    metaProxyStrict := getConfigValueBool(cfg.Section("Proxies").Key("MetaProxyStrict"), defaultConfig.MetaProxyStrict)
+    metaProxies := strings.Split(getConfigValueString(cfg.Section("Proxies").Key("MetaProxies"), ""), ",")
+    crawlerProxyStrict := getConfigValueBool(cfg.Section("Proxies").Key("CrawlerProxyStrict"), defaultConfig.CrawlerProxyStrict)
+    crawlerProxies := strings.Split(getConfigValueString(cfg.Section("Proxies").Key("CrawlerProxies"), ""), ",")
 
     // Indexing
     concurrentStandardCrawlers := getConfigValue(cfg.Section("Indexer").Key("ConcurrentStandardCrawlers"), defaultConfig.ConcurrentStandardCrawlers, strconv.Atoi)

@@ -325,11 +355,17 @@ func loadConfig() Config {
     AuthCode: authCode,
     Peers: peers,
     NodesEnabled: nodesEnabled,
-    CrawlerEnabled: crawlerEnabled,
+    MetaSearchEnabled: metaSearchEnabled,
     IndexerEnabled: indexerEnabled,
     WebsiteEnabled: websiteEnabled,
     RamCacheEnabled: ramCacheEnabled,
     DriveCacheEnabled: driveCacheEnabled,
+    MetaProxyEnabled: metaProxyEnabled,
+    MetaProxyStrict: metaProxyStrict,
+    MetaProxies: metaProxies,
+    CrawlerProxyEnabled: crawlerProxyEnabled,
+    CrawlerProxyStrict: crawlerProxyStrict,
+    CrawlerProxies: crawlerProxies,
     ConcurrentStandardCrawlers: concurrentStandardCrawlers,
     ConcurrentChromeCrawlers: concurrentChromeCrawlers,
     CrawlingInterval: crawlingInterval,
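One detail worth noting in the loader above: `strings.Split` on an empty string does not return an empty slice, so when the `MetaProxies` or `CrawlerProxies` key is unset the parsed list contains a single empty entry. A minimal standalone sketch of that behaviour (standard library only; the values are illustrative, not taken from the patch):

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// strings.Split("", ",") yields []string{""}, not an empty slice.
	empty := strings.Split("", ",")
	fmt.Println(len(empty), empty) // 1 [""]

	// A populated, comma-separated value splits as expected.
	proxies := strings.Split("127.0.0.1:9050,127.0.0.1:9051", ",")
	fmt.Println(len(proxies), proxies) // 2 [127.0.0.1:9050 127.0.0.1:9051]
}
```

As written, `ParseProxies` in the new proxy.go (shown further down) would report that lone empty entry as an invalid proxy format rather than silently skipping it.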
@@ -32,8 +32,12 @@ func fetchPageMetadataStandard(pageURL, userAgent string) (string, string, strin
 
 // fetchPageMetadataChrome uses Chromedp to handle JavaScript-rendered pages.
 func fetchPageMetadataChrome(pageURL, userAgent string) (string, string, string) {
-    // Create context
+    // Create a custom allocator context for Chromedp with proxy support if enabled
-    ctx, cancel := chromedp.NewContext(context.Background())
+    allocCtx, cancelAlloc := chromedp.NewExecAllocator(context.Background(), configureChromeOptions()...)
+    defer cancelAlloc()
+
+    // Create a browser context
+    ctx, cancel := chromedp.NewContext(allocCtx)
     defer cancel()
 
     var renderedHTML string

@@ -57,6 +61,32 @@ func fetchPageMetadataChrome(pageURL, userAgent string) (string, string, string)
     return extractParsedDOM(doc)
 }
 
+// configureChromeOptions sets up Chrome options and proxy if CrawlerProxy is enabled.
+func configureChromeOptions() []chromedp.ExecAllocatorOption {
+    options := chromedp.DefaultExecAllocatorOptions[:]
+
+    if config.CrawlerProxyEnabled && crawlerProxyClient != nil {
+        // Retrieve proxy settings from CrawlerProxy
+        proxy := crawlerProxyClient.GetProxy() // Ensure a `GetProxy` method is implemented for your proxy client
+        if proxy != "" {
+            options = append(options, chromedp.ProxyServer(proxy))
+            printDebug("Using CrawlerProxy for Chromedp: %s", proxy)
+        } else {
+            printWarn("CrawlerProxy is enabled but no valid proxy is available")
+        }
+    }
+
+    // // Add additional Chrome
+    // options = append(options,
+    //     chromedp.Flag("headless", true),
+    //     chromedp.Flag("disable-gpu", true),
+    //     chromedp.Flag("no-sandbox", true),
+    //     chromedp.Flag("disable-setuid-sandbox", true),
+    // )
+
+    return options
+}
+
 // extractStandard does the normal HTML parse with OG, Twitter, etc.
 func extractStandard(pageURL, userAgent string) (title, desc, keywords string) {
     client := &http.Client{Timeout: 15 * time.Second}

@@ -68,7 +98,13 @@ func extractStandard(pageURL, userAgent string) (title, desc, keywords string) {
     req.Header.Set("User-Agent", userAgent)
     req.Header.Set("Accept-Language", "en-US,en;q=0.9")
 
-    resp, err := client.Do(req)
+    // Use CrawlerProxy if enabled
+    var resp *http.Response
+    if config.CrawlerProxyEnabled && crawlerProxyClient != nil {
+        resp, err = crawlerProxyClient.Do(req)
+    } else {
+        resp, err = client.Do(req)
+    }
     if err != nil {
         printDebug("Failed to GET %s: %v", pageURL, err)
         return

@@ -185,7 +221,13 @@ func fallbackReadability(pageURL, userAgent, title, desc, keywords string) (stri
     readReq.Header.Set("User-Agent", userAgent)
     readReq.Header.Set("Accept-Language", "en-US,en;q=0.9")
 
-    readResp, err := client.Do(readReq)
+    // Use CrawlerProxy if enabled
+    var readResp *http.Response
+    if config.CrawlerProxyEnabled && crawlerProxyClient != nil {
+        readResp, err = crawlerProxyClient.Do(readReq)
+    } else {
+        readResp, err = client.Do(readReq)
+    }
     if err != nil || readResp.StatusCode < 200 || readResp.StatusCode >= 300 {
         if err != nil {
             printDebug("go-readability GET error for %s: %v", pageURL, err)
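For context, the allocator-plus-browser-context flow that `fetchPageMetadataChrome` switches to above is the standard chromedp pattern. A minimal standalone sketch, assuming the same library; the proxy address and target URL are placeholders, not values from the patch:

```go
package main

import (
	"context"
	"fmt"
	"time"

	"github.com/chromedp/chromedp"
)

func main() {
	// Start from chromedp's default Chrome flags and add a proxy,
	// which is what configureChromeOptions does when CrawlerProxy is enabled.
	opts := append(chromedp.DefaultExecAllocatorOptions[:],
		chromedp.ProxyServer("socks5://127.0.0.1:9050"), // hypothetical proxy
	)

	allocCtx, cancelAlloc := chromedp.NewExecAllocator(context.Background(), opts...)
	defer cancelAlloc()

	// Browser context derived from the allocator, with an overall timeout.
	ctx, cancel := chromedp.NewContext(allocCtx)
	defer cancel()
	ctx, cancelTimeout := context.WithTimeout(ctx, 30*time.Second)
	defer cancelTimeout()

	var renderedHTML string
	if err := chromedp.Run(ctx,
		chromedp.Navigate("https://example.com"),
		chromedp.OuterHTML("html", &renderedHTML), // capture the rendered DOM
	); err != nil {
		fmt.Println("chromedp run failed:", err)
		return
	}
	fmt.Println(len(renderedHTML), "bytes of rendered HTML")
}
```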
@@ -57,31 +57,40 @@ func (t *ThePirateBay) Search(query string, category string) ([]TorrentResult, e
         return []TorrentResult{}, nil
     }
 
-    url := fmt.Sprintf("https://%s/q.php?q=%s&cat=%s", PIRATEBAY_DOMAIN, url.QueryEscape(query), categoryCode)
+    searchURL := fmt.Sprintf("https://%s/q.php?q=%s&cat=%s", PIRATEBAY_DOMAIN, url.QueryEscape(query), categoryCode)
 
     // User Agent generation
     userAgent, err := GetUserAgent("files-tpb")
     if err != nil {
-        fmt.Println("Error:", err)
-        return nil, err
+        return nil, fmt.Errorf("error generating User-Agent: %w", err)
     }
 
-    req, err := http.NewRequest("GET", url, nil)
+    req, err := http.NewRequest("GET", searchURL, nil)
     if err != nil {
-        return nil, err
+        return nil, fmt.Errorf("error creating request: %w", err)
     }
     req.Header.Set("User-Agent", userAgent)
 
+    // Perform the request using MetaProxy if enabled
+    var resp *http.Response
+    if config.MetaProxyEnabled && metaProxyClient != nil {
+        resp, err = metaProxyClient.Do(req)
+    } else {
         client := &http.Client{}
-        response, err := client.Do(req)
-        if err != nil {
-            return nil, err
-        }
-        defer response.Body.Close()
+        resp, err = client.Do(req)
+    }
+    if err != nil {
+        return nil, fmt.Errorf("error making request to The Pirate Bay: %w", err)
+    }
+    defer resp.Body.Close()
+
+    if resp.StatusCode != http.StatusOK {
+        return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
+    }
 
     var torrentData []map[string]interface{}
-    if err := json.NewDecoder(response.Body).Decode(&torrentData); err != nil {
+    if err := json.NewDecoder(resp.Body).Decode(&torrentData); err != nil {
-        return nil, err
+        return nil, fmt.Errorf("error decoding response JSON: %w", err)
     }
 
     var results []TorrentResult

@@ -62,18 +62,23 @@ func (tg *TorrentGalaxy) Search(query string, category string) ([]TorrentResult,
     // User Agent generation
     userAgent, err := GetUserAgent("files-torrentgalaxy")
     if err != nil {
-        fmt.Println("Error:", err)
-        return nil, err
+        return nil, fmt.Errorf("error generating User-Agent: %w", err)
     }
 
     req, err := http.NewRequest("GET", searchURL, nil)
     if err != nil {
-        return nil, err
+        return nil, fmt.Errorf("error creating request: %w", err)
     }
     req.Header.Set("User-Agent", userAgent)
 
+    // Perform the request using MetaProxy if enabled
+    var resp *http.Response
+    if config.MetaProxyEnabled && metaProxyClient != nil {
+        resp, err = metaProxyClient.Do(req)
+    } else {
         client := &http.Client{}
-        resp, err := client.Do(req)
+        resp, err = client.Do(req)
+    }
     if err != nil {
         return nil, fmt.Errorf("error making request to TorrentGalaxy: %w", err)
     }
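The `if config.MetaProxyEnabled && metaProxyClient != nil { ... } else { ... }` branch above reappears in each engine this changeset touches. A small helper could centralise it; this is a hypothetical refactoring sketch, not part of the changeset (`doMetaSearchRequest` does not exist in the repository):

```go
// doMetaSearchRequest sends req through the MetaProxy pool when it is
// enabled and initialised, and falls back to a plain http.Client otherwise.
func doMetaSearchRequest(req *http.Request, timeout time.Duration) (*http.Response, error) {
	if config.MetaProxyEnabled && metaProxyClient != nil {
		return metaProxyClient.Do(req)
	}
	client := &http.Client{Timeout: timeout}
	return client.Do(req)
}
```

Each call site would then reduce to something like `resp, err := doMetaSearchRequest(req, 10*time.Second)`.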
files.go (6 lines changed)
@@ -88,7 +88,7 @@ func getFileResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string,
     case results := <-cacheChan:
         if results == nil {
             // Fetch only if the cache miss occurs and Crawler is enabled
-            if config.CrawlerEnabled {
+            if config.MetaSearchEnabled {
                 combinedResults = fetchFileResults(query, safe, lang, page)
                 if len(combinedResults) > 0 {
                     resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))

@@ -102,7 +102,7 @@ func getFileResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string,
         }
     case <-time.After(2 * time.Second):
         printDebug("Cache check timeout")
-        if config.CrawlerEnabled {
+        if config.MetaSearchEnabled {
             combinedResults = fetchFileResults(query, safe, lang, page)
             if len(combinedResults) > 0 {
                 resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))

@@ -117,7 +117,7 @@ func getFileResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string,
 
 func fetchFileResults(query, safe, lang string, page int) []TorrentResult {
     // If Crawler is disabled, skip fetching from torrent sites
-    if !config.CrawlerEnabled {
+    if !config.MetaSearchEnabled {
         printInfo("Crawler is disabled; skipping torrent site fetching.")
         return []TorrentResult{}
     }
@@ -10,7 +10,7 @@ import (
 )
 
 func PerformRedditSearch(query string, safe string, page int) ([]ForumSearchResult, error) {
-    if !config.CrawlerEnabled {
+    if !config.MetaSearchEnabled {
         printDebug("Crawler is disabled; skipping forum search.")
         return []ForumSearchResult{}, nil
     }

@@ -150,7 +150,7 @@ func getForumResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
     case results := <-cacheChan:
         if results == nil {
             // Fetch only if the cache miss occurs and Crawler is enabled
-            if config.CrawlerEnabled {
+            if config.MetaSearchEnabled {
                 combinedResults = fetchForumResults(query, safe, lang, page)
                 if len(combinedResults) > 0 {
                     resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))

@@ -164,7 +164,7 @@ func getForumResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
         }
     case <-time.After(2 * time.Second):
         printDebug("Cache check timeout")
-        if config.CrawlerEnabled {
+        if config.MetaSearchEnabled {
             combinedResults = fetchForumResults(query, safe, lang, page)
             if len(combinedResults) > 0 {
                 resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
@@ -18,8 +18,27 @@ func PerformBingImageSearch(query, safe, lang string, page int) ([]ImageSearchRe
     // Build the search URL
     searchURL := buildBingSearchURL(query, page)
 
-    // Make the HTTP request
+    // Create the HTTP request
-    resp, err := http.Get(searchURL)
+    req, err := http.NewRequest("GET", searchURL, nil)
+    if err != nil {
+        return nil, 0, fmt.Errorf("creating request: %v", err)
+    }
+
+    // Set User-Agent
+    ImageUserAgent, err := GetUserAgent("Image-Search-Bing")
+    if err != nil {
+        return nil, 0, fmt.Errorf("generating User-Agent: %v", err)
+    }
+    req.Header.Set("User-Agent", ImageUserAgent)
+
+    // Use MetaProxy if enabled
+    var resp *http.Response
+    if config.MetaProxyEnabled && metaProxyClient != nil {
+        resp, err = metaProxyClient.Do(req)
+    } else {
+        client := &http.Client{Timeout: 10 * time.Second}
+        resp, err = client.Do(req)
+    }
     if err != nil {
         return nil, 0, fmt.Errorf("making request: %v", err)
     }
@@ -87,15 +87,21 @@ func PerformDeviantArtImageSearch(query, safe, lang string, page int) ([]ImageSe
         return nil, 0, err
     }
 
-    // Make the HTTP request with User-Agent header
+    // Create the HTTP request
-    client := &http.Client{}
     req, err := http.NewRequest("GET", searchURL, nil)
     if err != nil {
         return nil, 0, fmt.Errorf("creating request: %v", err)
     }
     req.Header.Set("User-Agent", DeviantArtImageUserAgent)
 
-    resp, err := client.Do(req)
+    // Perform the request using MetaProxy if enabled
+    var resp *http.Response
+    if config.MetaProxyEnabled && metaProxyClient != nil {
+        resp, err = metaProxyClient.Do(req)
+    } else {
+        client := &http.Client{}
+        resp, err = client.Do(req)
+    }
     if err != nil {
         return nil, 0, fmt.Errorf("making request: %v", err)
     }

@@ -182,7 +188,7 @@ func PerformDeviantArtImageSearch(query, safe, lang string, page int) ([]ImageSe
 
     duration := time.Since(startTime)
 
-    // Check if the number of results is one or less
+    // Check if the number of results is zero
     if len(results) == 0 {
         return nil, duration, fmt.Errorf("no images found")
     }
@@ -18,7 +18,27 @@ func PerformImgurImageSearch(query, safe, lang string, page int) ([]ImageSearchR
     var results []ImageSearchResult
     searchURL := buildImgurSearchURL(query, page)
 
-    resp, err := http.Get(searchURL)
+    // Create the HTTP request
+    req, err := http.NewRequest("GET", searchURL, nil)
+    if err != nil {
+        return nil, 0, fmt.Errorf("creating request: %v", err)
+    }
+
+    // Get the User-Agent string
+    imgurUserAgent, err := GetUserAgent("Image-Search-Imgur")
+    if err != nil {
+        return nil, 0, fmt.Errorf("getting user-agent: %v", err)
+    }
+    req.Header.Set("User-Agent", imgurUserAgent)
+
+    // Perform the HTTP request with MetaProxy if enabled
+    var resp *http.Response
+    if config.MetaProxyEnabled && metaProxyClient != nil {
+        resp, err = metaProxyClient.Do(req)
+    } else {
+        client := &http.Client{}
+        resp, err = client.Do(req)
+    }
     if err != nil {
         return nil, 0, fmt.Errorf("making request: %v", err)
     }
@@ -28,6 +48,7 @@ func PerformImgurImageSearch(query, safe, lang string, page int) ([]ImageSearchR
         return nil, 0, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
     }
 
+    // Parse the HTML document
     doc, err := goquery.NewDocumentFromReader(resp.Body)
     if err != nil {
         return nil, 0, fmt.Errorf("loading HTML document: %v", err)
@@ -76,12 +97,35 @@ func PerformImgurImageSearch(query, safe, lang string, page int) ([]ImageSearchR
 
     duration := time.Since(startTime) // Calculate the duration
 
+    if len(results) == 0 {
+        return nil, duration, fmt.Errorf("no images found")
+    }
+
     return results, duration, nil
 }
 
 // scrapeImageFromImgurPage scrapes the image source from the Imgur page
 func scrapeImageFromImgurPage(pageURL string) string {
-    resp, err := http.Get(pageURL)
+    req, err := http.NewRequest("GET", pageURL, nil)
+    if err != nil {
+        fmt.Printf("Error creating request for page: %v\n", err)
+        return ""
+    }
+
+    // Get the User-Agent string
+    imgurUserAgent, err := GetUserAgent("Image-Search-Imgur")
+    if err == nil {
+        req.Header.Set("User-Agent", imgurUserAgent)
+    }
+
+    // Perform the request using MetaProxy if enabled
+    var resp *http.Response
+    if config.MetaProxyEnabled && metaProxyClient != nil {
+        resp, err = metaProxyClient.Do(req)
+    } else {
+        client := &http.Client{}
+        resp, err = client.Do(req)
+    }
     if err != nil {
         fmt.Printf("Error fetching page: %v\n", err)
         return ""
@@ -97,7 +97,7 @@ func PerformQwantImageSearch(query, safe, lang string, page int) ([]ImageSearchR
 
     // Ensure count + offset is within acceptable limits
     if offset+resultsPerPage > 250 {
-        return nil, 0, fmt.Errorf("count + offset must be lower than 250 for quant")
+        return nil, 0, fmt.Errorf("count + offset must be lower than 250 for Qwant")
     }
 
     if safe == "" {

@@ -113,21 +113,27 @@ func PerformQwantImageSearch(query, safe, lang string, page int) ([]ImageSearchR
         offset,
         safe)
 
-    client := &http.Client{Timeout: 10 * time.Second}
+    // Create the HTTP request
     req, err := http.NewRequest("GET", apiURL, nil)
     if err != nil {
         return nil, 0, fmt.Errorf("creating request: %v", err)
     }
 
+    // Get the User-Agent string
     ImageUserAgent, err := GetUserAgent("Image-Search-Quant")
     if err != nil {
-        return nil, 0, err
+        return nil, 0, fmt.Errorf("getting user-agent: %v", err)
     }
+    req.Header.Set("User-Agent", ImageUserAgent)
 
-    req.Header.Set("User-Agent", ImageUserAgent) // Quant seems to not like some specific User-Agent strings
+    // Perform the request with MetaProxy if enabled
+    var resp *http.Response
-    resp, err := client.Do(req)
+    if config.MetaProxyEnabled && metaProxyClient != nil {
+        resp, err = metaProxyClient.Do(req)
+    } else {
+        client := &http.Client{Timeout: 10 * time.Second}
+        resp, err = client.Do(req)
+    }
     if err != nil {
         return nil, 0, fmt.Errorf("making request: %v", err)
     }

@@ -137,11 +143,13 @@ func PerformQwantImageSearch(query, safe, lang string, page int) ([]ImageSearchR
         return nil, 0, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
     }
 
+    // Parse the API response
     var apiResp QwantAPIResponse
     if err := json.NewDecoder(resp.Body).Decode(&apiResp); err != nil {
         return nil, 0, fmt.Errorf("decoding response: %v", err)
     }
 
+    // Process the results
     var wg sync.WaitGroup
     results := make([]ImageSearchResult, len(apiResp.Data.Result.Items))
 

@@ -174,5 +182,9 @@ func PerformQwantImageSearch(query, safe, lang string, page int) ([]ImageSearchR
 
     duration := time.Since(startTime) // Calculate the duration
 
+    if len(results) == 0 {
+        return nil, duration, fmt.Errorf("no images found")
+    }
+
     return results, duration, nil
 }
@@ -86,7 +86,7 @@ func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
     select {
     case results := <-cacheChan:
         if results == nil {
-            if config.CrawlerEnabled {
+            if config.MetaSearchEnabled {
                 combinedResults = fetchImageResults(query, safe, lang, page, synchronous)
                 if len(combinedResults) > 0 {
                     combinedResults = filterValidImages(combinedResults)

@@ -101,7 +101,7 @@ func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
         }
     case <-time.After(2 * time.Second):
         printDebug("Cache check timeout")
-        if config.CrawlerEnabled {
+        if config.MetaSearchEnabled {
             combinedResults = fetchImageResults(query, safe, lang, page, synchronous)
             if len(combinedResults) > 0 {
                 combinedResults = filterValidImages(combinedResults)

@@ -118,8 +118,8 @@ func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
 func fetchImageResults(query, safe, lang string, page int, synchronous bool) []ImageSearchResult {
     var results []ImageSearchResult
 
-    // Check if CrawlerEnabled is false
+    // Check if MetaSearchEnabled is false
-    if !config.CrawlerEnabled {
+    if !config.MetaSearchEnabled {
         printDebug("Crawler is disabled; skipping image search engine fetching.")
         return results
     }
init.go (6 lines changed)
@@ -60,8 +60,12 @@ func main() {
     }
     config.PeerID = hostID
 
+    if config.CrawlerProxyEnabled || config.MetaProxyEnabled {
+        InitProxies()
+    }
+
     // Initiate Browser Agent updater
-    if config.CrawlerEnabled || config.IndexerEnabled {
+    if config.MetaSearchEnabled || config.IndexerEnabled {
         go periodicAgentUpdate()
     }
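The `InitProxies` call added here is defined in the new proxy.go file further down in this comparison; since it is guarded by the two proxy flags, configurations that leave both disabled skip proxy initialisation entirely.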
node.go (7 lines changed)
@@ -5,7 +5,7 @@ import (
     "crypto/rand"
     "encoding/json"
     "fmt"
-    "io/ioutil"
+    "io"
     "net/http"
     "time"
 )

@@ -65,7 +65,10 @@ func sendMessage(serverAddr string, msg Message) error {
     defer resp.Body.Close()
 
     if resp.StatusCode != http.StatusOK {
-        body, _ := ioutil.ReadAll(resp.Body)
+        body, err := io.ReadAll(resp.Body)
+        if err != nil {
+            return fmt.Errorf("failed to read response body: %v", err)
+        }
         return fmt.Errorf("server error: %s", body)
     }
proxy.go (new file, 169 lines)
@@ -0,0 +1,169 @@
+package main
+
+import (
+    "fmt"
+    "net/http"
+    "strings"
+    "sync"
+    "time"
+
+    "golang.org/x/net/proxy"
+)
+
+// ProxyConfig holds configuration for a single proxy.
+type ProxyConfig struct {
+    Address  string
+    Username string
+    Password string
+}
+
+// ProxyClient provides an HTTP client pool for proxies.
+type ProxyClient struct {
+    clients []*http.Client
+    lock    sync.Mutex
+    index   int
+}
+
+// Package-level proxy clients
+var (
+    metaProxyClient    *ProxyClient
+    crawlerProxyClient *ProxyClient
+)
+
+// NewProxyClientPool creates a pool of HTTP clients with proxies.
+func NewProxyClientPool(proxies []ProxyConfig, timeout time.Duration) (*ProxyClient, error) {
+    if len(proxies) == 0 {
+        return nil, fmt.Errorf("no proxies provided")
+    }
+
+    clients := make([]*http.Client, len(proxies))
+
+    for i, proxyConfig := range proxies {
+        var auth *proxy.Auth
+        if proxyConfig.Username != "" || proxyConfig.Password != "" {
+            auth = &proxy.Auth{
+                User:     proxyConfig.Username,
+                Password: proxyConfig.Password,
+            }
+        }
+
+        dialer, err := proxy.SOCKS5("tcp", proxyConfig.Address, auth, proxy.Direct)
+        if err != nil {
+            return nil, fmt.Errorf("failed to create SOCKS5 dialer for %s: %w", proxyConfig.Address, err)
+        }
+
+        transport := &http.Transport{Dial: dialer.Dial}
+        clients[i] = &http.Client{
+            Transport: transport,
+            Timeout:   timeout,
+        }
+    }
+
+    return &ProxyClient{clients: clients}, nil
+}
+
+// Do sends an HTTP request using the next proxy in the pool.
+func (p *ProxyClient) Do(req *http.Request) (*http.Response, error) {
+    p.lock.Lock()
+    client := p.clients[p.index]
+    p.index = (p.index + 1) % len(p.clients)
+    p.lock.Unlock()
+    return client.Do(req)
+}
+
+func (p *ProxyClient) GetProxy() string {
+    p.lock.Lock()
+    defer p.lock.Unlock()
+
+    if len(p.clients) == 0 {
+        return ""
+    }
+
+    // Round-robin proxy retrieval
+    client := p.clients[p.index]
+    p.index = (p.index + 1) % len(p.clients)
+
+    // Assume each client has a proxy string saved
+    // Example implementation depends on how your proxies are configured
+    proxyTransport, ok := client.Transport.(*http.Transport)
+    if ok && proxyTransport.Proxy != nil {
+        proxyURL, _ := proxyTransport.Proxy(nil)
+        if proxyURL != nil {
+            return proxyURL.String()
+        }
+    }
+
+    return ""
+}
+
+// ParseProxies parses the proxy strings in the format ADDRESS:PORT or ADDRESS:PORT:USER:PASSWORD.
+func ParseProxies(proxyStrings []string) []ProxyConfig {
+    var proxies []ProxyConfig
+    for _, proxy := range proxyStrings {
+        parts := strings.Split(proxy, ":")
+        if len(parts) == 2 { // ADDRESS:PORT
+            proxies = append(proxies, ProxyConfig{
+                Address: fmt.Sprintf("%s:%s", parts[0], parts[1]),
+            })
+        } else if len(parts) == 4 { // ADDRESS:PORT:USER:PASSWORD
+            proxies = append(proxies, ProxyConfig{
+                Address:  fmt.Sprintf("%s:%s", parts[0], parts[1]),
+                Username: parts[2],
+                Password: parts[3],
+            })
+        } else {
+            fmt.Printf("Invalid proxy format: %s\n", proxy)
+        }
+    }
+    return proxies
+}
+
+// InitProxies initializes the proxy clients for Meta and Crawler proxies.
+func InitProxies() {
+    // Initialize Meta Proxy Client
+    if config.MetaProxyEnabled {
+        metaProxies := ParseProxies(config.MetaProxies)
+        client, err := NewProxyClientPool(metaProxies, 30*time.Second)
+        if err != nil {
+            if config.MetaProxyStrict {
+                panic(fmt.Sprintf("Failed to initialize Meta proxies: %v", err))
+            }
+            fmt.Printf("Warning: Meta proxy initialization failed: %v\n", err)
+        }
+        metaProxyClient = client
+    }
+
+    // Initialize Crawler Proxy Client
+    if config.CrawlerProxyEnabled {
+        crawlerProxies := ParseProxies(config.CrawlerProxies)
+        client, err := NewProxyClientPool(crawlerProxies, 30*time.Second)
+        if err != nil {
+            if config.CrawlerProxyStrict {
+                panic(fmt.Sprintf("Failed to initialize Crawler proxies: %v", err))
+            }
+            fmt.Printf("Warning: Crawler proxy initialization failed: %v\n", err)
+        }
+        crawlerProxyClient = client
+    }
+}
+
+// func main() {
+//     config := loadConfig()
+//
+//     // Initialize proxies if enabled
+//     if config.CrawlerProxyEnabled || config.MetaProxyEnabled {
+//         InitProxies()
+//     }
+//
+//     // Example usage
+//     if metaProxyClient != nil {
+//         req, _ := http.NewRequest("GET", "https://example.com", nil)
+//         resp, err := metaProxyClient.Do(req)
+//         if err != nil {
+//             fmt.Printf("Error using MetaProxyClient: %v\n", err)
+//         } else {
+//             fmt.Printf("Meta Proxy Response Status: %s\n", resp.Status)
+//             resp.Body.Close()
+//         }
+//     }
+// }
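To illustrate how the pieces of this new file fit together, here is a minimal usage sketch built only from the functions above; the proxy addresses and target URL are placeholders, and in the application this wiring is done by `InitProxies()` from the loaded config:

```go
// Hypothetical standalone usage of the proxy pool.
proxies := ParseProxies([]string{"127.0.0.1:9050", "proxy.example.org:1080:user:pass"})
pool, err := NewProxyClientPool(proxies, 30*time.Second)
if err != nil {
	panic(err)
}

req, _ := http.NewRequest("GET", "https://example.com", nil)
resp, err := pool.Do(req) // each call advances the round-robin index
if err == nil {
	defer resp.Body.Close()
	fmt.Println(resp.Status)
}
```

One observation on the code as written: `GetProxy` inspects `http.Transport.Proxy`, but `NewProxyClientPool` builds its transports with a SOCKS5 `Dial` function and never sets the `Proxy` field, so `GetProxy` will return an empty string for these clients and `configureChromeOptions` in the crawler will then log that no valid proxy is available.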
run.bat (27 lines changed)
@@ -5,7 +5,6 @@ rem Initialize variables
 set SKIP_CONFIG=""
 set PORT=""
 set DOMAIN=""
-set BUILD_MODE=false
 set BUILD_OUTPUT=qgato.exe
 
 rem Parse arguments

@@ -28,11 +27,6 @@ if "%~1"=="--skip-config-check" (
     shift
     goto parse_args
 )
-if "%~1"=="--build" (
-    set BUILD_MODE=true
-    shift
-    goto parse_args
-)
 echo Unknown argument: %~1
 exit /b 1

@@ -50,8 +44,10 @@ for %%f in (*.go) do (
     )
 )
 
-if "%BUILD_MODE%"=="true" (
+rem Always delete and rebuild the binary
-    rem Build mode
+echo Cleaning previous build...
+if exist "%BUILD_OUTPUT%" del "%BUILD_OUTPUT%"
+
 echo Building application...
 go build -o "%BUILD_OUTPUT%" !GO_FILES!
 if errorlevel 1 (

@@ -59,28 +55,17 @@ if "%BUILD_MODE%"=="true" (
     exit /b 1
 )
 echo Build successful! Output: %CD%\%BUILD_OUTPUT%
-) else (
-    rem Check if the executable exists
-    if not exist "%BUILD_OUTPUT%" (
-        echo Executable not found. Building it first...
-        go build -o "%BUILD_OUTPUT%" !GO_FILES!
-        if errorlevel 1 (
-            echo Build failed! Unable to run the application.
-            exit /b 1
-        )
-    )
 
 rem Construct the command
-set CMD="%BUILD_OUTPUT% !SKIP_CONFIG!"
+set CMD=%BUILD_OUTPUT% !SKIP_CONFIG!
 if not "%PORT%"=="" set CMD=!CMD! --port %PORT%
 if not "%DOMAIN%"=="" set CMD=!CMD! --domain %DOMAIN%
 
 rem Informative output
 echo Starting application with command: !CMD!
 
-rem Run the application
+rem Run the built executable
 call !CMD!
-)
 
 rem Return to the original directory
 popd
run.sh (29 lines changed)
@@ -4,7 +4,6 @@
 SKIP_CONFIG=""
 PORT=""
 DOMAIN=""
-BUILD_MODE=false
 BUILD_OUTPUT="qgato"
 
 # Parse arguments

@@ -22,10 +21,6 @@ while [ $# -gt 0 ]; do
         SKIP_CONFIG="--skip-config-check"
         shift
         ;;
-    --build)
-        BUILD_MODE=true
-        shift
-        ;;
     *)
         echo "Unknown argument: $1"
         exit 1

@@ -39,8 +34,10 @@ SCRIPT_DIR=$(dirname "$0")
 # List all Go files in the script directory (excluding test files)
 GO_FILES=$(find "$SCRIPT_DIR" -name '*.go' ! -name '*_test.go' -print)
 
-if $BUILD_MODE; then
+# Always delete and rebuild the binary
-    # Build mode
+echo "Cleaning previous build..."
+rm -f "$SCRIPT_DIR/$BUILD_OUTPUT"
+
 echo "Building application..."
 go build -o "$SCRIPT_DIR/$BUILD_OUTPUT" $GO_FILES
 if [ $? -eq 0 ]; then

@@ -49,23 +46,13 @@ if $BUILD_MODE; then
     echo "Build failed!"
     exit 1
 fi
-else
-    # Run mode
+# Construct the run command
-    CMD="./$BUILD_OUTPUT $SKIP_CONFIG"
+CMD="$SCRIPT_DIR/$BUILD_OUTPUT $SKIP_CONFIG"
 [ -n "$PORT" ] && CMD="$CMD --port $PORT"
 [ -n "$DOMAIN" ] && CMD="$CMD --domain $DOMAIN"
 
-if [ ! -f "$SCRIPT_DIR/$BUILD_OUTPUT" ]; then
-    echo "Executable not found. Building it first..."
-    go build -o "$SCRIPT_DIR/$BUILD_OUTPUT" $GO_FILES
-    if [ $? -ne 0 ]; then
-        echo "Build failed! Unable to run the application."
-        exit 1
-    fi
-fi
 
 echo "Starting application with command: $CMD"
 
-# Run the executable
+# Run the built executable
 eval $CMD
-fi
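With the `--build` flag removed, both scripts now always clean, rebuild, and then start the binary. Based on the flags the scripts still pass through, a typical invocation would be something like `./run.sh --port 5000 --domain example.com` (or `run.bat` with the same arguments on Windows), with `--skip-config-check` available to bypass the first-run setup prompt.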
@@ -2,7 +2,6 @@ package tests
 
 import (
     "bufio"
-    "context"
     "crypto/rand"
     "encoding/json"
     "fmt"

@@ -10,9 +9,7 @@ import (
     "math/big"
     "net/http"
     "net/url"
-    "os"
     "os/exec"
-    "path/filepath"
     "sync"
     "syscall"
     "testing"

@@ -56,50 +53,27 @@ func TestApplication(t *testing.T) {
     // Ensure the test runs from the root directory
     rootDir := "../" // Path to the root directory of the repository
 
-    // Build the application using `run.sh --build`
+    // Run the application using `run.sh`
-    buildCmd := exec.Command("sh", "./run.sh", "--build")
+    runCmd := exec.Command("sh", "./run.sh", "--skip-config-check")
-    buildCmd.Dir = rootDir
+    runCmd.Dir = rootDir
 
-    buildOutput, err := buildCmd.CombinedOutput()
-    if err != nil {
-        t.Fatalf("Failed to build application: %v\nOutput:\n%s", err, string(buildOutput))
-    }
-    t.Log("Application built successfully")
-
-    // Path to the built executable relative to rootDir
-    executablePath := "./qgato" // Since cmd.Dir is rootDir, this path is relative to rootDir
-
-    // Ensure the executable has execute permissions
-    execFullPath := filepath.Join(rootDir, "qgato")
-    if err := os.Chmod(execFullPath, 0755); err != nil {
-        t.Fatalf("Failed to set execute permissions on the executable: %v", err)
-    }
-
-    // Create a context with cancellation
-    ctx, cancel := context.WithCancel(context.Background())
-    defer cancel() // Ensure resources are cleaned up
-
-    // Start the application using the built executable
-    cmd := exec.CommandContext(ctx, executablePath, "--skip-config-check")
-    cmd.Dir = rootDir // Set the working directory to the root directory
-
     // Set process group ID so we can kill it and its children
-    cmd.SysProcAttr = &syscall.SysProcAttr{
+    runCmd.SysProcAttr = &syscall.SysProcAttr{
         Setpgid: true,
     }
 
     // Capture application output for logging
-    appStdout, err := cmd.StdoutPipe()
+    appStdout, err := runCmd.StdoutPipe()
     if err != nil {
         t.Fatalf("Failed to capture stdout: %v", err)
     }
-    appStderr, err := cmd.StderrPipe()
+    appStderr, err := runCmd.StderrPipe()
     if err != nil {
         t.Fatalf("Failed to capture stderr: %v", err)
     }
 
     // Start the application
-    if err := cmd.Start(); err != nil {
+    if err := runCmd.Start(); err != nil {
         t.Fatalf("Failed to start application: %v", err)
     }

@@ -120,28 +94,28 @@ func TestApplication(t *testing.T) {
     // Defer cleanup to ensure process is killed after the test
     defer func() {
         // Kill the process group
-        pgid, err := syscall.Getpgid(cmd.Process.Pid)
+        pgid, err := syscall.Getpgid(runCmd.Process.Pid)
         if err == nil {
             syscall.Kill(-pgid, syscall.SIGKILL)
         } else {
             t.Logf("Failed to get process group ID: %v", err)
-            cmd.Process.Kill()
+            runCmd.Process.Kill()
         }
-        cmd.Wait()
+        runCmd.Wait()
 
         // Print summary
         printSummary(summary, t)
     }()
 
     // Wait for the server to start
-    if !waitForServer("http://localhost:5000", 15*time.Second) {
+    if !waitForServer("http://localhost:5000", 600*time.Second) {
         t.Fatalf("Server did not start within the expected time")
     }
 
     t.Log("Application is running")
 
     // Create a process instance for the application
-    appProcess, err := process.NewProcess(int32(cmd.Process.Pid))
+    appProcess, err := process.NewProcess(int32(runCmd.Process.Pid))
     if err != nil {
         t.Fatalf("Failed to create process instance: %v", err)
     }
@@ -2,7 +2,7 @@ package main

 import (
     "fmt"
-    "io/ioutil"
+    "io"
     "net/http"
     "net/url"
     "strings"

@@ -21,33 +21,45 @@ func PerformBraveTextSearch(query, safe, lang string, offset int) ([]TextSearchR
     req, err := http.NewRequest("GET", searchURL, nil)
     if err != nil {
+        printWarn("Error creating request: %v", err)
         return nil, 0, fmt.Errorf("creating request: %v", err)
     }

     // Set headers including User-Agent
     TextUserAgent, err := GetUserAgent("Text-Search")
     if err != nil {
+        printWarn("Error generating User-Agent: %v", err)
         return nil, 0, err
     }
     req.Header.Set("User-Agent", TextUserAgent)

-    // Perform the HTTP request
+    var resp *http.Response

+    // Determine whether to use a proxy client or a default client
+    if config.MetaProxyEnabled && metaProxyClient != nil {
+        resp, err = metaProxyClient.Do(req)
+    } else {
         client := &http.Client{}
-        resp, err := client.Do(req)
+        resp, err = client.Do(req)
+    }

     if err != nil {
+        printWarn("Error performing request: %v", err)
         return nil, 0, fmt.Errorf("performing request: %v", err)
     }
     defer resp.Body.Close()

     // Read the response body
-    body, err := ioutil.ReadAll(resp.Body)
+    body, err := io.ReadAll(resp.Body)
     if err != nil {
+        printWarn("Error reading response body: %v", err)
         return nil, 0, fmt.Errorf("reading response body: %v", err)
     }

     // Parse the response body
     doc, err := goquery.NewDocumentFromReader(strings.NewReader(string(body)))
     if err != nil {
+        printErr("Error parsing response body: %v", err)
         return nil, 0, fmt.Errorf("parsing response body: %v", err)
     }

@@ -71,8 +83,10 @@ func PerformBraveTextSearch(query, safe, lang string, offset int) ([]TextSearchR
     // Return an error if no results are found
     if len(results) == 0 {
+        printDebug("No results found for query: %s", query)
         return nil, duration, fmt.Errorf("no results found")
     }

+    printDebug("Search completed successfully for query: %s, found %d results", query, len(results))
     return results, duration, nil
 }
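Every scraper touched by this change repeats the same dispatch: send the request through the shared `metaProxyClient` when `config.MetaProxyEnabled` is set, otherwise fall back to a plain `http.Client`. A standalone sketch of that pattern (the `doWithOptionalProxy` helper is hypothetical, not part of the repository):

```go
package main

import (
	"fmt"
	"net/http"
	"time"
)

// doWithOptionalProxy routes the request through the proxy-backed client when
// meta-proxying is enabled, otherwise it uses a plain client with a timeout.
func doWithOptionalProxy(req *http.Request, proxyEnabled bool, proxyClient *http.Client) (*http.Response, error) {
	if proxyEnabled && proxyClient != nil {
		return proxyClient.Do(req)
	}
	client := &http.Client{Timeout: 10 * time.Second}
	return client.Do(req)
}

func main() {
	req, err := http.NewRequest("GET", "https://example.com", nil)
	if err != nil {
		panic(err)
	}
	resp, err := doWithOptionalProxy(req, false, nil)
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}
```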
@@ -16,21 +16,38 @@ func PerformDuckDuckGoTextSearch(query, safe, lang string, page int) ([]TextSear
     var results []TextSearchResult
     searchURL := buildDuckDuckGoSearchURL(query, page)

-    resp, err := http.Get(searchURL)
+    // Create a request
+    req, err := http.NewRequest("GET", searchURL, nil)
+    if err != nil {
+        return nil, 0, fmt.Errorf("creating request: %v", err)
+    }
+
+    // Use proxy client if MetaProxy is enabled
+    var resp *http.Response
+    if config.MetaProxyEnabled && metaProxyClient != nil {
+        resp, err = metaProxyClient.Do(req)
+    } else {
+        client := &http.Client{}
+        resp, err = client.Do(req)
+    }

     if err != nil {
         return nil, 0, fmt.Errorf("making request: %v", err)
     }
     defer resp.Body.Close()

+    // Check for HTTP status code
     if resp.StatusCode != http.StatusOK {
         return nil, 0, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
     }

+    // Parse HTML response
     doc, err := goquery.NewDocumentFromReader(resp.Body)
     if err != nil {
         return nil, 0, fmt.Errorf("loading HTML document: %v", err)
     }

+    // Extract results from the page
     doc.Find(".result__body").Each(func(i int, s *goquery.Selection) {
         header := s.Find(".result__a").Text()
         description := s.Find(".result__snippet").Text()
@@ -16,37 +16,48 @@ func PerformGoogleTextSearch(query, safe, lang string, page int) ([]TextSearchRe
     startTime := time.Now() // Start the timer

-    client := &http.Client{}
+    // Build the search URL
     searchURL := buildSearchURL(query, safe, lang, page, resultsPerPage)

+    // Create a new request
     req, err := http.NewRequest("GET", searchURL, nil)
     if err != nil {
         return nil, 0, fmt.Errorf("failed to create request: %v", err)
     }

-    // User Agent generation
-    TextUserAgent, err := GetUserAgent("Text-Search")
+    // Generate and set User-Agent header
+    TextUserAgent, err := GetUserAgent("Google-Text-Search")
     if err != nil {
         return nil, 0, err
     }

     req.Header.Set("User-Agent", TextUserAgent)

-    resp, err := client.Do(req)
+    // Perform the request using proxy if MetaProxy is enabled
+    var resp *http.Response
+    if config.MetaProxyEnabled && metaProxyClient != nil {
+        resp, err = metaProxyClient.Do(req)
+    } else {
+        client := &http.Client{}
+        resp, err = client.Do(req)
+    }

     if err != nil {
         return nil, 0, fmt.Errorf("making request: %v", err)
     }
     defer resp.Body.Close()

+    // Check for HTTP status code
     if resp.StatusCode != http.StatusOK {
         return nil, 0, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
     }

+    // Parse the HTML response
     doc, err := goquery.NewDocumentFromReader(resp.Body)
     if err != nil {
         return nil, 0, fmt.Errorf("loading HTML document: %v", err)
     }

+    // Extract search results
     results = parseResults(doc)

     duration := time.Since(startTime) // Calculate the duration
@@ -28,7 +28,7 @@ func PerformLibreXTextSearch(query, safe, lang string, page int) ([]TextSearchRe
     searchURL := fmt.Sprintf("https://%s/api.php?q=%s&p=%d&t=0", LIBREX_DOMAIN, url.QueryEscape(query), page)

-    // User Agent generation
+    // Generate User-Agent
     userAgent, err := GetUserAgent("librex-text-search")
     if err != nil {
         return nil, 0, err

@@ -40,8 +40,15 @@ func PerformLibreXTextSearch(query, safe, lang string, page int) ([]TextSearchRe
     }
     req.Header.Set("User-Agent", userAgent)

+    // Perform the request using the appropriate client
+    var resp *http.Response
+    if config.MetaProxyEnabled && metaProxyClient != nil {
+        resp, err = metaProxyClient.Do(req)
+    } else {
         client := &http.Client{}
-        resp, err := client.Do(req)
+        resp, err = client.Do(req)
+    }

     if err != nil {
         return nil, 0, logError("error making request to LibreX", err)
     }

@@ -77,7 +84,7 @@ func PerformLibreXTextSearch(query, safe, lang string, page int) ([]TextSearchRe
     return results, duration, nil
 }

-// This is just stupid it will probbably lead to printing error twice
+// This is just stupid it will probably lead to printing error twice
 func logError(message string, err error) error {
     log.Printf("%s: %v", message, err)
     return fmt.Errorf("%s: %w", message, err)
@@ -46,8 +46,6 @@ func PerformQwantTextSearch(query, safe, lang string) ([]TextSearchResult, error
         lang,
         offset)

-    client := &http.Client{Timeout: 10 * time.Second}
-
     req, err := http.NewRequest("GET", apiURL, nil)
     if err != nil {
         return nil, fmt.Errorf("creating request: %v", err)

@@ -55,7 +53,15 @@ func PerformQwantTextSearch(query, safe, lang string) ([]TextSearchResult, error
     req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36")

-    resp, err := client.Do(req)
+    // Perform the request using the appropriate client
+    var resp *http.Response
+    if config.MetaProxyEnabled && metaProxyClient != nil {
+        resp, err = metaProxyClient.Do(req)
+    } else {
+        client := &http.Client{Timeout: 10 * time.Second}
+        resp, err = client.Do(req)
+    }

     if err != nil {
         return nil, fmt.Errorf("making request: %v", err)
     }
@@ -3,31 +3,63 @@ package main

 import (
     "encoding/json"
     "fmt"
-    "io/ioutil"
-    "log"
+    "io"
     "net/http"
+    "net/url"
+    "strings"
     "time"
 )

 type Instance struct {
-    URL      string `json:"url"`
-    Status   int    `json:"status"`
-    SSLGrade string `json:"ssl_grade"`
+    URL             string                 `json:"-"` // Populated from map key
+    Analytics       bool                   `json:"analytics"`
+    Comments        []string               `json:"comments"`
+    AlternativeUrls map[string]interface{} `json:"alternativeUrls"`
+    Main            bool                   `json:"main"`
+    NetworkType     string                 `json:"network_type"`
+    HTTP            struct {
+        StatusCode int    `json:"status_code"`
+        Error      string `json:"error"`
+    } `json:"http"`
+    Version    string       `json:"version"`
+    Grade      string       `json:"grade"`
+    GradeURL   string       `json:"gradeUrl"`
+    Generator  string       `json:"generator"`
+    ContactURL FlexibleType `json:"contact_url"` // Custom type
+    DocsURL    string       `json:"docs_url"`
+}
+
+type FlexibleType struct {
+    StringValue string
+    BoolValue   bool
+    IsString    bool
 }

 const searxInstancesURL = "https://searx.space/data/instances.json"

+// FetchInstances fetches available SearX instances from the registry.
 func fetchInstances() ([]Instance, error) {
-    client := &http.Client{Timeout: 10 * time.Second}
     req, err := http.NewRequest("GET", searxInstancesURL, nil)
     if err != nil {
-        return nil, err
+        return nil, fmt.Errorf("creating request: %v", err)
     }
-    req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36")

-    resp, err := client.Do(req)
+    XNGUserAgent, err := GetUserAgent("Text-Search-XNG")
     if err != nil {
-        return nil, err
+        return nil, fmt.Errorf("generating User-Agent: %v", err)
+    }
+    req.Header.Set("User-Agent", XNGUserAgent)
+
+    var resp *http.Response
+    if config.MetaProxyEnabled && config.MetaProxyStrict && metaProxyClient != nil {
+        resp, err = metaProxyClient.Do(req)
+    } else {
+        client := &http.Client{Timeout: 10 * time.Second}
+        resp, err = client.Do(req)
+    }
+
+    if err != nil {
+        return nil, fmt.Errorf("performing request: %v", err)
     }
     defer resp.Body.Close()
@@ -35,44 +67,105 @@ func fetchInstances() ([]Instance, error) {
         return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
     }

-    body, err := ioutil.ReadAll(resp.Body)
+    body, err := io.ReadAll(resp.Body)
     if err != nil {
-        return nil, err
+        return nil, fmt.Errorf("reading response body: %v", err)
     }

-    var instances []Instance
-    err = json.Unmarshal(body, &instances)
+    // Root structure of the JSON response
+    var root struct {
+        Instances map[string]Instance `json:"instances"`
+    }
+
+    // Unmarshal JSON into the root structure
+    err = json.Unmarshal(body, &root)
     if err != nil {
-        return nil, err
+        return nil, fmt.Errorf("parsing response JSON: %v", err)
+    }
+
+    // Collect instances into a slice
+    var instances []Instance
+    for url, instance := range root.Instances {
+        instance.URL = url // Assign the URL from the map key
+        instances = append(instances, instance)
     }

     return instances, nil
 }

+// UnmarshalJSON implements custom unmarshalling for FlexibleType.
+func (f *FlexibleType) UnmarshalJSON(data []byte) error {
+    // Try to unmarshal as a string
+    var str string
+    if err := json.Unmarshal(data, &str); err == nil {
+        f.StringValue = str
+        f.IsString = true
+        return nil
+    }
+
+    // Try to unmarshal as a bool
+    var b bool
+    if err := json.Unmarshal(data, &b); err == nil {
+        f.BoolValue = b
+        f.IsString = false
+        return nil
+    }
+
+    // Return an error if neither works
+    return fmt.Errorf("invalid FlexibleType: %s", string(data))
+}
+
+// String returns the string representation of FlexibleType.
+func (f FlexibleType) String() string {
+    if f.IsString {
+        return f.StringValue
+    }
+    return fmt.Sprintf("%v", f.BoolValue)
+}
+
+// ValidateInstance checks if a SearX instance is valid by performing a test query.
 func validateInstance(instance Instance) bool {
-    client := &http.Client{Timeout: 10 * time.Second}
-    req, err := http.NewRequest("GET", fmt.Sprintf("%s/search?q=test&categories=general&language=en&safe_search=1&page=1&format=json", instance.URL), nil)
-    if err != nil {
-        log.Printf("Error creating request for URL: %s, Error: %v", instance.URL, err)
+    // Skip .onion instances
+    if strings.Contains(instance.URL, ".onion") {
+        printDebug("Skipping .onion instance: %s", instance.URL)
         return false
     }
-    req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36")
+
+    client := &http.Client{
+        Timeout: 10 * time.Second,
+    }
+
+    testURL := fmt.Sprintf("%s/search?q=test&categories=general&language=en&safe_search=1&page=1&format=json", instance.URL)
+    req, err := http.NewRequest("GET", testURL, nil)
+    if err != nil {
+        printDebug("Error creating SearchXNG request for instance validation: %v", err)
+        return false
+    }
+
+    XNGUserAgent, err := GetUserAgent("Text-Search-XNG")
+    if err != nil {
+        printWarn("Error generating User-Agent: %v", err)
+        return false
+    }
+    req.Header.Set("User-Agent", XNGUserAgent)

     resp, err := client.Do(req)
     if err != nil {
-        log.Printf("Error performing request for URL: %s, Error: %v", instance.URL, err)
+        printDebug("Error performing request for SearchXNG instance validation: %v", err)
         return false
     }
     defer resp.Body.Close()

     if resp.StatusCode != http.StatusOK {
-        log.Printf("Instance validation failed for URL: %s, StatusCode: %d", instance.URL, resp.StatusCode)
+        printDebug("SearchXNG Instance validation failed. StatusCode: %d", resp.StatusCode)
         return false
     }

+    // Successful validation
     return true
 }

+// GetValidInstance fetches and validates SearX instances, returning a valid one.
 func getValidInstance() (*Instance, error) {
     instances, err := fetchInstances()
     if err != nil {
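The new `FlexibleType` exists because searx.space reports `contact_url` sometimes as a string and sometimes as a bool, which a plain `string` field would fail to decode. A self-contained sketch of the same unmarshalling approach in use (the sample JSON values are illustrative only):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// FlexibleType mirrors the type added in this change: a JSON field that may
// arrive either as a string or as a bool.
type FlexibleType struct {
	StringValue string
	BoolValue   bool
	IsString    bool
}

func (f *FlexibleType) UnmarshalJSON(data []byte) error {
	var s string
	if err := json.Unmarshal(data, &s); err == nil {
		f.StringValue, f.IsString = s, true
		return nil
	}
	var b bool
	if err := json.Unmarshal(data, &b); err == nil {
		f.BoolValue, f.IsString = b, false
		return nil
	}
	return fmt.Errorf("invalid FlexibleType: %s", string(data))
}

func main() {
	type entry struct {
		ContactURL FlexibleType `json:"contact_url"`
	}
	// One instance publishes a contact address, another publishes "false".
	for _, raw := range []string{`{"contact_url":"mailto:admin@example.org"}`, `{"contact_url":false}`} {
		var e entry
		if err := json.Unmarshal([]byte(raw), &e); err != nil {
			fmt.Println("error:", err)
			continue
		}
		fmt.Printf("%+v\n", e.ContactURL)
	}
}
```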
@@ -88,11 +181,81 @@ func getValidInstance() {
     return nil, fmt.Errorf("no valid SearX instances found")
 }

-// func main() {
-//     instance, err := getValidInstance()
-//     if err != nil {
-//         log.Fatalf("Failed to get a valid SearX instance: %v", err)
-//     }
-//
-//     log.Printf("Selected SearX instance: %s", instance.URL)
-// }
+// PerformSearXTextSearch performs a text search using a SearX instance.
+func PerformSearXTextSearch(query, categories, language string, page int) ([]TextSearchResult, time.Duration, error) {
+    // Default value for "safe" search
+    safe := "1"
+
+    startTime := time.Now() // Start the timer
+    var results []TextSearchResult
+
+    instance, err := getValidInstance()
+    if err != nil {
+        return nil, 0, fmt.Errorf("failed to get a valid SearX instance: %w", err)
+    }
+
+    searchURL := fmt.Sprintf("%s/search?q=%s&categories=%s&language=%s&safe_search=%s&page=%d&format=json",
+        instance.URL, url.QueryEscape(query), categories, language, safe, page)
+
+    req, err := http.NewRequest("GET", searchURL, nil)
+    if err != nil {
+        return nil, 0, fmt.Errorf("creating request: %v", err)
+    }
+
+    XNGUserAgent, err := GetUserAgent("Text-Search-XNG")
+    if err != nil {
+        return nil, 0, fmt.Errorf("generating User-Agent: %v", err)
+    }
+    req.Header.Set("User-Agent", XNGUserAgent)
+
+    var resp *http.Response
+    if config.MetaProxyEnabled && metaProxyClient != nil {
+        resp, err = metaProxyClient.Do(req)
+    } else {
+        client := &http.Client{Timeout: 10 * time.Second}
+        resp, err = client.Do(req)
+    }
+    if err != nil {
+        return nil, 0, fmt.Errorf("performing request: %v", err)
+    }
+    defer resp.Body.Close()
+
+    body, err := io.ReadAll(resp.Body)
+    if err != nil {
+        return nil, 0, fmt.Errorf("reading response body: %v", err)
+    }
+
+    // Parse the JSON response to extract search results
+    var response map[string]interface{}
+    err = json.Unmarshal(body, &response)
+    if err != nil {
+        return nil, 0, fmt.Errorf("parsing response JSON: %v", err)
+    }
+
+    // Extract search results
+    if items, ok := response["results"].([]interface{}); ok {
+        for _, item := range items {
+            if result, ok := item.(map[string]interface{}); ok {
+                title := strings.TrimSpace(fmt.Sprintf("%v", result["title"]))
+                url := strings.TrimSpace(fmt.Sprintf("%v", result["url"]))
+                description := strings.TrimSpace(fmt.Sprintf("%v", result["content"]))
+
+                results = append(results, TextSearchResult{
+                    Header:      title,
+                    URL:         url,
+                    Description: description,
+                })
+            }
+        }
+    }
+
+    duration := time.Since(startTime) // Calculate the duration
+
+    if len(results) == 0 {
+        printDebug("No results found for query: %s", query)
+        return nil, duration, fmt.Errorf("no results found")
+    }
+
+    printDebug("Search completed successfully for query: %s, found %d results", query, len(results))
+    return results, duration, nil
+}
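`PerformSearXTextSearch` decodes the SearXNG `format=json` reply into a `map[string]interface{}` and casts each field. Decoding into a small typed struct is an alternative that avoids the casts; a sketch under the assumption that only `title`, `url`, and `content` are needed (the sample payload is illustrative):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// searxResponse models only the fields read from a SearXNG JSON reply.
type searxResponse struct {
	Results []struct {
		Title   string `json:"title"`
		URL     string `json:"url"`
		Content string `json:"content"`
	} `json:"results"`
}

func main() {
	body := []byte(`{"results":[{"title":"Example","url":"https://example.com","content":"snippet"}]}`)
	var r searxResponse
	if err := json.Unmarshal(body, &r); err != nil {
		fmt.Println("parse error:", err)
		return
	}
	for _, item := range r.Results {
		fmt.Println(item.Title, item.URL, item.Content)
	}
}
```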
6 text.go

@@ -14,7 +14,7 @@ func init() {
         {Name: "LibreX", Func: wrapTextSearchFunc(PerformLibreXTextSearch)},
         {Name: "Brave", Func: wrapTextSearchFunc(PerformBraveTextSearch)},
         {Name: "DuckDuckGo", Func: wrapTextSearchFunc(PerformDuckDuckGoTextSearch)},
-        // {Name: "SearXNG", Func: wrapTextSearchFunc(PerformSearXNGTextSearch), Weight: 2}, // Uncomment when implemented
+        // {Name: "SearXNG", Func: wrapTextSearchFunc(PerformSearXTextSearch)}, // Always says StatusCode: 429
     }
 }

@@ -98,7 +98,7 @@ func prefetchPage(query, safe, lang string, page int) {
     cacheKey := CacheKey{Query: query, Page: page, Safe: safe == "active", Lang: lang, Type: "text"}
     if _, exists := resultsCache.Get(cacheKey); !exists {
         printInfo("Page %d not cached, caching now...", page)
-        if config.CrawlerEnabled {
+        if config.MetaSearchEnabled {
             pageResults := fetchTextResults(query, safe, lang, page)
             if len(pageResults) > 0 {
                 resultsCache.Set(cacheKey, convertToSearchResults(pageResults))

@@ -114,7 +114,7 @@ func prefetchPage(query, safe, lang string, page int) {
 func fetchTextResults(query, safe, lang string, page int) []TextSearchResult {
     var results []TextSearchResult

-    if !config.CrawlerEnabled {
+    if !config.MetaSearchEnabled {
         printDebug("Crawler is disabled; fetching from local index.")

         // Calculate the starting position based on the page number
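The prefetch path now keys off `config.MetaSearchEnabled`: upstream engines are only queried on a cache miss and only when meta search is turned on. A reduced sketch of that gate (the map-based cache and the fetcher closure stand in for the real `resultsCache` and `fetchTextResults`):

```go
package main

import "fmt"

// prefetch fetches only on a cache miss and only when meta search is enabled;
// otherwise the cache is left untouched.
func prefetch(cache map[string][]string, key string, metaSearchEnabled bool, fetch func() []string) {
	if _, cached := cache[key]; cached {
		return
	}
	if !metaSearchEnabled {
		return
	}
	if results := fetch(); len(results) > 0 {
		cache[key] = results
	}
}

func main() {
	cache := map[string][]string{}
	prefetch(cache, "qgato|page1", true, func() []string { return []string{"result A", "result B"} })
	fmt.Println(cache["qgato|page1"])
}
```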
@@ -72,8 +72,9 @@ func saveUserSettings(w http.ResponseWriter, settings UserSettings) {
         Value:    cd.GetValue(settings),
         Path:     "/",
         Expires:  expiration,
-        Secure:   true,
-        SameSite: http.SameSiteStrictMode,
+        Secure:   true, // Ensure HTTPS is required
+        HttpOnly: true,
+        SameSite: http.SameSiteStrictMode, // Restrict cross-site usage
     })
 }
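The settings cookie now carries `HttpOnly` in addition to `Secure` and `SameSite=Strict`. A minimal, self-contained sketch of writing a cookie with those attributes (the handler and names are illustrative, not the project's actual code):

```go
package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
	"time"
)

// setSettingCookie shows the hardened attributes used in this change:
// Secure (HTTPS only), HttpOnly (hidden from scripts), SameSite=Strict.
func setSettingCookie(w http.ResponseWriter, name, value string) {
	http.SetCookie(w, &http.Cookie{
		Name:     name,
		Value:    value,
		Path:     "/",
		Expires:  time.Now().Add(365 * 24 * time.Hour),
		Secure:   true,
		HttpOnly: true,
		SameSite: http.SameSiteStrictMode,
	})
}

func main() {
	// Record the response instead of running a server, so the example is self-contained.
	rec := httptest.NewRecorder()
	setSettingCookie(rec, "theme", "dark")
	fmt.Println(rec.Header().Get("Set-Cookie"))
}
```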
6 video.go

@@ -151,7 +151,7 @@ func handleVideoSearch(w http.ResponseWriter, settings UserSettings, query strin
     start := time.Now()

     var results []VideoResult
-    if config.CrawlerEnabled {
+    if config.MetaSearchEnabled {
         results = fetchVideoResults(query, settings.SafeSearch, settings.SearchLanguage, page)
     }

@@ -184,12 +184,12 @@ func handleVideoSearch(w http.ResponseWriter, settings UserSettings, query strin
 func fetchVideoResults(query, safe, lang string, page int) []VideoResult {
     // Check if the crawler is enabled
-    if !config.CrawlerEnabled {
+    if !config.MetaSearchEnabled {
         printDebug("Crawler is disabled; skipping video search.")
         return []VideoResult{}
     }

-    // Proceed with Piped API request if CrawlerEnabled
+    // Proceed with Piped API request if MetaSearchEnabled
     apiResp, err := makeHTMLRequest(query, safe, lang, page)
     if err != nil {
         printWarn("Error fetching video results: %v", err)