2024-05-16 18:29:26 +02:00
|
|
|
package main
|
|
|
|
|
|
|
|
import (
|
2024-05-17 14:26:28 +02:00
|
|
|
"fmt"
|
|
|
|
"html/template"
|
|
|
|
"log"
|
2024-05-16 18:29:26 +02:00
|
|
|
"net/http"
|
2024-05-17 14:26:28 +02:00
|
|
|
"time"
|
2024-05-16 18:29:26 +02:00
|
|
|
)
|
|
|
|
|
2024-06-14 17:56:20 +02:00
|
|
|
// textSearchEngines is the ordered list of text search backends tried by
// fetchTextResults; it is populated once in init().
var textSearchEngines []SearchEngine
|
2024-06-09 21:44:49 +02:00
|
|
|
|
2024-05-18 01:59:29 +02:00
|
|
|
// init registers the default text search engines. fetchTextResults walks
// this slice in order, so placement here determines try-order; Weight is
// presumably a preference/cost score used by the metrics layer — confirm
// against updateEngineMetrics.
func init() {
	textSearchEngines = []SearchEngine{
		{Name: "Google", Func: wrapTextSearchFunc(PerformGoogleTextSearch), Weight: 1},
		{Name: "LibreX", Func: wrapTextSearchFunc(PerformLibreXTextSearch), Weight: 2},
		{Name: "Brave", Func: wrapTextSearchFunc(PerformBraveTextSearch), Weight: 2},
		{Name: "DuckDuckGo", Func: wrapTextSearchFunc(PerformDuckDuckGoTextSearch), Weight: 5},
		// {Name: "SearXNG", Func: wrapTextSearchFunc(PerformSearXNGTextSearch), Weight: 2}, // Uncomment when implemented
	}
}
|
2024-05-17 14:26:28 +02:00
|
|
|
|
2024-05-19 22:57:23 +02:00
|
|
|
func HandleTextSearch(w http.ResponseWriter, query, safe, lang string, page int) {
|
2024-05-18 01:59:29 +02:00
|
|
|
startTime := time.Now()
|
2024-05-19 22:57:23 +02:00
|
|
|
|
2024-05-24 14:07:16 +02:00
|
|
|
cacheKey := CacheKey{Query: query, Page: page, Safe: safe == "true", Lang: lang, Type: "text"}
|
2024-06-09 21:44:49 +02:00
|
|
|
combinedResults := getTextResultsFromCacheOrFetch(cacheKey, query, safe, lang, page)
|
2024-05-21 08:48:09 +02:00
|
|
|
|
2024-08-08 23:37:58 +02:00
|
|
|
hasPrevPage := page > 1 // dupe
|
2024-05-21 08:48:09 +02:00
|
|
|
|
2024-08-08 23:37:58 +02:00
|
|
|
//displayResults(w, combinedResults, query, lang, time.Since(startTime).Seconds(), page, hasPrevPage, hasNextPage)
|
2024-05-21 08:48:09 +02:00
|
|
|
|
2024-06-09 21:44:49 +02:00
|
|
|
// Prefetch next and previous pages
|
|
|
|
go prefetchPage(query, safe, lang, page+1)
|
|
|
|
if hasPrevPage {
|
|
|
|
go prefetchPage(query, safe, lang, page-1)
|
2024-05-21 10:19:40 +02:00
|
|
|
}
|
2024-08-08 23:37:58 +02:00
|
|
|
|
|
|
|
elapsedTime := time.Since(startTime)
|
|
|
|
tmpl, err := template.New("text.html").Funcs(funcs).ParseFiles("templates/text.html")
|
|
|
|
if err != nil {
|
|
|
|
log.Printf("Error parsing template: %v", err)
|
|
|
|
http.Error(w, "Internal Server Error", http.StatusInternalServerError)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
data := struct {
|
|
|
|
Results []TextSearchResult
|
|
|
|
Query string
|
|
|
|
Page int
|
|
|
|
Fetched string
|
|
|
|
LanguageOptions []LanguageOption
|
|
|
|
CurrentLang string
|
|
|
|
HasPrevPage bool
|
|
|
|
HasNextPage bool
|
|
|
|
NoResults bool
|
|
|
|
}{
|
|
|
|
Results: combinedResults,
|
|
|
|
Query: query,
|
|
|
|
Page: page,
|
|
|
|
Fetched: fmt.Sprintf("%.2f seconds", elapsedTime.Seconds()),
|
|
|
|
LanguageOptions: languageOptions,
|
|
|
|
CurrentLang: lang,
|
|
|
|
HasPrevPage: page > 1,
|
|
|
|
HasNextPage: len(combinedResults) >= 50,
|
|
|
|
NoResults: len(combinedResults) == 0,
|
|
|
|
}
|
|
|
|
|
|
|
|
err = tmpl.Execute(w, data)
|
|
|
|
if err != nil {
|
|
|
|
log.Printf("Error executing template: %v", err)
|
|
|
|
http.Error(w, "Internal Server Error", http.StatusInternalServerError)
|
|
|
|
}
|
2024-05-21 08:48:09 +02:00
|
|
|
}
|
|
|
|
|
2024-06-09 21:44:49 +02:00
|
|
|
func getTextResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page int) []TextSearchResult {
|
2024-05-24 14:07:16 +02:00
|
|
|
cacheChan := make(chan []SearchResult)
|
2024-05-20 22:14:48 +02:00
|
|
|
var combinedResults []TextSearchResult
|
2024-05-19 22:57:23 +02:00
|
|
|
|
2024-05-20 22:14:48 +02:00
|
|
|
go func() {
|
|
|
|
results, exists := resultsCache.Get(cacheKey)
|
|
|
|
if exists {
|
|
|
|
log.Println("Cache hit")
|
|
|
|
cacheChan <- results
|
|
|
|
} else {
|
|
|
|
log.Println("Cache miss")
|
|
|
|
cacheChan <- nil
|
|
|
|
}
|
|
|
|
}()
|
|
|
|
|
|
|
|
select {
|
2024-05-24 14:07:16 +02:00
|
|
|
case results := <-cacheChan:
|
|
|
|
if results == nil {
|
2024-06-09 21:44:49 +02:00
|
|
|
combinedResults = fetchTextResults(query, safe, lang, page)
|
2024-06-12 14:51:45 +02:00
|
|
|
if len(combinedResults) > 0 {
|
|
|
|
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
|
|
|
|
}
|
2024-05-24 14:07:16 +02:00
|
|
|
} else {
|
|
|
|
textResults, _, _ := convertToSpecificResults(results)
|
|
|
|
combinedResults = textResults
|
2024-05-20 22:14:48 +02:00
|
|
|
}
|
|
|
|
case <-time.After(2 * time.Second):
|
|
|
|
log.Println("Cache check timeout")
|
2024-06-09 21:44:49 +02:00
|
|
|
combinedResults = fetchTextResults(query, safe, lang, page)
|
2024-06-12 14:51:45 +02:00
|
|
|
if len(combinedResults) > 0 {
|
|
|
|
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
|
|
|
|
}
|
2024-05-19 22:57:23 +02:00
|
|
|
}
|
|
|
|
|
2024-05-21 08:48:09 +02:00
|
|
|
return combinedResults
|
|
|
|
}
|
|
|
|
|
2024-06-09 21:44:49 +02:00
|
|
|
func prefetchPage(query, safe, lang string, page int) {
|
2024-05-24 14:07:16 +02:00
|
|
|
cacheKey := CacheKey{Query: query, Page: page, Safe: safe == "true", Lang: lang, Type: "text"}
|
2024-05-21 08:48:09 +02:00
|
|
|
if _, exists := resultsCache.Get(cacheKey); !exists {
|
2024-06-09 21:44:49 +02:00
|
|
|
log.Printf("Page %d not cached, caching now...", page)
|
|
|
|
pageResults := fetchTextResults(query, safe, lang, page)
|
2024-06-12 14:51:45 +02:00
|
|
|
if len(pageResults) > 0 {
|
|
|
|
resultsCache.Set(cacheKey, convertToSearchResults(pageResults))
|
|
|
|
}
|
2024-05-21 08:48:09 +02:00
|
|
|
} else {
|
2024-06-09 21:44:49 +02:00
|
|
|
log.Printf("Page %d already cached", page)
|
2024-05-20 22:14:48 +02:00
|
|
|
}
|
2024-05-21 08:48:09 +02:00
|
|
|
}
|
2024-05-19 22:57:23 +02:00
|
|
|
|
2024-06-09 21:44:49 +02:00
|
|
|
func fetchTextResults(query, safe, lang string, page int) []TextSearchResult {
|
2024-06-12 14:51:45 +02:00
|
|
|
var results []TextSearchResult
|
2024-05-19 22:57:23 +02:00
|
|
|
|
2024-06-15 23:53:03 +02:00
|
|
|
for _, engine := range textSearchEngines {
|
2024-06-12 14:51:45 +02:00
|
|
|
log.Printf("Using search engine: %s", engine.Name)
|
|
|
|
|
2024-06-15 23:53:03 +02:00
|
|
|
searchResults, duration, err := engine.Func(query, safe, lang, page)
|
2024-06-14 17:56:20 +02:00
|
|
|
updateEngineMetrics(&engine, duration, err == nil)
|
2024-06-12 14:51:45 +02:00
|
|
|
if err != nil {
|
|
|
|
log.Printf("Error performing search with %s: %v", engine.Name, err)
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
2024-06-16 01:30:18 +02:00
|
|
|
results = append(results, validateResults(searchResults)...)
|
2024-06-14 17:56:20 +02:00
|
|
|
|
2024-06-16 00:14:21 +02:00
|
|
|
// If results are found, break out of the loop
|
2024-06-12 14:51:45 +02:00
|
|
|
if len(results) > 0 {
|
|
|
|
break
|
|
|
|
}
|
2024-05-21 08:48:09 +02:00
|
|
|
}
|
|
|
|
|
2024-08-08 23:37:58 +02:00
|
|
|
// If no results found after trying all engines
|
|
|
|
if len(results) == 0 {
|
|
|
|
log.Printf("No text results found for query: %s, trying other nodes", query)
|
2024-08-09 12:59:37 +02:00
|
|
|
results = tryOtherNodesForTextSearch(query, safe, lang, page, []string{hostID})
|
2024-08-08 23:37:58 +02:00
|
|
|
}
|
|
|
|
|
2024-06-09 21:44:49 +02:00
|
|
|
return results
|
2024-05-19 22:57:23 +02:00
|
|
|
}
|
|
|
|
|
2024-06-16 01:30:18 +02:00
|
|
|
func validateResults(searchResults []SearchResult) []TextSearchResult {
|
|
|
|
var validResults []TextSearchResult
|
|
|
|
|
|
|
|
// Remove anything that is missing a URL or Header
|
|
|
|
for _, result := range searchResults {
|
|
|
|
textResult := result.(TextSearchResult)
|
|
|
|
if textResult.URL != "" || textResult.Header != "" {
|
|
|
|
validResults = append(validResults, textResult)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return validResults
|
|
|
|
}
|
|
|
|
|
2024-06-14 17:56:20 +02:00
|
|
|
func wrapTextSearchFunc(f func(string, string, string, int) ([]TextSearchResult, time.Duration, error)) func(string, string, string, int) ([]SearchResult, time.Duration, error) {
|
|
|
|
return func(query, safe, lang string, page int) ([]SearchResult, time.Duration, error) {
|
|
|
|
textResults, duration, err := f(query, safe, lang, page)
|
|
|
|
if err != nil {
|
|
|
|
return nil, duration, err
|
|
|
|
}
|
|
|
|
searchResults := make([]SearchResult, len(textResults))
|
|
|
|
for i, result := range textResults {
|
|
|
|
searchResults[i] = result
|
2024-06-09 21:44:49 +02:00
|
|
|
}
|
2024-06-14 17:56:20 +02:00
|
|
|
return searchResults, duration, nil
|
2024-05-17 14:26:28 +02:00
|
|
|
}
|
2024-05-21 08:48:09 +02:00
|
|
|
}
|