2024-05-16 18:29:26 +02:00
|
|
|
package main
|
|
|
|
|
|
|
|
import (
	"bytes"
	"flag"
	"fmt"
	"html/template"
	"log"
	"net/http"
	"sort"
	"sync"
	"time"
)
|
|
|
|
|
2024-05-19 22:57:23 +02:00
|
|
|
var (
|
|
|
|
debugMode bool
|
2024-05-20 22:14:48 +02:00
|
|
|
resultsCache = NewResultsCache(6 * time.Hour) // Cache with 6-hour expiration
|
2024-05-19 22:57:23 +02:00
|
|
|
)
|
2024-05-18 01:59:29 +02:00
|
|
|
|
|
|
|
func init() {
|
|
|
|
flag.BoolVar(&debugMode, "debug", false, "enable debug mode")
|
|
|
|
flag.Parse()
|
|
|
|
}
|
2024-05-17 14:26:28 +02:00
|
|
|
|
2024-05-19 22:57:23 +02:00
|
|
|
func HandleTextSearch(w http.ResponseWriter, query, safe, lang string, page int) {
|
2024-05-18 01:59:29 +02:00
|
|
|
startTime := time.Now()
|
2024-05-19 22:57:23 +02:00
|
|
|
const resultsPerPage = 10
|
|
|
|
|
|
|
|
cacheKey := CacheKey{Query: query, Page: page, Safe: safe, Lang: lang}
|
2024-05-20 22:14:48 +02:00
|
|
|
cacheChan := make(chan []TextSearchResult)
|
|
|
|
var combinedResults []TextSearchResult
|
|
|
|
var fromCache bool
|
2024-05-19 22:57:23 +02:00
|
|
|
|
2024-05-20 22:14:48 +02:00
|
|
|
go func() {
|
|
|
|
results, exists := resultsCache.Get(cacheKey)
|
|
|
|
if exists {
|
|
|
|
log.Println("Cache hit")
|
|
|
|
cacheChan <- results
|
|
|
|
} else {
|
|
|
|
log.Println("Cache miss")
|
|
|
|
cacheChan <- nil
|
|
|
|
}
|
|
|
|
}()
|
|
|
|
|
|
|
|
select {
|
|
|
|
case combinedResults = <-cacheChan:
|
|
|
|
if combinedResults != nil {
|
|
|
|
fromCache = true
|
|
|
|
} else {
|
|
|
|
combinedResults = fetchAndCacheResults(query, safe, lang, page, resultsPerPage)
|
|
|
|
resultsCache.Set(cacheKey, combinedResults)
|
|
|
|
}
|
|
|
|
case <-time.After(2 * time.Second):
|
|
|
|
log.Println("Cache check timeout")
|
2024-05-19 22:57:23 +02:00
|
|
|
combinedResults = fetchAndCacheResults(query, safe, lang, page, resultsPerPage)
|
|
|
|
resultsCache.Set(cacheKey, combinedResults)
|
|
|
|
}
|
|
|
|
|
2024-05-20 22:14:48 +02:00
|
|
|
// Only pre-fetch and cache results for the next page if we fetched new results
|
|
|
|
if !fromCache {
|
|
|
|
go func() {
|
|
|
|
nextPageResults := fetchAndCacheResults(query, safe, lang, page+1, resultsPerPage)
|
|
|
|
resultsCache.Set(CacheKey{Query: query, Page: page + 1, Safe: safe, Lang: lang}, nextPageResults)
|
|
|
|
}()
|
|
|
|
}
|
2024-05-19 22:57:23 +02:00
|
|
|
|
|
|
|
hasPrevPage := page > 1
|
2024-05-20 22:14:48 +02:00
|
|
|
hasNextPage := len(combinedResults) == resultsPerPage
|
2024-05-19 22:57:23 +02:00
|
|
|
|
|
|
|
displayResults(w, combinedResults, query, lang, time.Since(startTime).Seconds(), page, hasPrevPage, hasNextPage)
|
|
|
|
}
|
|
|
|
|
|
|
|
func fetchAndCacheResults(query, safe, lang string, page, resultsPerPage int) []TextSearchResult {
|
2024-05-18 01:59:29 +02:00
|
|
|
var combinedResults []TextSearchResult
|
|
|
|
var wg sync.WaitGroup
|
|
|
|
var mu sync.Mutex
|
|
|
|
|
|
|
|
resultsChan := make(chan []TextSearchResult)
|
|
|
|
|
|
|
|
searchFuncs := []struct {
|
2024-05-19 22:57:23 +02:00
|
|
|
Func func(string, string, string, int) ([]TextSearchResult, error)
|
2024-05-18 01:59:29 +02:00
|
|
|
Source string
|
|
|
|
}{
|
|
|
|
{PerformGoogleTextSearch, "Google"},
|
|
|
|
{PerformDuckDuckGoTextSearch, "DuckDuckGo"},
|
|
|
|
{PerformQwantTextSearch, "Qwant"},
|
2024-05-17 14:26:28 +02:00
|
|
|
}
|
|
|
|
|
2024-05-18 01:59:29 +02:00
|
|
|
wg.Add(len(searchFuncs))
|
2024-05-17 14:26:28 +02:00
|
|
|
|
2024-05-18 01:59:29 +02:00
|
|
|
for _, searchFunc := range searchFuncs {
|
2024-05-19 22:57:23 +02:00
|
|
|
go func(searchFunc func(string, string, string, int) ([]TextSearchResult, error), source string) {
|
2024-05-18 01:59:29 +02:00
|
|
|
defer wg.Done()
|
2024-05-19 22:57:23 +02:00
|
|
|
results, err := searchFunc(query, safe, lang, page)
|
2024-05-18 01:59:29 +02:00
|
|
|
if err == nil {
|
|
|
|
for i := range results {
|
|
|
|
results[i].Source = source
|
|
|
|
}
|
|
|
|
resultsChan <- results
|
|
|
|
} else {
|
|
|
|
log.Printf("Error performing search from %s: %v", source, err)
|
|
|
|
}
|
|
|
|
}(searchFunc.Func, searchFunc.Source)
|
2024-05-17 14:26:28 +02:00
|
|
|
}
|
|
|
|
|
2024-05-18 01:59:29 +02:00
|
|
|
go func() {
|
|
|
|
wg.Wait()
|
|
|
|
close(resultsChan)
|
|
|
|
}()
|
|
|
|
|
|
|
|
for results := range resultsChan {
|
|
|
|
mu.Lock()
|
2024-05-19 22:57:23 +02:00
|
|
|
combinedResults = append(combinedResults, results...)
|
2024-05-18 01:59:29 +02:00
|
|
|
mu.Unlock()
|
2024-05-17 14:26:28 +02:00
|
|
|
}
|
|
|
|
|
2024-05-19 22:57:23 +02:00
|
|
|
// Sort combinedResults by source priority: Google first, DuckDuckGo second, Qwant third
|
2024-05-17 14:26:28 +02:00
|
|
|
sort.SliceStable(combinedResults, func(i, j int) bool {
|
2024-05-18 01:59:29 +02:00
|
|
|
return sourceOrder(combinedResults[i].Source) < sourceOrder(combinedResults[j].Source)
|
2024-05-17 14:26:28 +02:00
|
|
|
})
|
|
|
|
|
2024-05-19 22:57:23 +02:00
|
|
|
// Paginate results
|
|
|
|
startIndex := (page - 1) * resultsPerPage
|
|
|
|
endIndex := startIndex + resultsPerPage
|
2024-05-18 01:59:29 +02:00
|
|
|
|
2024-05-19 22:57:23 +02:00
|
|
|
// Ensure startIndex and endIndex are within bounds
|
|
|
|
if startIndex >= len(combinedResults) {
|
|
|
|
return []TextSearchResult{}
|
|
|
|
}
|
|
|
|
if endIndex > len(combinedResults) {
|
|
|
|
endIndex = len(combinedResults)
|
|
|
|
}
|
|
|
|
|
|
|
|
return combinedResults[startIndex:endIndex]
|
2024-05-18 01:59:29 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
func sourceOrder(source string) int {
|
|
|
|
switch source {
|
|
|
|
case "Google":
|
2024-05-18 13:23:39 +02:00
|
|
|
return 1
|
2024-05-19 22:57:23 +02:00
|
|
|
case "DuckDuckGo":
|
|
|
|
return 2
|
|
|
|
case "Qwant":
|
|
|
|
return 3
|
2024-05-18 01:59:29 +02:00
|
|
|
default:
|
|
|
|
return 4
|
|
|
|
}
|
2024-05-16 18:29:26 +02:00
|
|
|
}
|
|
|
|
|
2024-05-19 22:57:23 +02:00
|
|
|
func displayResults(w http.ResponseWriter, results []TextSearchResult, query, lang string, elapsed float64, page int, hasPrevPage, hasNextPage bool) {
|
|
|
|
tmpl, err := template.New("text.html").Funcs(template.FuncMap{
|
|
|
|
"sub": func(a, b int) int {
|
|
|
|
return a - b
|
|
|
|
},
|
|
|
|
"add": func(a, b int) int {
|
|
|
|
return a + b
|
|
|
|
},
|
|
|
|
}).ParseFiles("templates/text.html")
|
2024-05-17 14:26:28 +02:00
|
|
|
if err != nil {
|
|
|
|
http.Error(w, "Internal Server Error", http.StatusInternalServerError)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
data := struct {
|
|
|
|
Results []TextSearchResult
|
|
|
|
Query string
|
|
|
|
Fetched string
|
2024-05-19 22:57:23 +02:00
|
|
|
Page int
|
|
|
|
HasPrevPage bool
|
|
|
|
HasNextPage bool
|
2024-05-17 14:26:28 +02:00
|
|
|
LanguageOptions []LanguageOption
|
|
|
|
CurrentLang string
|
|
|
|
}{
|
|
|
|
Results: results,
|
|
|
|
Query: query,
|
2024-05-18 01:59:29 +02:00
|
|
|
Fetched: fmt.Sprintf("%.2f seconds", elapsed),
|
2024-05-19 22:57:23 +02:00
|
|
|
Page: page,
|
|
|
|
HasPrevPage: hasPrevPage,
|
|
|
|
HasNextPage: hasNextPage,
|
2024-05-17 14:26:28 +02:00
|
|
|
LanguageOptions: languageOptions,
|
|
|
|
CurrentLang: lang,
|
|
|
|
}
|
|
|
|
|
|
|
|
err = tmpl.Execute(w, data)
|
|
|
|
if err != nil {
|
|
|
|
http.Error(w, "Internal Server Error", http.StatusInternalServerError)
|
|
|
|
}
|
2024-05-16 18:29:26 +02:00
|
|
|
}
|