added caching to forums results
Some checks failed
Run Integration Tests / test (push) Failing after 22s
parent 6c351c5f2b
commit f2d9a37e87
5 changed files with 72 additions and 22 deletions
75 forums.go
@@ -3,7 +3,6 @@ package main
 import (
 	"encoding/json"
 	"fmt"
-	"log"
 	"math"
 	"net/http"
 	"net/url"
@@ -106,22 +105,8 @@ func handleForumsSearch(w http.ResponseWriter, settings UserSettings, query stri
 	// Start measuring the time for fetching results
 	startTime := time.Now()
 
-	var results []ForumSearchResult
-	var err error
-
-	// Check if CrawlerEnabled is true before performing Reddit search
-	if config.CrawlerEnabled {
-		results, err = PerformRedditSearch(query, settings.SafeSearch, page)
-	} else {
-		printDebug("Crawler is disabled; skipping Reddit search.")
-		results = []ForumSearchResult{}
-	}
-
-	// Use fallback (other nodes) if no results or an error occurred
-	if err != nil || len(results) == 0 {
-		log.Printf("No results from primary search, trying other nodes")
-		results = tryOtherNodesForForumSearch(query, settings.SafeSearch, settings.SearchLanguage, page)
-	}
+	cacheKey := CacheKey{Query: query, Page: page, Safe: settings.SafeSearch == "active", Lang: settings.SearchLanguage, Type: "forum"}
+	results := getForumResultsFromCacheOrFetch(cacheKey, query, settings.SafeSearch, settings.SearchLanguage, page)
 
 	// Measure the elapsed time for fetching results
 	elapsedTime := time.Since(startTime)
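A side note on the cache key introduced above: a Go struct whose fields are all comparable supports == and can be used directly as a map key, which is what lets a CacheKey value address a cache entry. A minimal standalone sketch (the field set is copied from this diff, but the real CacheKey type and resultsCache live elsewhere in the repository):

package main

import "fmt"

// Hypothetical mirror of the CacheKey struct used in this diff;
// the real definition lives elsewhere in the repository.
type CacheKey struct {
	Query string
	Page  int
	Safe  bool
	Lang  string
	Type  string
}

func main() {
	// A map keyed by the struct itself: equality is field-wise.
	cache := map[CacheKey][]string{}
	k := CacheKey{Query: "golang", Page: 1, Safe: true, Lang: "en", Type: "forum"}
	cache[k] = []string{"some cached result"}

	// An identically-valued key reaches the same entry.
	fmt.Println(cache[CacheKey{Query: "golang", Page: 1, Safe: true, Lang: "en", Type: "forum"}])
}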
@@ -144,3 +129,59 @@ func handleForumsSearch(w http.ResponseWriter, settings UserSettings, query stri
 	// Render the template without measuring the time
 	renderTemplate(w, "forums.html", data)
 }
+
+func getForumResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page int) []ForumSearchResult {
+	cacheChan := make(chan []SearchResult)
+	var combinedResults []ForumSearchResult
+
+	go func() {
+		results, exists := resultsCache.Get(cacheKey)
+		if exists {
+			printDebug("Cache hit")
+			cacheChan <- results
+		} else {
+			printDebug("Cache miss")
+			cacheChan <- nil
+		}
+	}()
+
+	select {
+	case results := <-cacheChan:
+		if results == nil {
+			// Fetch only if the cache miss occurs and Crawler is enabled
+			if config.CrawlerEnabled {
+				combinedResults = fetchForumResults(query, safe, lang, page)
+				if len(combinedResults) > 0 {
+					resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
+				}
+			} else {
+				printDebug("Crawler disabled; skipping fetching.")
+			}
+		} else {
+			// Convert []SearchResult to []ForumSearchResult
+			combinedResults = convertToForumResults(results)
+		}
+	case <-time.After(2 * time.Second):
+		printDebug("Cache check timeout")
+		if config.CrawlerEnabled {
+			combinedResults = fetchForumResults(query, safe, lang, page)
+			if len(combinedResults) > 0 {
+				resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
+			}
+		} else {
+			printDebug("Crawler disabled; skipping fetching.")
+		}
+	}
+
+	return combinedResults
+}
+
+func convertToForumResults(results []SearchResult) []ForumSearchResult {
+	var forumResults []ForumSearchResult
+	for _, r := range results {
+		if res, ok := r.(ForumSearchResult); ok {
+			forumResults = append(forumResults, res)
+		}
+	}
+	return forumResults
+}
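One property of the new lookup worth noting: cacheChan is unbuffered, so when the two-second timeout in the select fires first, the goroutine doing resultsCache.Get stays blocked on its send and leaks. Giving the channel a buffer of one lets that send complete even after the receiver has moved on. A minimal sketch of the pattern with that variant (function and variable names here are illustrative, not from the repo):

package main

import (
	"fmt"
	"time"
)

// lookupWithTimeout runs a cache lookup in a goroutine and waits at most
// `timeout` for its answer, mirroring getForumResultsFromCacheOrFetch.
// The channel is buffered so a late send never blocks the goroutine.
func lookupWithTimeout(lookup func() ([]string, bool), timeout time.Duration) ([]string, bool) {
	ch := make(chan []string, 1) // capacity 1: send succeeds even after a timeout

	go func() {
		if results, ok := lookup(); ok {
			ch <- results // cache hit
		} else {
			ch <- nil // cache miss
		}
	}()

	select {
	case results := <-ch:
		return results, results != nil
	case <-time.After(timeout):
		return nil, false // a slow cache is treated like a miss
	}
}

func main() {
	slowCache := func() ([]string, bool) {
		time.Sleep(3 * time.Second) // simulate a cache backend slower than the timeout
		return []string{"cached"}, true
	}
	results, ok := lookupWithTimeout(slowCache, 2*time.Second)
	fmt.Println(results, ok) // prints "[] false" after two seconds
}

Run as-is, this prints [] false after two seconds: the slow lookup times out, and the goroutine's eventual send lands in the buffer instead of blocking forever.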