added caching to forums results
Some checks failed
Run Integration Tests / test (push) Failing after 22s
parent 6c351c5f2b
commit f2d9a37e87
5 changed files with 72 additions and 22 deletions
cache.go (13 changes)
@@ -195,14 +195,21 @@ func convertToSearchResults(results interface{}) []SearchResult {
             genericResults[i] = r
         }
         return genericResults
+    case []ForumSearchResult:
+        genericResults := make([]SearchResult, len(res))
+        for i, r := range res {
+            genericResults[i] = r
+        }
+        return genericResults
     }
     return nil
 }
 
-func convertToSpecificResults(results []SearchResult) ([]TextSearchResult, []TorrentResult, []ImageSearchResult) {
+func convertToSpecificResults(results []SearchResult) ([]TextSearchResult, []TorrentResult, []ImageSearchResult, []ForumSearchResult) {
     var textResults []TextSearchResult
     var torrentResults []TorrentResult
     var imageResults []ImageSearchResult
+    var forumResults []ForumSearchResult
     for _, r := range results {
         switch res := r.(type) {
         case TextSearchResult:
@@ -211,7 +218,9 @@ func convertToSpecificResults(results []SearchResult) ([]TextSearchResult, []Tor
             torrentResults = append(torrentResults, res)
         case ImageSearchResult:
             imageResults = append(imageResults, res)
+        case ForumSearchResult:
+            forumResults = append(forumResults, res)
         }
     }
-    return textResults, torrentResults, imageResults
+    return textResults, torrentResults, imageResults, forumResults
 }
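Note on the conversion above: Go has no implicit conversion between []ForumSearchResult and []SearchResult, which is why the new case copies element by element instead of casting the slice. A minimal, self-contained sketch of that widening step (stub types stand in for the repo's real SearchResult and ForumSearchResult):

package main

import "fmt"

// Stubs for illustration only; the real declarations live in this repo.
type SearchResult interface{}

type ForumSearchResult struct{ Title string }

func main() {
    forums := []ForumSearchResult{{Title: "a"}, {Title: "b"}}
    // []ForumSearchResult is not assignable to []SearchResult, so each
    // element is copied and boxed as the interface type individually.
    generic := make([]SearchResult, len(forums))
    for i, r := range forums {
        generic[i] = r
    }
    fmt.Println(len(generic)) // 2
}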
files.go (2 changes)
@@ -97,7 +97,7 @@ func getFileResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string,
                 printDebug("Crawler disabled; skipping fetching.")
             }
         } else {
-            _, torrentResults, _ := convertToSpecificResults(results)
+            _, torrentResults, _, _ := convertToSpecificResults(results)
             combinedResults = torrentResults
         }
     case <-time.After(2 * time.Second):
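Because convertToSpecificResults now returns four values, this call site (and the matching ones in the image and text handlers below) must be edited even though it discards the forum slice. One hedged alternative, sketched with stub types rather than the repo's real ones, is returning a struct, so a future fifth result type would not break existing callers:

package main

import "fmt"

// Stub result types standing in for the repo's real ones.
type (
    TextSearchResult  struct{}
    TorrentResult     struct{}
    ImageSearchResult struct{}
    ForumSearchResult struct{}
)

// specificResults is hypothetical and not part of this commit: each caller
// reads only the fields it needs, and new fields never touch old call sites.
type specificResults struct {
    Text     []TextSearchResult
    Torrents []TorrentResult
    Images   []ImageSearchResult
    Forums   []ForumSearchResult
}

func main() {
    var res specificResults
    fmt.Println(len(res.Torrents)) // e.g. the files handler would read only this field
}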
forums.go (75 changes)
@@ -3,7 +3,6 @@ package main
 import (
     "encoding/json"
     "fmt"
-    "log"
     "math"
     "net/http"
     "net/url"
@@ -106,22 +105,8 @@ func handleForumsSearch(w http.ResponseWriter, settings UserSettings, query stri
     // Start measuring the time for fetching results
     startTime := time.Now()
 
-    var results []ForumSearchResult
-    var err error
-
-    // Check if CrawlerEnabled is true before performing Reddit search
-    if config.CrawlerEnabled {
-        results, err = PerformRedditSearch(query, settings.SafeSearch, page)
-    } else {
-        printDebug("Crawler is disabled; skipping Reddit search.")
-        results = []ForumSearchResult{}
-    }
-
-    // Use fallback (other nodes) if no results or an error occurred
-    if err != nil || len(results) == 0 {
-        log.Printf("No results from primary search, trying other nodes")
-        results = tryOtherNodesForForumSearch(query, settings.SafeSearch, settings.SearchLanguage, page)
-    }
+    cacheKey := CacheKey{Query: query, Page: page, Safe: settings.SafeSearch == "active", Lang: settings.SearchLanguage, Type: "forum"}
+    results := getForumResultsFromCacheOrFetch(cacheKey, query, settings.SafeSearch, settings.SearchLanguage, page)
 
     // Measure the elapsed time for fetching results
     elapsedTime := time.Since(startTime)
@@ -144,3 +129,59 @@ func handleForumsSearch(w http.ResponseWriter, settings UserSettings, query stri
     // Render the template without measuring the time
     renderTemplate(w, "forums.html", data)
 }
+
+func getForumResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page int) []ForumSearchResult {
+    cacheChan := make(chan []SearchResult)
+    var combinedResults []ForumSearchResult
+
+    go func() {
+        results, exists := resultsCache.Get(cacheKey)
+        if exists {
+            printDebug("Cache hit")
+            cacheChan <- results
+        } else {
+            printDebug("Cache miss")
+            cacheChan <- nil
+        }
+    }()
+
+    select {
+    case results := <-cacheChan:
+        if results == nil {
+            // Fetch only if the cache miss occurs and Crawler is enabled
+            if config.CrawlerEnabled {
+                combinedResults = fetchForumResults(query, safe, lang, page)
+                if len(combinedResults) > 0 {
+                    resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
+                }
+            } else {
+                printDebug("Crawler disabled; skipping fetching.")
+            }
+        } else {
+            // Convert []SearchResult to []ForumSearchResult
+            combinedResults = convertToForumResults(results)
+        }
+    case <-time.After(2 * time.Second):
+        printDebug("Cache check timeout")
+        if config.CrawlerEnabled {
+            combinedResults = fetchForumResults(query, safe, lang, page)
+            if len(combinedResults) > 0 {
+                resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
+            }
+        } else {
+            printDebug("Crawler disabled; skipping fetching.")
+        }
+    }
+
+    return combinedResults
+}
+
+func convertToForumResults(results []SearchResult) []ForumSearchResult {
+    var forumResults []ForumSearchResult
+    for _, r := range results {
+        if res, ok := r.(ForumSearchResult); ok {
+            forumResults = append(forumResults, res)
+        }
+    }
+    return forumResults
+}
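One behavior worth flagging in getForumResultsFromCacheOrFetch above: cacheChan is unbuffered, so when the 2-second timeout fires first, the lookup goroutine blocks forever on its send and is leaked. Buffering the channel with capacity 1 keeps the same select shape without the leak. A self-contained sketch under that assumption (the getter function stands in for resultsCache.Get):

package main

import (
    "fmt"
    "time"
)

// lookupWithTimeout mimics the shape above: consult the cache in a goroutine,
// but give up after the timeout. The buffer of 1 lets the goroutine's send
// complete even if the receiver already gave up, so the goroutine can exit.
func lookupWithTimeout(get func() ([]string, bool), timeout time.Duration) []string {
    ch := make(chan []string, 1)
    go func() {
        if v, ok := get(); ok {
            ch <- v // cache hit
            return
        }
        ch <- nil // cache miss
    }()
    select {
    case v := <-ch:
        return v
    case <-time.After(timeout):
        return nil // treated like a miss; the caller falls back to fetching
    }
}

func main() {
    slowCache := func() ([]string, bool) {
        time.Sleep(3 * time.Second) // slower than the timeout below
        return []string{"hit"}, true
    }
    fmt.Println(lookupWithTimeout(slowCache, 2*time.Second)) // [] — timeout path
}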
images.go (2 changes)
@@ -96,7 +96,7 @@ func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string,
                 printDebug("Crawler disabled; skipping fetching from image search engines.")
             }
         } else {
-            _, _, imageResults := convertToSpecificResults(results)
+            _, _, imageResults, _ := convertToSpecificResults(results)
             combinedResults = filterValidImages(imageResults)
         }
     case <-time.After(2 * time.Second):
text.go (2 changes)
@@ -83,7 +83,7 @@ func getTextResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string,
                 printInfo("Crawler disabled; skipping fetching.")
             }
         } else {
-            textResults, _, _ := convertToSpecificResults(results)
+            textResults, _, _, _ := convertToSpecificResults(results)
             combinedResults = textResults
         }
     case <-time.After(2 * time.Second):
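Given the failing integration-test run at the top of this page, a small unit test around the new conversion helper could help localize regressions. A sketch, written as if it sits in the same package as forums.go; it assumes ForumSearchResult and TextSearchResult are plain structs, as the value type switches above suggest:

package main

import "testing"

// Exercises convertToForumResults from this commit: only ForumSearchResult
// values should survive the conversion; other result types are dropped.
func TestConvertToForumResults_KeepsOnlyForumResults(t *testing.T) {
    in := []SearchResult{ForumSearchResult{}, TextSearchResult{}}
    got := convertToForumResults(in)
    if len(got) != 1 {
        t.Fatalf("want 1 forum result, got %d", len(got))
    }
}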