added caching to forums results
Some checks failed
Run Integration Tests / test (push) Failing after 22s
parent 6c351c5f2b
commit f2d9a37e87
5 changed files with 72 additions and 22 deletions

cache.go (13 changes)
@@ -195,14 +195,21 @@ func convertToSearchResults(results interface{}) []SearchResult {
 			genericResults[i] = r
 		}
 		return genericResults
+	case []ForumSearchResult:
+		genericResults := make([]SearchResult, len(res))
+		for i, r := range res {
+			genericResults[i] = r
+		}
+		return genericResults
 	}
 	return nil
 }
 
-func convertToSpecificResults(results []SearchResult) ([]TextSearchResult, []TorrentResult, []ImageSearchResult) {
+func convertToSpecificResults(results []SearchResult) ([]TextSearchResult, []TorrentResult, []ImageSearchResult, []ForumSearchResult) {
 	var textResults []TextSearchResult
 	var torrentResults []TorrentResult
 	var imageResults []ImageSearchResult
+	var forumResults []ForumSearchResult
 	for _, r := range results {
 		switch res := r.(type) {
 		case TextSearchResult:
@@ -211,7 +218,9 @@ func convertToSpecificResults(results []SearchResult) ([]TextSearchResult, []Tor
 			torrentResults = append(torrentResults, res)
 		case ImageSearchResult:
 			imageResults = append(imageResults, res)
+		case ForumSearchResult:
+			forumResults = append(forumResults, res)
 		}
 	}
-	return textResults, torrentResults, imageResults
+	return textResults, torrentResults, imageResults, forumResults
 }
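Taken together, the two cache.go changes let forum results ride the existing generic result cache. A minimal sketch of the round-trip, using only identifiers visible in this commit's hunks; cacheForumResults itself is a hypothetical name, not part of the change:

// Hypothetical helper (not in this commit) showing the round-trip.
func cacheForumResults(key CacheKey, forums []ForumSearchResult) []ForumSearchResult {
	// Store: the new []ForumSearchResult case flattens the slice to []SearchResult.
	resultsCache.Set(key, convertToSearchResults(forums))

	// Load: the new fourth return value recovers the typed slice.
	cached, _ := resultsCache.Get(key)
	_, _, _, forumResults := convertToSpecificResults(cached)
	return forumResults
}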

files.go (2 changes)
@@ -97,7 +97,7 @@ func getFileResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string,
 				printDebug("Crawler disabled; skipping fetching.")
 			}
 		} else {
-			_, torrentResults, _ := convertToSpecificResults(results)
+			_, torrentResults, _, _ := convertToSpecificResults(results)
 			combinedResults = torrentResults
 		}
 	case <-time.After(2 * time.Second):
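The one-line call-site edits in files.go above and in images.go and text.go below follow mechanically from the widened signature: a Go caller cannot silently ignore a newly added return value, so the compiler flags every stale call site. A sketch of the error the old form now produces (hedged reconstruction, not output from this repo's CI):

// Old three-value form after this commit:
//   _, torrentResults, _ := convertToSpecificResults(results)
// compile error: assignment mismatch: 3 variables but
// convertToSpecificResults returns 4 values
_, torrentResults, _, _ := convertToSpecificResults(results) // fixed: blank the forum slice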

forums.go (75 changes)
@@ -3,7 +3,6 @@ package main
 import (
 	"encoding/json"
 	"fmt"
-	"log"
 	"math"
 	"net/http"
 	"net/url"
@@ -106,22 +105,8 @@ func handleForumsSearch(w http.ResponseWriter, settings UserSettings, query stri
 	// Start measuring the time for fetching results
 	startTime := time.Now()
 
-	var results []ForumSearchResult
-	var err error
+	cacheKey := CacheKey{Query: query, Page: page, Safe: settings.SafeSearch == "active", Lang: settings.SearchLanguage, Type: "forum"}
+	results := getForumResultsFromCacheOrFetch(cacheKey, query, settings.SafeSearch, settings.SearchLanguage, page)
 
-	// Check if CrawlerEnabled is true before performing Reddit search
-	if config.CrawlerEnabled {
-		results, err = PerformRedditSearch(query, settings.SafeSearch, page)
-	} else {
-		printDebug("Crawler is disabled; skipping Reddit search.")
-		results = []ForumSearchResult{}
-	}
-
-	// Use fallback (other nodes) if no results or an error occurred
-	if err != nil || len(results) == 0 {
-		log.Printf("No results from primary search, trying other nodes")
-		results = tryOtherNodesForForumSearch(query, settings.SafeSearch, settings.SearchLanguage, page)
-	}
-
 	// Measure the elapsed time for fetching results
 	elapsedTime := time.Since(startTime)
@@ -144,3 +129,59 @@ func handleForumsSearch(w http.ResponseWriter, settings UserSettings, query stri
 	// Render the template without measuring the time
 	renderTemplate(w, "forums.html", data)
 }
+
+func getForumResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page int) []ForumSearchResult {
+	cacheChan := make(chan []SearchResult)
+	var combinedResults []ForumSearchResult
+
+	go func() {
+		results, exists := resultsCache.Get(cacheKey)
+		if exists {
+			printDebug("Cache hit")
+			cacheChan <- results
+		} else {
+			printDebug("Cache miss")
+			cacheChan <- nil
+		}
+	}()
+
+	select {
+	case results := <-cacheChan:
+		if results == nil {
+			// Fetch only if the cache miss occurs and Crawler is enabled
+			if config.CrawlerEnabled {
+				combinedResults = fetchForumResults(query, safe, lang, page)
+				if len(combinedResults) > 0 {
+					resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
+				}
+			} else {
+				printDebug("Crawler disabled; skipping fetching.")
+			}
+		} else {
+			// Convert []SearchResult to []ForumSearchResult
+			combinedResults = convertToForumResults(results)
+		}
+	case <-time.After(2 * time.Second):
+		printDebug("Cache check timeout")
+		if config.CrawlerEnabled {
+			combinedResults = fetchForumResults(query, safe, lang, page)
+			if len(combinedResults) > 0 {
+				resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
+			}
+		} else {
+			printDebug("Crawler disabled; skipping fetching.")
+		}
+	}
+
+	return combinedResults
+}
+
+func convertToForumResults(results []SearchResult) []ForumSearchResult {
+	var forumResults []ForumSearchResult
+	for _, r := range results {
+		if res, ok := r.(ForumSearchResult); ok {
+			forumResults = append(forumResults, res)
+		}
+	}
+	return forumResults
+}
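The new getForumResultsFromCacheOrFetch mirrors the text, image, and file variants: the cache read runs in its own goroutine and races a two-second timer, so a slow or contended cache can never stall the request, and a timeout (or a miss) falls through to the crawler. A self-contained sketch of that lookup-with-timeout pattern, with generic names rather than this repo's types:

package main

import (
	"fmt"
	"time"
)

// lookupWithTimeout races a cache read against a deadline and falls back
// to fetch on a miss or on timeout.
func lookupWithTimeout(read func() ([]string, bool), fetch func() []string) []string {
	ch := make(chan []string)
	go func() {
		if v, ok := read(); ok {
			ch <- v // cache hit
			return
		}
		ch <- nil // cache miss
	}()

	select {
	case v := <-ch:
		if v != nil {
			return v
		}
		return fetch() // miss: fetch (and, in the real helper, re-cache)
	case <-time.After(2 * time.Second):
		return fetch() // cache too slow: fetch directly
	}
}

func main() {
	slow := func() ([]string, bool) { time.Sleep(3 * time.Second); return nil, false }
	fast := func() []string { return []string{"fetched"} }
	fmt.Println(lookupWithTimeout(slow, fast)) // prints [fetched] after ~2s
}

One caveat the sketch shares with the helper above: the channel is unbuffered, so when the timer wins the goroutine stays blocked on its send and leaks; a one-slot buffer (make(chan []SearchResult, 1)) would let it exit.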

images.go (2 changes)

@@ -96,7 +96,7 @@ func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
 				printDebug("Crawler disabled; skipping fetching from image search engines.")
 			}
 		} else {
-			_, _, imageResults := convertToSpecificResults(results)
+			_, _, imageResults, _ := convertToSpecificResults(results)
 			combinedResults = filterValidImages(imageResults)
 		}
 	case <-time.After(2 * time.Second):

text.go (2 changes)
@@ -83,7 +83,7 @@ func getTextResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string,
 				printInfo("Crawler disabled; skipping fetching.")
 			}
 		} else {
-			textResults, _, _ := convertToSpecificResults(results)
+			textResults, _, _, _ := convertToSpecificResults(results)
 			combinedResults = textResults
 		}
 	case <-time.After(2 * time.Second):