diff --git a/cache.go b/cache.go
index 7ad54ca..c116cc9 100644
--- a/cache.go
+++ b/cache.go
@@ -195,14 +195,21 @@ func convertToSearchResults(results interface{}) []SearchResult {
 			genericResults[i] = r
 		}
 		return genericResults
+	case []ForumSearchResult:
+		genericResults := make([]SearchResult, len(res))
+		for i, r := range res {
+			genericResults[i] = r
+		}
+		return genericResults
 	}
 	return nil
 }
 
-func convertToSpecificResults(results []SearchResult) ([]TextSearchResult, []TorrentResult, []ImageSearchResult) {
+func convertToSpecificResults(results []SearchResult) ([]TextSearchResult, []TorrentResult, []ImageSearchResult, []ForumSearchResult) {
 	var textResults []TextSearchResult
 	var torrentResults []TorrentResult
 	var imageResults []ImageSearchResult
+	var forumResults []ForumSearchResult
 	for _, r := range results {
 		switch res := r.(type) {
 		case TextSearchResult:
@@ -211,7 +218,9 @@ func convertToSpecificResults(results []SearchResult) ([]TextSearchResult, []Tor
 			torrentResults = append(torrentResults, res)
 		case ImageSearchResult:
 			imageResults = append(imageResults, res)
+		case ForumSearchResult:
+			forumResults = append(forumResults, res)
 		}
 	}
-	return textResults, torrentResults, imageResults
+	return textResults, torrentResults, imageResults, forumResults
 }
diff --git a/files.go b/files.go
index 86d19f9..1755143 100755
--- a/files.go
+++ b/files.go
@@ -97,7 +97,7 @@ func getFileResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string,
 				printDebug("Crawler disabled; skipping fetching.")
 			}
 		} else {
-			_, torrentResults, _ := convertToSpecificResults(results)
+			_, torrentResults, _, _ := convertToSpecificResults(results)
 			combinedResults = torrentResults
 		}
 	case <-time.After(2 * time.Second):
diff --git a/forums.go b/forums.go
index 1f82b8e..973c070 100755
--- a/forums.go
+++ b/forums.go
@@ -3,7 +3,6 @@ package main
 import (
 	"encoding/json"
 	"fmt"
-	"log"
 	"math"
 	"net/http"
 	"net/url"
@@ -106,22 +105,8 @@ func handleForumsSearch(w http.ResponseWriter, settings UserSettings, query stri
 	// Start measuring the time for fetching results
 	startTime := time.Now()
 
-	var results []ForumSearchResult
-	var err error
-
-	// Check if CrawlerEnabled is true before performing Reddit search
-	if config.CrawlerEnabled {
-		results, err = PerformRedditSearch(query, settings.SafeSearch, page)
-	} else {
-		printDebug("Crawler is disabled; skipping Reddit search.")
-		results = []ForumSearchResult{}
-	}
-
-	// Use fallback (other nodes) if no results or an error occurred
-	if err != nil || len(results) == 0 {
-		log.Printf("No results from primary search, trying other nodes")
-		results = tryOtherNodesForForumSearch(query, settings.SafeSearch, settings.SearchLanguage, page)
-	}
+	cacheKey := CacheKey{Query: query, Page: page, Safe: settings.SafeSearch == "active", Lang: settings.SearchLanguage, Type: "forum"}
+	results := getForumResultsFromCacheOrFetch(cacheKey, query, settings.SafeSearch, settings.SearchLanguage, page)
 
 	// Measure the elapsed time for fetching results
 	elapsedTime := time.Since(startTime)
@@ -144,3 +129,59 @@ func handleForumsSearch(w http.ResponseWriter, settings UserSettings, query stri
 	// Render the template without measuring the time
 	renderTemplate(w, "forums.html", data)
 }
+
+func getForumResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page int) []ForumSearchResult {
+	cacheChan := make(chan []SearchResult)
+	var combinedResults []ForumSearchResult
+
+	go func() {
+		results, exists := resultsCache.Get(cacheKey)
+		if exists {
+			printDebug("Cache hit")
+			cacheChan <- results
+		} else {
+			printDebug("Cache miss")
+			cacheChan <- nil
+		}
+	}()
+
+	select {
+	case results := <-cacheChan:
+		if results == nil {
+			// Fetch only if the cache miss occurs and Crawler is enabled
+			if config.CrawlerEnabled {
+				combinedResults = fetchForumResults(query, safe, lang, page)
+				if len(combinedResults) > 0 {
+					resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
+				}
+			} else {
+				printDebug("Crawler disabled; skipping fetching.")
+			}
+		} else {
+			// Convert []SearchResult to []ForumSearchResult
+			combinedResults = convertToForumResults(results)
+		}
+	case <-time.After(2 * time.Second):
+		printDebug("Cache check timeout")
+		if config.CrawlerEnabled {
+			combinedResults = fetchForumResults(query, safe, lang, page)
+			if len(combinedResults) > 0 {
+				resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
+			}
+		} else {
+			printDebug("Crawler disabled; skipping fetching.")
+		}
+	}
+
+	return combinedResults
+}
+
+func convertToForumResults(results []SearchResult) []ForumSearchResult {
+	var forumResults []ForumSearchResult
+	for _, r := range results {
+		if res, ok := r.(ForumSearchResult); ok {
+			forumResults = append(forumResults, res)
+		}
+	}
+	return forumResults
+}
diff --git a/images.go b/images.go
index 8ee181a..a044013 100755
--- a/images.go
+++ b/images.go
@@ -96,7 +96,7 @@ func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
 				printDebug("Crawler disabled; skipping fetching from image search engines.")
 			}
 		} else {
-			_, _, imageResults := convertToSpecificResults(results)
+			_, _, imageResults, _ := convertToSpecificResults(results)
 			combinedResults = filterValidImages(imageResults)
 		}
 	case <-time.After(2 * time.Second):
diff --git a/text.go b/text.go
index 9b56ee6..4744a97 100755
--- a/text.go
+++ b/text.go
@@ -83,7 +83,7 @@ func getTextResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string,
 				printInfo("Crawler disabled; skipping fetching.")
 			}
 		} else {
-			textResults, _, _ := convertToSpecificResults(results)
+			textResults, _, _, _ := convertToSpecificResults(results)
 			combinedResults = textResults
 		}
 	case <-time.After(2 * time.Second):
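
Note on the type round-trip above: the cache stores every result kind type-erased as []SearchResult, so the new forum path depends on widening via convertToSearchResults on Set and narrowing back with a type assertion on Get. Below is a minimal, self-contained sketch of that round-trip; the SearchResult and ForumSearchResult definitions here are placeholders (the real ones live elsewhere in the repo and may carry methods and more fields), while convertToForumResults matches the patch.

package main

import "fmt"

// Placeholder: the repo's actual SearchResult interface may declare methods.
type SearchResult interface{}

// Placeholder shape; the real ForumSearchResult has more fields.
type ForumSearchResult struct {
	URL   string
	Title string
}

// Same logic as convertToForumResults in the patch: narrow the
// type-erased slice back to the concrete forum type, skipping
// anything that is not a ForumSearchResult.
func convertToForumResults(results []SearchResult) []ForumSearchResult {
	var forumResults []ForumSearchResult
	for _, r := range results {
		if res, ok := r.(ForumSearchResult); ok {
			forumResults = append(forumResults, res)
		}
	}
	return forumResults
}

func main() {
	// Widen on write (what resultsCache.Set receives), narrow on read.
	cached := []SearchResult{ForumSearchResult{URL: "https://example.com/t/1", Title: "hit"}}
	fmt.Println(convertToForumResults(cached))
}

One behavioral detail worth keeping in mind: the assertion matches by value, so a *ForumSearchResult stored into the cache would fail it and be silently dropped. The patch consistently stores values, which keeps the round-trip lossless.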