Use the results cache (with 6-hour expiration) in text search
This commit is contained in:
parent
d5bbfe118d
commit
bc85d7d4a2
2 changed files with 66 additions and 20 deletions
43
cache.go
43
cache.go
|
@@ -3,6 +3,7 @@ package main
|
|||
import (
|
||||
"fmt"
|
||||
"sync"
|
||||
"time"
|
||||
)
|
||||
|
||||
// TextSearchResult represents a single search result item.
|
||||
|
@@ -21,16 +22,24 @@ type CacheKey struct {
|
|||
Lang string
|
||||
}
|
||||
|
||||
// ResultsCache is a thread-safe map for caching search results by composite keys.
|
||||
type ResultsCache struct {
|
||||
mu sync.Mutex
|
||||
results map[string][]TextSearchResult
|
||||
// CachedItem represents an item stored in the cache with an expiration time.
|
||||
type CachedItem struct {
|
||||
Results []TextSearchResult
|
||||
StoredTime time.Time
|
||||
}
|
||||
|
||||
// NewResultsCache creates a new ResultsCache.
|
||||
func NewResultsCache() *ResultsCache {
|
||||
// ResultsCache is a thread-safe map for caching search results by composite keys.
|
||||
type ResultsCache struct {
|
||||
mu sync.Mutex
|
||||
results map[string]CachedItem
|
||||
expiration time.Duration
|
||||
}
|
||||
|
||||
// NewResultsCache creates a new ResultsCache with a specified expiration duration.
|
||||
func NewResultsCache(expiration time.Duration) *ResultsCache {
|
||||
return &ResultsCache{
|
||||
results: make(map[string][]TextSearchResult),
|
||||
results: make(map[string]CachedItem),
|
||||
expiration: expiration,
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -38,15 +47,29 @@ func NewResultsCache() *ResultsCache {
|
|||
func (rc *ResultsCache) Get(key CacheKey) ([]TextSearchResult, bool) {
|
||||
rc.mu.Lock()
|
||||
defer rc.mu.Unlock()
|
||||
results, exists := rc.results[rc.keyToString(key)]
|
||||
return results, exists
|
||||
|
||||
item, exists := rc.results[rc.keyToString(key)]
|
||||
if !exists {
|
||||
return nil, false
|
||||
}
|
||||
|
||||
// Check if the item has expired
|
||||
if time.Since(item.StoredTime) > rc.expiration {
|
||||
delete(rc.results, rc.keyToString(key))
|
||||
return nil, false
|
||||
}
|
||||
|
||||
return item.Results, true
|
||||
}
|
||||
|
||||
// Set stores the results for a given key in the cache.
|
||||
func (rc *ResultsCache) Set(key CacheKey, results []TextSearchResult) {
|
||||
rc.mu.Lock()
|
||||
defer rc.mu.Unlock()
|
||||
rc.results[rc.keyToString(key)] = results
|
||||
rc.results[rc.keyToString(key)] = CachedItem{
|
||||
Results: results,
|
||||
StoredTime: time.Now(),
|
||||
}
|
||||
}
|
||||
|
||||
// keyToString converts a CacheKey to a string representation.
|
||||
|
|
43
text.go
43
text.go
|
@@ -1,4 +1,3 @@
|
|||
// text.go
|
||||
package main
|
||||
|
||||
import (
|
||||
|
@@ -14,7 +13,7 @@ import (
|
|||
|
||||
var (
|
||||
debugMode bool
|
||||
resultsCache = NewResultsCache()
|
||||
resultsCache = NewResultsCache(6 * time.Hour) // Cache with 6-hour expiration
|
||||
)
|
||||
|
||||
func init() {
|
||||
|
@@ -27,21 +26,45 @@ func HandleTextSearch(w http.ResponseWriter, query, safe, lang string, page int)
|
|||
const resultsPerPage = 10
|
||||
|
||||
cacheKey := CacheKey{Query: query, Page: page, Safe: safe, Lang: lang}
|
||||
cacheChan := make(chan []TextSearchResult)
|
||||
var combinedResults []TextSearchResult
|
||||
var fromCache bool
|
||||
|
||||
// Try to get results from cache
|
||||
combinedResults, exists := resultsCache.Get(cacheKey)
|
||||
if !exists {
|
||||
// Fetch results for the current page
|
||||
go func() {
|
||||
results, exists := resultsCache.Get(cacheKey)
|
||||
if exists {
|
||||
log.Println("Cache hit")
|
||||
cacheChan <- results
|
||||
} else {
|
||||
log.Println("Cache miss")
|
||||
cacheChan <- nil
|
||||
}
|
||||
}()
|
||||
|
||||
select {
|
||||
case combinedResults = <-cacheChan:
|
||||
if combinedResults != nil {
|
||||
fromCache = true
|
||||
} else {
|
||||
combinedResults = fetchAndCacheResults(query, safe, lang, page, resultsPerPage)
|
||||
resultsCache.Set(cacheKey, combinedResults)
|
||||
}
|
||||
case <-time.After(2 * time.Second):
|
||||
log.Println("Cache check timeout")
|
||||
combinedResults = fetchAndCacheResults(query, safe, lang, page, resultsPerPage)
|
||||
resultsCache.Set(cacheKey, combinedResults)
|
||||
}
|
||||
|
||||
// Pre-fetch and cache results for the next page
|
||||
nextPageResults := fetchAndCacheResults(query, safe, lang, page+1, resultsPerPage)
|
||||
resultsCache.Set(CacheKey{Query: query, Page: page + 1, Safe: safe, Lang: lang}, nextPageResults)
|
||||
// Only pre-fetch and cache results for the next page if we fetched new results
|
||||
if !fromCache {
|
||||
go func() {
|
||||
nextPageResults := fetchAndCacheResults(query, safe, lang, page+1, resultsPerPage)
|
||||
resultsCache.Set(CacheKey{Query: query, Page: page + 1, Safe: safe, Lang: lang}, nextPageResults)
|
||||
}()
|
||||
}
|
||||
|
||||
hasPrevPage := page > 1
|
||||
hasNextPage := len(nextPageResults) > 0
|
||||
hasNextPage := len(combinedResults) == resultsPerPage
|
||||
|
||||
displayResults(w, combinedResults, query, lang, time.Since(startTime).Seconds(), page, hasPrevPage, hasNextPage)
|
||||
}
|
||||
|
|
Loading…
Add table
Reference in a new issue