results fetching improvements: rotate search engines per page and fall back to the remaining engines (then other nodes) when a page returns no results

This commit is contained in:
partisan 2024-10-31 19:38:31 +01:00
parent 943ff0f684
commit e7430e85bc
2 changed files with 100 additions and 40 deletions

View file

@ -89,31 +89,33 @@ func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
func fetchImageResults(query, safe, lang string, page int) []ImageSearchResult { func fetchImageResults(query, safe, lang string, page int) []ImageSearchResult {
var results []ImageSearchResult var results []ImageSearchResult
engineCount := len(imageSearchEngines)
safeBool := safe == "active" safeBool := safe == "active"
for _, engine := range imageSearchEngines { // Determine the engine to use based on the page number
printInfo("Using image search engine: %s", engine.Name) engineIndex := (page - 1) % engineCount
engine := imageSearchEngines[engineIndex]
searchResults, _, err := engine.Func(query, safe, lang, page) // Calculate the specific page number for the selected engine
enginePage := (page-1)/engineCount + 1
// Debug print to verify the selected engine and page
printInfo("Fetching image results for overall page %d using engine: %s (engine page %d)", page, engine.Name, enginePage)
// Fetch results from the selected engine
searchResults, _, err := engine.Func(query, safe, lang, enginePage)
if err != nil { if err != nil {
printWarn("Error performing image search with %s: %v", engine.Name, err) printWarn("Error performing image search with %s: %v", engine.Name, err)
continue } else {
}
for _, result := range searchResults { for _, result := range searchResults {
imageResult := result.(ImageSearchResult) imageResult := result.(ImageSearchResult)
if config.HardCacheEnabled == true { if config.HardCacheEnabled {
// Generate hash and set up caching
// Generate hash from the original full-size image URL
hasher := md5.New() hasher := md5.New()
hasher.Write([]byte(imageResult.Full)) hasher.Write([]byte(imageResult.Full))
hash := hex.EncodeToString(hasher.Sum(nil)) hash := hex.EncodeToString(hasher.Sum(nil))
filename := hash + ".webp" filename := hash + ".webp"
// Assign the ID
imageResult.ID = hash imageResult.ID = hash
// Set the ProxyFull URL
imageResult.ProxyFull = "/image_cache/" + filename imageResult.ProxyFull = "/image_cache/" + filename
// Start caching and validation in the background // Start caching and validation in the background
@ -123,15 +125,56 @@ func fetchImageResults(query, safe, lang string, page int) []ImageSearchResult {
printWarn("Failed to cache image %s: %v", originalURL, err) printWarn("Failed to cache image %s: %v", originalURL, err)
} }
if !success { if !success {
// Remove the image result from the cache
removeImageResultFromCache(query, page, safeBool, lang, imgResult.ID) removeImageResultFromCache(query, page, safeBool, lang, imgResult.ID)
} }
}(imageResult, imageResult.Full, filename) }(imageResult, imageResult.Full, filename)
} else { } else {
// When hard cache is not enabled, use the imgproxy URLs // Use proxied URLs when hard cache is disabled
imageResult.ProxyThumb = "/imgproxy?url=" + imageResult.Thumb // Proxied thumbnail imageResult.ProxyThumb = "/imgproxy?url=" + imageResult.Thumb
imageResult.ProxyFull = "/imgproxy?url=" + imageResult.Full // Proxied full-size image imageResult.ProxyFull = "/imgproxy?url=" + imageResult.Full
}
results = append(results, imageResult)
}
}
// Fallback mechanism if no results are fetched from the selected engine
if len(results) == 0 {
printWarn("No image results found with engine %s, trying other engines.", engine.Name)
for i := 1; i < engineCount; i++ {
nextEngine := imageSearchEngines[(engineIndex+i)%engineCount]
enginePage = (page-1)/engineCount + 1 // Recalculate page for next engine
printInfo("Trying next image search engine: %s (engine page %d)", nextEngine.Name, enginePage)
searchResults, _, err := nextEngine.Func(query, safe, lang, enginePage)
if err != nil {
printWarn("Error performing image search with %s: %v", nextEngine.Name, err)
continue
}
for _, result := range searchResults {
imageResult := result.(ImageSearchResult)
if config.HardCacheEnabled {
// Generate hash and set up caching
hasher := md5.New()
hasher.Write([]byte(imageResult.Full))
hash := hex.EncodeToString(hasher.Sum(nil))
filename := hash + ".webp"
imageResult.ID = hash
imageResult.ProxyFull = "/image_cache/" + filename
// Start caching and validation in the background
go func(imgResult ImageSearchResult, originalURL, filename string) {
_, success, err := cacheImage(originalURL, filename, imgResult.ID)
if err != nil {
printWarn("Failed to cache image %s: %v", originalURL, err)
}
if !success {
removeImageResultFromCache(query, page, safeBool, lang, imgResult.ID)
}
}(imageResult, imageResult.Full, filename)
} else {
// Use proxied URLs when hard cache is disabled
imageResult.ProxyThumb = "/imgproxy?url=" + imageResult.Thumb
imageResult.ProxyFull = "/imgproxy?url=" + imageResult.Full
} }
results = append(results, imageResult) results = append(results, imageResult)
} }
@ -140,12 +183,11 @@ func fetchImageResults(query, safe, lang string, page int) []ImageSearchResult {
break break
} }
} }
if len(results) == 0 {
printWarn("No image results found for query: %s, trying other nodes", query)
results = tryOtherNodesForImageSearch(query, safe, lang, page, []string{hostID})
} }
// Final debug print to show the count of results fetched
printInfo("Fetched %d image results for overall page %d", len(results), page)
return results return results
} }

40 lines changed in text.go — view file

@ -107,30 +107,48 @@ func prefetchPage(query, safe, lang string, page int) {
func fetchTextResults(query, safe, lang string, page int) []TextSearchResult { func fetchTextResults(query, safe, lang string, page int) []TextSearchResult {
var results []TextSearchResult var results []TextSearchResult
engineCount := len(textSearchEngines)
for _, engine := range textSearchEngines { // Determine which engine to use for the current page
printInfo("Using search engine: %s", engine.Name) engineIndex := (page - 1) % engineCount
engine := textSearchEngines[engineIndex]
searchResults, _, err := engine.Func(query, safe, lang, page) // Calculate the page number for this engine
enginePage := (page-1)/engineCount + 1
// Debug print to verify engine and page number being fetched
printInfo("Fetching results for overall page %d using engine: %s (engine page %d)", page, engine.Name, enginePage)
// Fetch results from the selected engine
searchResults, _, err := engine.Func(query, safe, lang, enginePage)
if err != nil { if err != nil {
printWarn("Error performing search with %s: %v", engine.Name, err) printWarn("Error performing search with %s: %v", engine.Name, err)
continue } else {
results = append(results, validateResults(searchResults)...)
} }
results = append(results, validateResults(searchResults)...) // If no results are found with the selected engine, try the next in line
if len(results) == 0 {
for i := 1; i < engineCount; i++ {
nextEngine := textSearchEngines[(engineIndex+i)%engineCount]
enginePage = (page-1)/engineCount + 1 // Recalculate for the new engine
printInfo("No results found, trying next engine: %s (engine page %d)", nextEngine.Name, enginePage)
// If results are found, break out of the loop searchResults, _, err := nextEngine.Func(query, safe, lang, enginePage)
if err != nil {
printWarn("Error performing search with %s: %v", nextEngine.Name, err)
continue
}
results = append(results, validateResults(searchResults)...)
if len(results) > 0 { if len(results) > 0 {
break break
} }
} }
// If no results found after trying all engines
if len(results) == 0 {
printWarn("No text results found for query: %s, trying other nodes", query)
results = tryOtherNodesForTextSearch(query, safe, lang, page, []string{hostID})
} }
// Final debug print to display results count and source
printInfo("Fetched %d results for overall page %d", len(results), page)
return results return results
} }