results fetching improvements
commit e7430e85bc
parent 943ff0f684

2 changed files with 100 additions and 40 deletions
images.go (92 changed lines)

@@ -89,31 +89,33 @@ func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
func fetchImageResults(query, safe, lang string, page int) []ImageSearchResult {
    var results []ImageSearchResult
    engineCount := len(imageSearchEngines)
    safeBool := safe == "active"

    for _, engine := range imageSearchEngines {
        printInfo("Using image search engine: %s", engine.Name)
    // Determine the engine to use based on the page number
    engineIndex := (page - 1) % engineCount
    engine := imageSearchEngines[engineIndex]

        searchResults, _, err := engine.Func(query, safe, lang, page)
        if err != nil {
            printWarn("Error performing image search with %s: %v", engine.Name, err)
            continue
        }
    // Calculate the specific page number for the selected engine
    enginePage := (page-1)/engineCount + 1

    // Debug print to verify the selected engine and page
    printInfo("Fetching image results for overall page %d using engine: %s (engine page %d)", page, engine.Name, enginePage)

    // Fetch results from the selected engine
    searchResults, _, err := engine.Func(query, safe, lang, enginePage)
    if err != nil {
        printWarn("Error performing image search with %s: %v", engine.Name, err)
    } else {
        for _, result := range searchResults {
            imageResult := result.(ImageSearchResult)
            if config.HardCacheEnabled == true {

                // Generate hash from the original full-size image URL
            if config.HardCacheEnabled {
                // Generate hash and set up caching
                hasher := md5.New()
                hasher.Write([]byte(imageResult.Full))
                hash := hex.EncodeToString(hasher.Sum(nil))
                filename := hash + ".webp"

                // Assign the ID
                imageResult.ID = hash

                // Set the ProxyFull URL
                imageResult.ProxyFull = "/image_cache/" + filename

                // Start caching and validation in the background
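The per-page engine selection above is plain modulo/division arithmetic over the overall page number. A minimal standalone sketch of how pages map to an engine and an engine-local page, using the same formulas as fetchImageResults; the engine names below are placeholders, not the project's actual engine list:

package main

import "fmt"

func main() {
    // Placeholder names standing in for imageSearchEngines.
    engines := []string{"EngineA", "EngineB", "EngineC"}
    engineCount := len(engines)

    for page := 1; page <= 7; page++ {
        // Same arithmetic as fetchImageResults: rotate engines per page,
        // advancing the engine-local page once per full rotation.
        engineIndex := (page - 1) % engineCount
        enginePage := (page-1)/engineCount + 1
        fmt.Printf("overall page %d -> %s, engine page %d\n", page, engines[engineIndex], enginePage)
    }
}

With three engines, overall pages 1-3 map to engine page 1, pages 4-6 to engine page 2, and so on.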
@@ -123,28 +125,68 @@ func fetchImageResults(query, safe, lang string, page int) []ImageSearchResult {
printWarn("Failed to cache image %s: %v", originalURL, err)
|
||||
}
|
||||
if !success {
|
||||
// Remove the image result from the cache
|
||||
removeImageResultFromCache(query, page, safeBool, lang, imgResult.ID)
|
||||
}
|
||||
}(imageResult, imageResult.Full, filename)
|
||||
|
||||
} else {
|
||||
// When hard cache is not enabled, use the imgproxy URLs
|
||||
imageResult.ProxyThumb = "/imgproxy?url=" + imageResult.Thumb // Proxied thumbnail
|
||||
imageResult.ProxyFull = "/imgproxy?url=" + imageResult.Full // Proxied full-size image
|
||||
// Use proxied URLs when hard cache is disabled
|
||||
imageResult.ProxyThumb = "/imgproxy?url=" + imageResult.Thumb
|
||||
imageResult.ProxyFull = "/imgproxy?url=" + imageResult.Full
|
||||
}
|
||||
results = append(results, imageResult)
|
||||
}
|
||||
}
|
||||
|
||||
if len(results) > 0 {
|
||||
break
|
||||
// Fallback mechanism if no results are fetched from the selected engine
|
||||
if len(results) == 0 {
|
||||
printWarn("No image results found with engine %s, trying other engines.", engine.Name)
|
||||
for i := 1; i < engineCount; i++ {
|
||||
nextEngine := imageSearchEngines[(engineIndex+i)%engineCount]
|
||||
enginePage = (page-1)/engineCount + 1 // Recalculate page for next engine
|
||||
printInfo("Trying next image search engine: %s (engine page %d)", nextEngine.Name, enginePage)
|
||||
|
||||
searchResults, _, err := nextEngine.Func(query, safe, lang, enginePage)
|
||||
if err != nil {
|
||||
printWarn("Error performing image search with %s: %v", nextEngine.Name, err)
|
||||
continue
|
||||
}
|
||||
for _, result := range searchResults {
|
||||
imageResult := result.(ImageSearchResult)
|
||||
if config.HardCacheEnabled {
|
||||
// Generate hash and set up caching
|
||||
hasher := md5.New()
|
||||
hasher.Write([]byte(imageResult.Full))
|
||||
hash := hex.EncodeToString(hasher.Sum(nil))
|
||||
filename := hash + ".webp"
|
||||
imageResult.ID = hash
|
||||
imageResult.ProxyFull = "/image_cache/" + filename
|
||||
|
||||
// Start caching and validation in the background
|
||||
go func(imgResult ImageSearchResult, originalURL, filename string) {
|
||||
_, success, err := cacheImage(originalURL, filename, imgResult.ID)
|
||||
if err != nil {
|
||||
printWarn("Failed to cache image %s: %v", originalURL, err)
|
||||
}
|
||||
if !success {
|
||||
removeImageResultFromCache(query, page, safeBool, lang, imgResult.ID)
|
||||
}
|
||||
}(imageResult, imageResult.Full, filename)
|
||||
} else {
|
||||
// Use proxied URLs when hard cache is disabled
|
||||
imageResult.ProxyThumb = "/imgproxy?url=" + imageResult.Thumb
|
||||
imageResult.ProxyFull = "/imgproxy?url=" + imageResult.Full
|
||||
}
|
||||
results = append(results, imageResult)
|
||||
}
|
||||
|
||||
if len(results) > 0 {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(results) == 0 {
|
||||
printWarn("No image results found for query: %s, trying other nodes", query)
|
||||
results = tryOtherNodesForImageSearch(query, safe, lang, page, []string{hostID})
|
||||
}
|
||||
// Final debug print to show the count of results fetched
|
||||
printInfo("Fetched %d image results for overall page %d", len(results), page)
|
||||
|
||||
return results
|
||||
}
|
||||
|
|
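When HardCacheEnabled is set, both hunks derive the cache filename deterministically from the full-size image URL: an MD5 digest of the URL, hex-encoded, plus a .webp extension, served from /image_cache/; the same hash is also reused as the result ID. A minimal sketch of that derivation, where cacheFilenameFor and the example URL are made up here for illustration:

package main

import (
    "crypto/md5"
    "encoding/hex"
    "fmt"
)

// cacheFilenameFor is a hypothetical helper mirroring the hashing done
// inline in fetchImageResults: md5(full URL) -> "<hex>.webp".
func cacheFilenameFor(fullURL string) string {
    hasher := md5.New()
    hasher.Write([]byte(fullURL))
    return hex.EncodeToString(hasher.Sum(nil)) + ".webp"
}

func main() {
    name := cacheFilenameFor("https://example.com/cat.jpg")
    fmt.Println("/image_cache/" + name) // the same URL always maps to the same cached file
}

Because the name depends only on the URL, repeated searches that surface the same image reuse one cached file instead of re-downloading it.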
text.go (48 changed lines)

@@ -107,29 +107,47 @@ func prefetchPage(query, safe, lang string, page int) {
func fetchTextResults(query, safe, lang string, page int) []TextSearchResult {
    var results []TextSearchResult
    engineCount := len(textSearchEngines)

    for _, engine := range textSearchEngines {
        printInfo("Using search engine: %s", engine.Name)
    // Determine which engine to use for the current page
    engineIndex := (page - 1) % engineCount
    engine := textSearchEngines[engineIndex]

        searchResults, _, err := engine.Func(query, safe, lang, page)
        if err != nil {
            printWarn("Error performing search with %s: %v", engine.Name, err)
            continue
        }
    // Calculate the page number for this engine
    enginePage := (page-1)/engineCount + 1

    // Debug print to verify engine and page number being fetched
    printInfo("Fetching results for overall page %d using engine: %s (engine page %d)", page, engine.Name, enginePage)

    // Fetch results from the selected engine
    searchResults, _, err := engine.Func(query, safe, lang, enginePage)
    if err != nil {
        printWarn("Error performing search with %s: %v", engine.Name, err)
    } else {
        results = append(results, validateResults(searchResults)...)
    }

        // If results are found, break out of the loop
        if len(results) > 0 {
            break
    // If no results are found with the selected engine, try the next in line
    if len(results) == 0 {
        for i := 1; i < engineCount; i++ {
            nextEngine := textSearchEngines[(engineIndex+i)%engineCount]
            enginePage = (page-1)/engineCount + 1 // Recalculate for the new engine
            printInfo("No results found, trying next engine: %s (engine page %d)", nextEngine.Name, enginePage)

            searchResults, _, err := nextEngine.Func(query, safe, lang, enginePage)
            if err != nil {
                printWarn("Error performing search with %s: %v", nextEngine.Name, err)
                continue
            }
            results = append(results, validateResults(searchResults)...)
            if len(results) > 0 {
                break
            }
        }
    }

    // If no results found after trying all engines
    if len(results) == 0 {
        printWarn("No text results found for query: %s, trying other nodes", query)
        results = tryOtherNodesForTextSearch(query, safe, lang, page, []string{hostID})
    }
    // Final debug print to display results count and source
    printInfo("Fetched %d results for overall page %d", len(results), page)

    return results
}
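The fallback loops in images.go and text.go share the same wrap-around order: starting from the page's primary engine, each remaining engine is visited exactly once. A small standalone sketch of that visitation order, again with placeholder engine names and an arbitrary page number:

package main

import "fmt"

func main() {
    engines := []string{"EngineA", "EngineB", "EngineC", "EngineD"}
    engineCount := len(engines)

    page := 3 // primary engine for page 3 is engines[(3-1)%4], i.e. EngineC
    engineIndex := (page - 1) % engineCount
    fmt.Println("primary:", engines[engineIndex])

    // Same wrap-around order as the fallback loops in fetchImageResults and fetchTextResults.
    for i := 1; i < engineCount; i++ {
        next := engines[(engineIndex+i)%engineCount]
        fmt.Println("fallback", i, "->", next)
    }
}

Only when every engine comes up empty does the code fall back to the other-node search (tryOtherNodesForImageSearch / tryOtherNodesForTextSearch).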