package main

import (
	"crypto/md5"
	"encoding/hex"
	"fmt"
	"net/http"
	"time"
)

// imageSearchEngines holds the engines that fetchImageResults rotates
// through, one engine per overall result page.
var imageSearchEngines []SearchEngine

func init() {
	imageSearchEngines = []SearchEngine{
		{Name: "Qwant", Func: wrapImageSearchFunc(PerformQwantImageSearch)},
		{Name: "Bing", Func: wrapImageSearchFunc(PerformBingImageSearch)},
		{Name: "DeviantArt", Func: wrapImageSearchFunc(PerformDeviantArtImageSearch)},
		//{Name: "Imgur", Func: wrapImageSearchFunc(PerformImgurImageSearch), Weight: 4}, // Image proxy not working
	}
}

// handleImageSearch renders the image results page for query/page using the
// user's settings. When the client reports JavaScript as disabled, images are
// cached synchronously so the initial render does not depend on client-side
// retries.
func handleImageSearch(w http.ResponseWriter, r *http.Request, settings UserSettings, query string, page int) {
	startTime := time.Now()

	cacheKey := CacheKey{
		Query: query,
		Page:  page,
		Safe:  settings.SafeSearch == "active",
		Lang:  settings.SearchLanguage,
		Type:  "image",
	}

	// Check if JavaScript is disabled; if so, cache images synchronously.
	jsDisabled := r.URL.Query().Get("js_disabled") == "true"
	synchronous := jsDisabled

	combinedResults := getImageResultsFromCacheOrFetch(cacheKey, query, settings.SafeSearch, settings.SearchLanguage, page, synchronous)

	elapsedTime := time.Since(startTime)

	// Prepare the data to pass to the template.
	data := map[string]interface{}{
		"Results":          combinedResults,
		"Query":            query,
		"Fetched":          fmt.Sprintf("%.2f %s", elapsedTime.Seconds(), Translate("seconds")),
		"Page":             page,
		"HasPrevPage":      page > 1,
		"HasNextPage":      len(combinedResults) >= 50,
		"NoResults":        len(combinedResults) == 0,
		"LanguageOptions":  languageOptions,
		"CurrentLang":      settings.SearchLanguage,
		"Theme":            settings.Theme,
		"Safe":             settings.SafeSearch,
		"IsThemeDark":      settings.IsThemeDark,
		"HardCacheEnabled": config.HardCacheEnabled,
		"JsDisabled":       jsDisabled,
	}

	// Render the full page.
	renderTemplate(w, "images.html", data)
}

// getImageResultsFromCacheOrFetch returns results for cacheKey from the
// results cache when available, falling back to a live fetch on a cache miss
// or when the cache lookup takes longer than two seconds.
func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page int, synchronous bool) []ImageSearchResult {
	// Buffered so the lookup goroutine can complete its send and exit even
	// if the timeout branch below wins (prevents a goroutine leak).
	cacheChan := make(chan []SearchResult, 1)
	var combinedResults []ImageSearchResult

	go func() {
		results, exists := resultsCache.Get(cacheKey)
		if exists {
			printInfo("Cache hit")
			cacheChan <- results
		} else {
			printInfo("Cache miss")
			cacheChan <- nil
		}
	}()

	// fetchAndStore performs a live fetch and, on success, stores the
	// filtered results back into the cache.
	fetchAndStore := func() {
		combinedResults = fetchImageResults(query, safe, lang, page, synchronous)
		if len(combinedResults) > 0 {
			combinedResults = filterValidImages(combinedResults)
			resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
		}
	}

	select {
	case results := <-cacheChan:
		if results == nil {
			fetchAndStore()
		} else {
			_, _, imageResults := convertToSpecificResults(results)
			combinedResults = filterValidImages(imageResults)
		}
	case <-time.After(2 * time.Second):
		printInfo("Cache check timeout")
		fetchAndStore()
	}

	return combinedResults
}

// processImageResult fills in the caching/proxy fields of a single result.
// With hard caching enabled it derives a stable MD5-based cache filename and
// either caches the image synchronously or kicks off background caching with
// validation. It returns false when the result should be dropped
// (synchronous cache failure with skipOnCacheFailure set).
func processImageResult(imageResult *ImageSearchResult, query, lang string, page int, safeBool, synchronous, skipOnCacheFailure bool) bool {
	if !config.HardCacheEnabled {
		// Use proxied URLs when hard cache is disabled.
		imageResult.ProxyThumb = "/imgproxy?url=" + imageResult.Thumb
		imageResult.ProxyFull = "/imgproxy?url=" + imageResult.Full
		return true
	}

	// Generate a content-addressed filename and set up caching. MD5 is used
	// only as a cheap stable identifier here, not for security.
	hasher := md5.New()
	hasher.Write([]byte(imageResult.Full))
	hash := hex.EncodeToString(hasher.Sum(nil))
	filename := hash + ".webp"
	imageResult.ID = hash
	imageResult.ProxyFull = "/image_cache/" + filename

	if synchronous {
		// Synchronously cache the image.
		if _, success, err := cacheImage(imageResult.Full, filename, imageResult.ID); err != nil || !success {
			printWarn("Failed to cache image %s: %v", imageResult.Full, err)
			if skipOnCacheFailure {
				return false
			}
			// Fallback to proxy URL.
			imageResult.ProxyFull = "/imgproxy?url=" + imageResult.Full
		}
		return true
	}

	// Start caching and validation in the background; evict the result from
	// the cached page if the image cannot be cached.
	go func(imgResult ImageSearchResult, originalURL, filename string) {
		_, success, err := cacheImage(originalURL, filename, imgResult.ID)
		if err != nil || !success {
			printWarn("Failed to cache image %s: %v", originalURL, err)
			removeImageResultFromCache(query, page, safeBool, lang, imgResult.ID)
		}
	}(*imageResult, imageResult.Full, filename)
	return true
}

// fetchImageResults fetches one page of image results. Engines are rotated
// by page number (page 1 -> engine 0, page 2 -> engine 1, ...); if the
// selected engine yields nothing, the remaining engines are tried in
// rotation order until one returns results.
func fetchImageResults(query, safe, lang string, page int, synchronous bool) []ImageSearchResult {
	var results []ImageSearchResult
	engineCount := len(imageSearchEngines)
	safeBool := safe == "active"

	// Determine the engine to use based on the page number, and the
	// engine-local page number for it.
	engineIndex := (page - 1) % engineCount
	enginePage := (page-1)/engineCount + 1

	// Fetch results from the selected engine.
	engine := imageSearchEngines[engineIndex]
	searchResults, _, err := engine.Func(query, safe, lang, enginePage)
	if err != nil {
		printWarn("Error performing image search with %s: %v", engine.Name, err)
	} else {
		for _, result := range searchResults {
			imageResult := result.(ImageSearchResult)
			// Primary engine: a synchronous cache failure falls back to the
			// image proxy rather than dropping the result.
			if processImageResult(&imageResult, query, lang, page, safeBool, synchronous, false) {
				results = append(results, imageResult)
			}
		}
	}

	// Fallback mechanism if no results are fetched from the selected engine.
	if len(results) == 0 {
		printWarn("No image results found with engine %s, trying other engines.", engine.Name)
		for i := 1; i < engineCount; i++ {
			nextEngine := imageSearchEngines[(engineIndex+i)%engineCount]
			printInfo("Trying next image search engine: %s (engine page %d)", nextEngine.Name, enginePage)

			searchResults, _, err := nextEngine.Func(query, safe, lang, enginePage)
			if err != nil {
				printWarn("Error performing image search with %s: %v", nextEngine.Name, err)
				continue
			}
			for _, result := range searchResults {
				imageResult := result.(ImageSearchResult)
				// Fallback engines: drop results whose synchronous caching
				// fails instead of proxying them.
				if processImageResult(&imageResult, query, lang, page, safeBool, synchronous, true) {
					results = append(results, imageResult)
				}
			}
			if len(results) > 0 {
				break
			}
		}
	}

	// Filter out images that failed to cache or are invalid.
	validResults := make([]ImageSearchResult, 0, len(results))
	for _, imageResult := range results {
		if imageResult.ProxyFull != "" {
			validResults = append(validResults, imageResult)
		} else {
			printWarn("Skipping invalid image with ID %s", imageResult.ID)
		}
	}

	// Final debug print to show the count of results fetched.
	printInfo("Fetched %d image results for overall page %d", len(results), page)

	return validResults
}

// wrapImageSearchFunc adapts an image-specific search function to the
// generic SearchEngine function signature by boxing each ImageSearchResult
// into a SearchResult.
func wrapImageSearchFunc(f func(string, string, string, int) ([]ImageSearchResult, time.Duration, error)) func(string, string, string, int) ([]SearchResult, time.Duration, error) {
	return func(query, safe, lang string, page int) ([]SearchResult, time.Duration, error) {
		imageResults, duration, err := f(query, safe, lang, page)
		if err != nil {
			return nil, duration, err
		}
		searchResults := make([]SearchResult, len(imageResults))
		for i, result := range imageResults {
			searchResults[i] = result
		}
		return searchResults, duration, nil
	}
}