package main

import (
	"crypto/md5"
	"encoding/hex"
	"fmt"
	"net/http"
	"time"
)

var imageSearchEngines []SearchEngine

func init() {
	imageSearchEngines = []SearchEngine{
		{Name: "Qwant", Func: wrapImageSearchFunc(PerformQwantImageSearch)},
		{Name: "Bing", Func: wrapImageSearchFunc(PerformBingImageSearch)},
		{Name: "DeviantArt", Func: wrapImageSearchFunc(PerformDeviantArtImageSearch)},
		//{Name: "Imgur", Func: wrapImageSearchFunc(PerformImgurImageSearch), Weight: 4}, // Image proxy not working
	}
}
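
// The order of this slice matters: fetchImageResults picks an engine with
// (page-1) % len(imageSearchEngines), so the first page of any query is
// always served by the first engine registered here.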

func handleImageSearch(w http.ResponseWriter, settings UserSettings, query string, page int) {
	startTime := time.Now()

	cacheKey := CacheKey{Query: query, Page: page, Safe: settings.SafeSearch == "active", Lang: settings.SearchLanguage, Type: "image"}
	combinedResults := getImageResultsFromCacheOrFetch(cacheKey, query, settings.SafeSearch, settings.SearchLanguage, page)

	elapsedTime := time.Since(startTime)

	// Prepare the data to pass to the template
	data := map[string]interface{}{
		"Results":          combinedResults,
		"Query":            query,
		"Fetched":          fmt.Sprintf("%.2f %s", elapsedTime.Seconds(), Translate("seconds")), // Time for fetching
		"Page":             page,
		"HasPrevPage":      page > 1,
		"HasNextPage":      len(combinedResults) >= 50,
		"NoResults":        len(combinedResults) == 0,
		"LanguageOptions":  languageOptions,
		"CurrentLang":      settings.SearchLanguage,
		"Theme":            settings.Theme,
		"Safe":             settings.SafeSearch,
		"IsThemeDark":      settings.IsThemeDark,
		"HardCacheEnabled": config.HardCacheDuration == 0,
	}

	// Render the template without measuring the time
	renderTemplate(w, "images.html", data)
}
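
// Note: HasNextPage above is a heuristic. A full page (>= 50 results) is
// taken to mean more results are probably available; no engine reports a
// total count here, so the last page may occasionally come back empty.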

func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page int) []ImageSearchResult {
	// Buffered so the lookup goroutine can always send its result and exit,
	// even when the 2-second timeout below fires first.
	cacheChan := make(chan []SearchResult, 1)
	var combinedResults []ImageSearchResult

	go func() {
		results, exists := resultsCache.Get(cacheKey)
		if exists {
			printInfo("Cache hit")
			cacheChan <- results
		} else {
			printInfo("Cache miss")
			cacheChan <- nil
		}
	}()

	select {
	case results := <-cacheChan:
		if results == nil {
			combinedResults = fetchImageResults(query, safe, lang, page)
			if len(combinedResults) > 0 {
				combinedResults = filterValidImages(combinedResults)
				resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
			}
		} else {
			_, _, imageResults := convertToSpecificResults(results)
			combinedResults = filterValidImages(imageResults)
		}
	case <-time.After(2 * time.Second):
		printInfo("Cache check timeout")
		combinedResults = fetchImageResults(query, safe, lang, page)
		if len(combinedResults) > 0 {
			combinedResults = filterValidImages(combinedResults)
			resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
		}
	}

	return combinedResults
}
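
// The cache path above trades freshness for latency: a slow cache lookup
// (over two seconds) is treated like a miss and triggers a live fetch, and
// any non-empty fetched set is filtered and written back for later requests.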

func fetchImageResults(query, safe, lang string, page int) []ImageSearchResult {
	var results []ImageSearchResult
	engineCount := len(imageSearchEngines)
	safeBool := safe == "active"

	// Determine the engine to use based on the page number
	engineIndex := (page - 1) % engineCount
	engine := imageSearchEngines[engineIndex]

	// Calculate the specific page number for the selected engine
	enginePage := (page-1)/engineCount + 1
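
	// Worked example of the rotation above, assuming the three engines
	// registered in init() (engineCount == 3):
	//   overall page 1 -> Qwant      (engine page 1)
	//   overall page 2 -> Bing       (engine page 1)
	//   overall page 3 -> DeviantArt (engine page 1)
	//   overall page 4 -> Qwant      (engine page 2)
	// Each engine therefore sees a contiguous page sequence of its own.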

	// Debug print to verify the selected engine and page
	printInfo("Fetching image results for overall page %d using engine: %s (engine page %d)", page, engine.Name, enginePage)

	// Fetch results from the selected engine
	searchResults, _, err := engine.Func(query, safe, lang, enginePage)
	if err != nil {
		printWarn("Error performing image search with %s: %v", engine.Name, err)
	} else {
		for _, result := range searchResults {
			imageResult := result.(ImageSearchResult)
			results = append(results, prepareImageResult(imageResult, query, lang, page, safeBool))
		}
	}

	// Fallback mechanism if no results are fetched from the selected engine
	if len(results) == 0 {
		printWarn("No image results found with engine %s, trying other engines.", engine.Name)
		for i := 1; i < engineCount; i++ {
			nextEngine := imageSearchEngines[(engineIndex+i)%engineCount]
			enginePage = (page-1)/engineCount + 1 // Recalculate page for next engine
			printInfo("Trying next image search engine: %s (engine page %d)", nextEngine.Name, enginePage)

			searchResults, _, err := nextEngine.Func(query, safe, lang, enginePage)
			if err != nil {
				printWarn("Error performing image search with %s: %v", nextEngine.Name, err)
				continue
			}
			for _, result := range searchResults {
				imageResult := result.(ImageSearchResult)
				results = append(results, prepareImageResult(imageResult, query, lang, page, safeBool))
			}

			if len(results) > 0 {
				break
			}
		}
	}

	// Final debug print to show the count of results fetched
	printInfo("Fetched %d image results for overall page %d", len(results), page)

	return results
}

// prepareImageResult fills in the cache/proxy fields of a single image result
// and, when the hard cache is enabled, starts caching it in the background.
// It is shared by the primary fetch and the fallback loop in fetchImageResults.
func prepareImageResult(imageResult ImageSearchResult, query, lang string, page int, safeBool bool) ImageSearchResult {
	if config.HardCacheEnabled {
		// Generate hash and set up caching
		hasher := md5.New()
		hasher.Write([]byte(imageResult.Full))
		hash := hex.EncodeToString(hasher.Sum(nil))
		filename := hash + ".webp"
		imageResult.ID = hash
		imageResult.ProxyFull = "/image_cache/" + filename

		// Start caching and validation in the background
		go func(imgResult ImageSearchResult, originalURL, filename string) {
			_, success, err := cacheImage(originalURL, filename, imgResult.ID)
			if err != nil {
				printWarn("Failed to cache image %s: %v", originalURL, err)
			}
			if !success {
				removeImageResultFromCache(query, page, safeBool, lang, imgResult.ID)
			}
		}(imageResult, imageResult.Full, filename)
	} else {
		// Use proxied URLs when hard cache is disabled
		imageResult.ProxyThumb = "/imgproxy?url=" + imageResult.Thumb
		imageResult.ProxyFull = "/imgproxy?url=" + imageResult.Full
	}
	return imageResult
}
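
// Cached files are named by the hex MD5 of the full-size image URL, so a
// given URL maps to one cache file no matter which query produced it. The
// goroutine in prepareImageResult takes the result and URL as parameters
// (rather than capturing them), so each background cache job works on its
// own copy of the data.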

func wrapImageSearchFunc(f func(string, string, string, int) ([]ImageSearchResult, time.Duration, error)) func(string, string, string, int) ([]SearchResult, time.Duration, error) {
	return func(query, safe, lang string, page int) ([]SearchResult, time.Duration, error) {
		imageResults, duration, err := f(query, safe, lang, page)
		if err != nil {
			return nil, duration, err
		}
		searchResults := make([]SearchResult, len(imageResults))
		for i, result := range imageResults {
			searchResults[i] = result
		}
		return searchResults, duration, nil
	}
}
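
// Note: the element-by-element copy in wrapImageSearchFunc is required
// because Go slices are not covariant: a []ImageSearchResult cannot be used
// as a []SearchResult even when ImageSearchResult satisfies the SearchResult
// interface, so each element has to be converted individually.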

// func isValidImageURL(imageURL string) bool {
// 	client := &http.Client{
// 		Timeout: 10 * time.Second,
// 		Transport: &http.Transport{
// 			TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
// 		},
// 	}

// 	req, err := http.NewRequest("GET", imageURL, nil)
// 	if err != nil {
// 		return false
// 	}

// 	// Set headers to mimic a real browser
// 	req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "+
// 		"AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.141 Safari/537.36")
// 	req.Header.Set("Accept", "image/webp,image/*,*/*;q=0.8")
// 	req.Header.Set("Accept-Language", "en-US,en;q=0.9")
// 	req.Header.Set("Referer", imageURL) // Some servers require a referer

// 	resp, err := client.Do(req)
// 	if err != nil {
// 		return false
// 	}
// 	defer resp.Body.Close()

// 	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
// 		return false
// 	}

// 	// Limit the amount of data read to 10KB
// 	limitedReader := io.LimitReader(resp.Body, 10240) // 10KB

// 	// Attempt to decode image configuration
// 	_, _, err = image.DecodeConfig(limitedReader)
// 	if err != nil {
// 		return false
// 	}

// 	return true
// }

// // This function can be used as an alternative to isValidImageURL(); it is slower but more reliable.
// func isImageAccessible(imageURL string) bool {
// 	client := &http.Client{
// 		Timeout: 5 * time.Second,
// 		CheckRedirect: func(req *http.Request, via []*http.Request) error {
// 			if len(via) >= 10 {
// 				return http.ErrUseLastResponse
// 			}
// 			return nil
// 		},
// 	}

// 	resp, err := client.Get(imageURL)
// 	if err != nil {
// 		return false
// 	}
// 	defer resp.Body.Close()

// 	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
// 		return false
// 	}

// 	// Read the entire image data
// 	data, err := io.ReadAll(resp.Body)
// 	if err != nil {
// 		return false
// 	}

// 	// Try to decode the image
// 	_, _, err = image.Decode(bytes.NewReader(data))
// 	if err != nil {
// 		return false
// 	}

// 	return true
// }