Search/images.go

package main

import (
	"crypto/md5"
	"encoding/hex"
	"fmt"
	"net/http"
	"time"
)
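
// imageSearchEngines lists the image back-ends queried by fetchImageResults;
// engines are tried in order and the first one that returns results wins.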
var imageSearchEngines []SearchEngine

func init() {
	imageSearchEngines = []SearchEngine{
		{Name: "Qwant", Func: wrapImageSearchFunc(PerformQwantImageSearch)},
		{Name: "Bing", Func: wrapImageSearchFunc(PerformBingImageSearch)},
		{Name: "DeviantArt", Func: wrapImageSearchFunc(PerformDeviantArtImageSearch)},
		//{Name: "Imgur", Func: wrapImageSearchFunc(PerformImgurImageSearch), Weight: 4}, // Image proxy not working
	}
}
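
// handleImageSearch renders the image results page for a query, using the
// shared results cache when possible and reporting how long the fetch took.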
func handleImageSearch(w http.ResponseWriter, settings UserSettings, query string, page int) {
	startTime := time.Now()

	cacheKey := CacheKey{Query: query, Page: page, Safe: settings.SafeSearch == "active", Lang: settings.SearchLanguage, Type: "image"}
	combinedResults := getImageResultsFromCacheOrFetch(cacheKey, query, settings.SafeSearch, settings.SearchLanguage, page)

	elapsedTime := time.Since(startTime)

	// Prepare the data to pass to the template
	data := map[string]interface{}{
		"Results":          combinedResults,
		"Query":            query,
		"Fetched":          fmt.Sprintf("%.2f %s", elapsedTime.Seconds(), Translate("seconds")), // Time for fetching
		"Page":             page,
		"HasPrevPage":      page > 1,
		"HasNextPage":      len(combinedResults) >= 50,
		"NoResults":        len(combinedResults) == 0,
		"LanguageOptions":  languageOptions,
		"CurrentLang":      settings.SearchLanguage,
		"Theme":            settings.Theme,
		"Safe":             settings.SafeSearch,
		"IsThemeDark":      settings.IsThemeDark,
		"HardCacheEnabled": config.HardCacheDuration == 0,
	}

	// Render the template without measuring the time
	renderTemplate(w, "images.html", data)
}
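
// getImageResultsFromCacheOrFetch looks the query up in the results cache and
// falls back to a live fetch on a cache miss, or when the cache lookup takes
// longer than two seconds.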
func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page int) []ImageSearchResult {
	cacheChan := make(chan []SearchResult)
	var combinedResults []ImageSearchResult

	go func() {
		results, exists := resultsCache.Get(cacheKey)
		if exists {
			printInfo("Cache hit")
			cacheChan <- results
		} else {
			printInfo("Cache miss")
			cacheChan <- nil
		}
	}()

	select {
	case results := <-cacheChan:
		if results == nil {
			combinedResults = fetchImageResults(query, safe, lang, page)
			if len(combinedResults) > 0 {
				combinedResults = filterValidImages(combinedResults)
				resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
			}
		} else {
			_, _, imageResults := convertToSpecificResults(results)
			combinedResults = filterValidImages(imageResults)
		}
	case <-time.After(2 * time.Second):
		printInfo("Cache check timeout")
		combinedResults = fetchImageResults(query, safe, lang, page)
		if len(combinedResults) > 0 {
			combinedResults = filterValidImages(combinedResults)
			resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
		}
	}

	return combinedResults
}
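
// fetchImageResults queries the configured image engines in order, stopping at
// the first engine that returns results. Result URLs are rewritten to either
// the local hard cache or the imgproxy endpoints, and other nodes are tried
// when no engine returns anything.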
func fetchImageResults(query, safe, lang string, page int) []ImageSearchResult {
	var results []ImageSearchResult
	safeBool := safe == "active"

	for _, engine := range imageSearchEngines {
		printInfo("Using image search engine: %s", engine.Name)

		searchResults, _, err := engine.Func(query, safe, lang, page)
		if err != nil {
			printWarn("Error performing image search with %s: %v", engine.Name, err)
			continue
		}

		for _, result := range searchResults {
			imageResult := result.(ImageSearchResult)
			if config.HardCacheEnabled {
				// Generate hash from the original full-size image URL
				hasher := md5.New()
				hasher.Write([]byte(imageResult.Full))
				hash := hex.EncodeToString(hasher.Sum(nil))
				filename := hash + ".webp"

				// Assign the ID
				imageResult.ID = hash

				// Set the ProxyFull URL
				imageResult.ProxyFull = "/image_cache/" + filename

				// Start caching and validation in the background
				go func(imgResult ImageSearchResult, originalURL, filename string) {
					_, success, err := cacheImage(originalURL, filename, imgResult.ID)
					if err != nil {
						printWarn("Failed to cache image %s: %v", originalURL, err)
					}
					if !success {
						// Remove the image result from the cache
						removeImageResultFromCache(query, page, safeBool, lang, imgResult.ID)
					}
				}(imageResult, imageResult.Full, filename)
			} else {
				// When hard cache is not enabled, use the imgproxy URLs
				imageResult.ProxyThumb = "/imgproxy?url=" + imageResult.Thumb // Proxied thumbnail
				imageResult.ProxyFull = "/imgproxy?url=" + imageResult.Full   // Proxied full-size image
			}
			results = append(results, imageResult)
		}

		if len(results) > 0 {
			break
		}
	}

	if len(results) == 0 {
		printWarn("No image results found for query: %s, trying other nodes", query)
		results = tryOtherNodesForImageSearch(query, safe, lang, page, []string{hostID})
	}

	return results
}
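
// wrapImageSearchFunc adapts an image-specific search function to the generic
// SearchEngine signature by converting its results into []SearchResult.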
func wrapImageSearchFunc(f func(string, string, string, int) ([]ImageSearchResult, time.Duration, error)) func(string, string, string, int) ([]SearchResult, time.Duration, error) {
	return func(query, safe, lang string, page int) ([]SearchResult, time.Duration, error) {
		imageResults, duration, err := f(query, safe, lang, page)
		if err != nil {
			return nil, duration, err
		}

		searchResults := make([]SearchResult, len(imageResults))
		for i, result := range imageResults {
			searchResults[i] = result
		}

		return searchResults, duration, nil
	}
}
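
// The two helpers below are kept commented out as alternative image
// validation strategies.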
// func isValidImageURL(imageURL string) bool {
// 	client := &http.Client{
// 		Timeout: 10 * time.Second,
// 		Transport: &http.Transport{
// 			TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
// 		},
// 	}
// 	req, err := http.NewRequest("GET", imageURL, nil)
// 	if err != nil {
// 		return false
// 	}
// 	// Set headers to mimic a real browser
// 	req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "+
// 		"AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.141 Safari/537.36")
// 	req.Header.Set("Accept", "image/webp,image/*,*/*;q=0.8")
// 	req.Header.Set("Accept-Language", "en-US,en;q=0.9")
// 	req.Header.Set("Referer", imageURL) // Some servers require a referer
// 	resp, err := client.Do(req)
// 	if err != nil {
// 		return false
// 	}
// 	defer resp.Body.Close()
// 	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
// 		return false
// 	}
// 	// Limit the amount of data read to 10KB
// 	limitedReader := io.LimitReader(resp.Body, 10240) // 10KB
// 	// Attempt to decode image configuration
// 	_, _, err = image.DecodeConfig(limitedReader)
// 	if err != nil {
// 		return false
// 	}
// 	return true
// }
// // This function can be used as an alternative to isValidImageURL(); it is slower but more reliable.
// func isImageAccessible(imageURL string) bool {
// 	client := &http.Client{
// 		Timeout: 5 * time.Second,
// 		CheckRedirect: func(req *http.Request, via []*http.Request) error {
// 			if len(via) >= 10 {
// 				return http.ErrUseLastResponse
// 			}
// 			return nil
// 		},
// 	}
// 	resp, err := client.Get(imageURL)
// 	if err != nil {
// 		return false
// 	}
// 	defer resp.Body.Close()
// 	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
// 		return false
// 	}
// 	// Read the entire image data
// 	data, err := io.ReadAll(resp.Body)
// 	if err != nil {
// 		return false
// 	}
// 	// Try to decode the image
// 	_, _, err = image.Decode(bytes.NewReader(data))
// 	if err != nil {
// 		return false
// 	}
// 	return true
// }