hopefully fixed dynamic image loading
parent 3861fdc81c
commit ccba37021a
5 changed files with 250 additions and 269 deletions
images.go (169 lines changed)
@@ -19,11 +19,24 @@ func init() {
     }
 }
 
-func handleImageSearch(w http.ResponseWriter, settings UserSettings, query string, page int) {
+func handleImageSearch(w http.ResponseWriter, r *http.Request, settings UserSettings, query string, page int) {
     startTime := time.Now()
 
-    cacheKey := CacheKey{Query: query, Page: page, Safe: settings.SafeSearch == "active", Lang: settings.SearchLanguage, Type: "image"}
-    combinedResults := getImageResultsFromCacheOrFetch(cacheKey, query, settings.SafeSearch, settings.SearchLanguage, page)
+    cacheKey := CacheKey{
+        Query: query,
+        Page:  page,
+        Safe:  settings.SafeSearch == "active",
+        Lang:  settings.SearchLanguage,
+        Type:  "image",
+    }
+
+    // Check if JavaScript is disabled
+    jsDisabled := r.URL.Query().Get("js_disabled") == "true"
+
+    // Determine if we should cache images synchronously
+    synchronous := jsDisabled
+
+    combinedResults := getImageResultsFromCacheOrFetch(cacheKey, query, settings.SafeSearch, settings.SearchLanguage, page, synchronous)
 
     elapsedTime := time.Since(startTime)
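For context on the js_disabled switch above: a minimal sketch, not part of this commit, of driving handleImageSearch through the no-JS path with net/http/httptest so image caching happens synchronously before the page renders. The package name, the "/images" route, the "q" parameter, and an already-initialized config and results cache are assumptions on top of what the diff shows.

package main // assumed package name

import (
    "net/http"
    "net/http/httptest"
    "testing"
)

// TestImageSearchNoJS (hypothetical) exercises the synchronous path that
// js_disabled=true now selects in handleImageSearch.
func TestImageSearchNoJS(t *testing.T) {
    // The "/images" route and "q" parameter are assumptions; only
    // "js_disabled" is taken from this diff.
    req := httptest.NewRequest(http.MethodGet, "/images?q=sunset&js_disabled=true", nil)
    rec := httptest.NewRecorder()

    // UserSettings fields other than those visible in this diff are left at
    // their zero values.
    handleImageSearch(rec, req, UserSettings{SafeSearch: "active"}, "sunset", 1)

    if rec.Code != http.StatusOK {
        t.Fatalf("expected HTTP 200, got %d", rec.Code)
    }
}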
@@ -31,7 +44,7 @@ func handleImageSearch(w http.ResponseWriter, settings UserSettings, query string, page int) {
     data := map[string]interface{}{
         "Results": combinedResults,
         "Query": query,
-        "Fetched": fmt.Sprintf("%.2f %s", elapsedTime.Seconds(), Translate("seconds")), // Time for fetching
+        "Fetched": fmt.Sprintf("%.2f %s", elapsedTime.Seconds(), Translate("seconds")),
         "Page": page,
         "HasPrevPage": page > 1,
         "HasNextPage": len(combinedResults) >= 50,
@@ -41,14 +54,15 @@ func handleImageSearch(w http.ResponseWriter, settings UserSettings, query string, page int) {
         "Theme": settings.Theme,
         "Safe": settings.SafeSearch,
         "IsThemeDark": settings.IsThemeDark,
-        "HardCacheEnabled": config.HardCacheDuration == 0,
+        "HardCacheEnabled": config.HardCacheEnabled,
+        "JsDisabled": jsDisabled,
     }
 
-    // Render the template without measuring the time
+    // Render the full page
     renderTemplate(w, "images.html", data)
 }
 
-func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page int) []ImageSearchResult {
+func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page int, synchronous bool) []ImageSearchResult {
     cacheChan := make(chan []SearchResult)
     var combinedResults []ImageSearchResult
@@ -66,7 +80,7 @@ func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page int) []ImageSearchResult {
     select {
     case results := <-cacheChan:
         if results == nil {
-            combinedResults = fetchImageResults(query, safe, lang, page)
+            combinedResults = fetchImageResults(query, safe, lang, page, synchronous)
             if len(combinedResults) > 0 {
                 combinedResults = filterValidImages(combinedResults)
                 resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
@@ -77,7 +91,7 @@ func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page int) []ImageSearchResult {
         }
     case <-time.After(2 * time.Second):
         printInfo("Cache check timeout")
-        combinedResults = fetchImageResults(query, safe, lang, page)
+        combinedResults = fetchImageResults(query, safe, lang, page, synchronous)
         if len(combinedResults) > 0 {
             combinedResults = filterValidImages(combinedResults)
             resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
@@ -87,7 +101,7 @@ func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page int) []ImageSearchResult {
     return combinedResults
 }
 
-func fetchImageResults(query, safe, lang string, page int) []ImageSearchResult {
+func fetchImageResults(query, safe, lang string, page int, synchronous bool) []ImageSearchResult {
     var results []ImageSearchResult
     engineCount := len(imageSearchEngines)
     safeBool := safe == "active"
@@ -99,9 +113,6 @@ func fetchImageResults(query, safe, lang string, page int) []ImageSearchResult {
     // Calculate the specific page number for the selected engine
     enginePage := (page-1)/engineCount + 1
 
-    // Debug print to verify the selected engine and page
-    printInfo("Fetching image results for overall page %d using engine: %s (engine page %d)", page, engine.Name, enginePage)
-
     // Fetch results from the selected engine
     searchResults, _, err := engine.Func(query, safe, lang, enginePage)
     if err != nil {
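A quick worked example of the enginePage calculation kept by this hunk; the engine count of 3 is assumed for illustration and not taken from the diff.

package main

import "fmt"

// Sketch: with engineCount = 3, overall pages 1-3 map to engine page 1,
// pages 4-6 map to engine page 2, and so on, because each engine page is
// reused for engineCount consecutive overall pages.
func main() {
    engineCount := 3
    for page := 1; page <= 7; page++ {
        enginePage := (page-1)/engineCount + 1
        fmt.Printf("overall page %d -> engine page %d\n", page, enginePage)
    }
}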
@@ -118,19 +129,26 @@ func fetchImageResults(query, safe, lang string, page int) []ImageSearchResult {
             imageResult.ID = hash
             imageResult.ProxyFull = "/image_cache/" + filename
 
-            // Start caching and validation in the background
-            go func(imgResult ImageSearchResult, originalURL, filename string) {
-                _, success, err := cacheImage(originalURL, filename, imgResult.ID)
-                if err != nil {
-                    printWarn("Failed to cache image %s: %v", originalURL, err)
-                }
-                if !success {
-                    removeImageResultFromCache(query, page, safeBool, lang, imgResult.ID)
-                }
-            }(imageResult, imageResult.Full, filename)
+            if synchronous {
+                // Synchronously cache the image
+                _, success, err := cacheImage(imageResult.Full, filename, imageResult.ID)
+                if err != nil || !success {
+                    printWarn("Failed to cache image %s: %v", imageResult.Full, err)
+                    // Fallback to proxy URL
+                    imageResult.ProxyFull = "/imgproxy?url=" + imageResult.Full
+                }
+            } else {
+                // Start caching and validation in the background
+                go func(imgResult ImageSearchResult, originalURL, filename string) {
+                    _, success, err := cacheImage(originalURL, filename, imgResult.ID)
+                    if err != nil || !success {
+                        printWarn("Failed to cache image %s: %v", originalURL, err)
+                        removeImageResultFromCache(query, page, safeBool, lang, imgResult.ID)
+                    }
+                }(imageResult, imageResult.Full, filename)
+            }
         } else {
             // Use proxied URLs when hard cache is disabled
             imageResult.ProxyThumb = "/imgproxy?url=" + imageResult.Thumb
             imageResult.ProxyFull = "/imgproxy?url=" + imageResult.Full
         }
         results = append(results, imageResult)
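The hunk above boils down to one serving decision: serve the locally hard-cached file when the synchronous cache write succeeds, otherwise fall back to the /imgproxy endpoint, which is also what the non-hard-cache branch uses. A minimal sketch of that decision as a standalone helper; chooseProxyURL and its arguments are hypothetical names, not part of the commit.

package main

import "fmt"

// chooseProxyURL (hypothetical) spells out the serving decision made above:
// return the hard-cache path only when hard caching is enabled and the
// synchronous cache write succeeded; otherwise fall back to the image proxy.
func chooseProxyURL(hardCacheEnabled, cachedOK bool, filename, fullURL string) string {
    if hardCacheEnabled && cachedOK {
        return "/image_cache/" + filename
    }
    return "/imgproxy?url=" + fullURL
}

func main() {
    // Example values; the filename format is illustrative only.
    fmt.Println(chooseProxyURL(true, true, "abc123.webp", "https://example.com/cat.jpg"))  // /image_cache/abc123.webp
    fmt.Println(chooseProxyURL(true, false, "abc123.webp", "https://example.com/cat.jpg")) // /imgproxy?url=https://example.com/cat.jpg
}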
@@ -161,16 +179,30 @@ func fetchImageResults(query, safe, lang string, page int) []ImageSearchResult {
             imageResult.ID = hash
             imageResult.ProxyFull = "/image_cache/" + filename
 
-            // Start caching and validation in the background
-            go func(imgResult ImageSearchResult, originalURL, filename string) {
-                _, success, err := cacheImage(originalURL, filename, imgResult.ID)
-                if err != nil {
-                    printWarn("Failed to cache image %s: %v", originalURL, err)
-                }
-                if !success {
-                    removeImageResultFromCache(query, page, safeBool, lang, imgResult.ID)
-                }
-            }(imageResult, imageResult.Full, filename)
+            if synchronous {
+                // Synchronously cache the image
+                _, success, err := cacheImage(imageResult.Full, filename, imageResult.ID)
+                if err != nil {
+                    printWarn("Failed to cache image %s: %v", imageResult.Full, err)
+                    // Skip this image
+                    continue
+                }
+                if !success {
+                    removeImageResultFromCache(query, page, safeBool, lang, imageResult.ID)
+                    // Skip this image
+                    continue
+                }
+            } else {
+                // Start caching and validation in the background
+                go func(imgResult ImageSearchResult, originalURL, filename string) {
+                    _, success, err := cacheImage(originalURL, filename, imgResult.ID)
+                    if err != nil {
+                        printWarn("Failed to cache image %s: %v", originalURL, err)
+                    }
+                    if !success {
+                        removeImageResultFromCache(query, page, safeBool, lang, imgResult.ID)
+                    }
+                }(imageResult, imageResult.Full, filename)
+            }
         } else {
             // Use proxied URLs when hard cache is disabled
             imageResult.ProxyThumb = "/imgproxy?url=" + imageResult.Thumb
@@ -204,82 +236,3 @@ func wrapImageSearchFunc(f func(string, string, string, int) ([]ImageSearchResul
         return searchResults, duration, nil
     }
 }
-
-// func isValidImageURL(imageURL string) bool {
-//     client := &http.Client{
-//         Timeout: 10 * time.Second,
-//         Transport: &http.Transport{
-//             TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
-//         },
-//     }
-
-//     req, err := http.NewRequest("GET", imageURL, nil)
-//     if err != nil {
-//         return false
-//     }
-
-//     // Set headers to mimic a real browser
-//     req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "+
-//         "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.141 Safari/537.36")
-//     req.Header.Set("Accept", "image/webp,image/*,*/*;q=0.8")
-//     req.Header.Set("Accept-Language", "en-US,en;q=0.9")
-//     req.Header.Set("Referer", imageURL) // Some servers require a referer
-
-//     resp, err := client.Do(req)
-//     if err != nil {
-//         return false
-//     }
-//     defer resp.Body.Close()
-
-//     if resp.StatusCode < 200 || resp.StatusCode >= 400 {
-//         return false
-//     }
-
-//     // Limit the amount of data read to 10KB
-//     limitedReader := io.LimitReader(resp.Body, 10240) // 10KB
-
-//     // Attempt to decode image configuration
-//     _, _, err = image.DecodeConfig(limitedReader)
-//     if err != nil {
-//         return false
-//     }
-
-//     return true
-// }
-
-// // This function can be used alternatively to isValidImageURL(), Its slower but reliable
-// func isImageAccessible(imageURL string) bool {
-//     client := &http.Client{
-//         Timeout: 5 * time.Second,
-//         CheckRedirect: func(req *http.Request, via []*http.Request) error {
-//             if len(via) >= 10 {
-//                 return http.ErrUseLastResponse
-//             }
-//             return nil
-//         },
-//     }
-
-//     resp, err := client.Get(imageURL)
-//     if err != nil {
-//         return false
-//     }
-//     defer resp.Body.Close()
-
-//     if resp.StatusCode < 200 || resp.StatusCode >= 400 {
-//         return false
-//     }
-
-//     // Read the entire image data
-//     data, err := io.ReadAll(resp.Body)
-//     if err != nil {
-//         return false
-//     }
-
-//     // Try to decode the image
-//     _, _, err = image.Decode(bytes.NewReader(data))
-//     if err != nil {
-//         return false
-//     }
-
-//     return true
-// }