package main

import (
	"crypto/md5"
	"encoding/hex"
	"fmt"
	"net/http"
	"time"
)

// imageSearchEngines holds the image engines currently enabled, in the order
// they appear in config.MetaSearch.Image. Populated by initImageEngines.
var imageSearchEngines []SearchEngine

// allImageSearchEngines is the registry of every supported image search
// engine. Entries are copied into imageSearchEngines by name when the
// configuration is (re)loaded.
var allImageSearchEngines = []SearchEngine{
	{Name: "Qwant", Func: wrapImageSearchFunc(PerformQwantImageSearch)},
	{Name: "Bing", Func: wrapImageSearchFunc(PerformBingImageSearch)},
	{Name: "DeviantArt", Func: wrapImageSearchFunc(PerformDeviantArtImageSearch)},
	// {Name: "Imgur",    Func: wrapImageSearchFunc(PerformImgurImageSearch), Weight: 4}, // example
}

// initImageEngines rebuilds imageSearchEngines from the names listed in
// config.MetaSearch.Image, preserving the configured order. Names that do
// not match any entry in allImageSearchEngines are silently skipped.
func initImageEngines() {
	imageSearchEngines = nil

	for _, wanted := range config.MetaSearch.Image {
		for i := range allImageSearchEngines {
			if allImageSearchEngines[i].Name != wanted {
				continue
			}
			imageSearchEngines = append(imageSearchEngines, allImageSearchEngines[i])
			break
		}
	}
}

// handleImageSearch serves an image-search results page (or an AJAX fragment)
// for the given query and 1-based page number. Results come from the cache
// when available, otherwise from the configured image engines.
func handleImageSearch(w http.ResponseWriter, r *http.Request, settings UserSettings, query string, page int) {
	startTime := time.Now()

	// Cache entries are keyed by everything that can change the result set.
	cacheKey := CacheKey{
		Query: query,
		Page:  page,
		Safe:  settings.SafeSearch == "active",
		Lang:  settings.SearchLanguage,
		Type:  "image",
	}

	// Check if JavaScript is disabled
	jsDisabled := r.URL.Query().Get("js_disabled") == "true"

	// Determine if we should cache images synchronously
	// (no-JS clients cannot poll for lazily cached thumbnails).
	synchronous := jsDisabled

	combinedResults := getImageResultsFromCacheOrFetch(cacheKey, query, settings.SafeSearch, settings.SearchLanguage, page, synchronous)

	elapsedTime := time.Since(startTime)

	// Prepare the data to pass to the template
	data := map[string]interface{}{
		"Results": combinedResults,
		"Query":   query,
		"Fetched": fmt.Sprintf("%.2f %s", elapsedTime.Seconds(), Translate("seconds")),
		"Page":    page,
		// Pagination: assume another page exists whenever a full batch
		// (>= 50 results) came back. NOTE(review): this is a heuristic,
		// not an exact count from the engines.
		"HasPrevPage":      page > 1,
		"HasNextPage":      len(combinedResults) >= 50,
		"NoResults":        len(combinedResults) == 0,
		"LanguageOptions":  languageOptions,
		"CurrentLang":      settings.SearchLanguage,
		"Theme":            settings.Theme,
		"Safe":             settings.SafeSearch,
		"IsThemeDark":      settings.IsThemeDark,
		"HardCacheEnabled": config.DriveCacheEnabled,
		"JsDisabled":       jsDisabled,
	}

	if r.URL.Query().Get("ajax") == "true" {
		// Render only the images
		renderTemplate(w, "images_only.html", data)
		return
	}

	// Render the full page
	renderTemplate(w, "images.html", data)
}

// getImageResultsFromCacheOrFetch returns image results for cacheKey,
// preferring the results cache. On a cache miss — or if the cache lookup
// takes longer than two seconds — it fetches live results from the image
// engines (when the crawler is enabled), validates them, and stores
// non-empty result sets back into the cache.
func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page int, synchronous bool) []ImageSearchResult {
	// Buffered so the lookup goroutine can always complete its send even
	// after the timeout branch below has given up waiting. With an
	// unbuffered channel the goroutine would block on the send forever,
	// leaking one goroutine per timed-out request.
	cacheChan := make(chan []SearchResult, 1)

	go func() {
		results, exists := resultsCache.Get(cacheKey)
		if exists {
			printDebug("Cache hit")
			cacheChan <- results
		} else {
			printDebug("Cache miss")
			cacheChan <- nil
		}
	}()

	// fetchAndCache does a live fetch and caches validated, non-empty
	// result sets. Shared by the miss and timeout paths (previously
	// duplicated verbatim).
	fetchAndCache := func() []ImageSearchResult {
		if !config.MetaSearchEnabled {
			printDebug("Crawler disabled; skipping fetching from image search engines.")
			return nil
		}
		combined := fetchImageResults(query, safe, lang, page, synchronous)
		if len(combined) > 0 {
			combined = filterValidImages(combined)
			resultsCache.Set(cacheKey, convertToSearchResults(combined))
		}
		return combined
	}

	select {
	case results := <-cacheChan:
		if results == nil {
			return fetchAndCache()
		}
		_, _, imageResults, _ := convertToSpecificResults(results)
		return filterValidImages(imageResults)
	case <-time.After(2 * time.Second):
		printDebug("Cache check timeout")
		return fetchAndCache()
	}
}

// fetchImageResults fetches one engine-page of image results for the given
// overall page. Engines are rotated per page: page 1 hits engine 0, page 2
// hits engine 1, and so on, with the per-engine page number advancing once
// per full rotation. If the selected engine yields nothing, the remaining
// engines are tried in rotation order until one returns results.
//
// NOTE(review): the synchronous parameter is currently unused here;
// thumbnail caching is always kicked off asynchronously below. Kept for
// interface compatibility with callers — confirm before removing.
func fetchImageResults(query, safe, lang string, page int, synchronous bool) []ImageSearchResult {
	var results []ImageSearchResult

	// Check if MetaSearchEnabled is false
	if !config.MetaSearchEnabled {
		printDebug("Crawler is disabled; skipping image search engine fetching.")
		return results
	}

	engineCount := len(imageSearchEngines)
	if engineCount == 0 {
		// Guard: the modulo/division below would panic with zero engines
		// (e.g. none configured, or initImageEngines matched nothing).
		printWarn("No image search engines enabled; returning no results.")
		return results
	}

	// Determine the engine to use based on the page number
	engineIndex := (page - 1) % engineCount
	engine := imageSearchEngines[engineIndex]

	// Calculate the specific page number for the selected engine. This is
	// the same for every engine in the rotation, so compute it once.
	enginePage := (page-1)/engineCount + 1

	// Fetch results from the selected engine
	searchResults, _, err := engine.Func(query, safe, lang, enginePage)
	if err != nil {
		printWarn("Error performing image search with %s: %v", engine.Name, err)
	} else {
		results = processImageSearchResults(searchResults, query, safe, lang, page)
	}

	// Fallback mechanism if no results are fetched from the selected engine
	if len(results) == 0 {
		printWarn("No image results found with engine %s, trying other engines.", engine.Name)
		for i := 1; i < engineCount; i++ {
			nextEngine := imageSearchEngines[(engineIndex+i)%engineCount]
			printInfo("Trying next image search engine: %s (engine page %d)", nextEngine.Name, enginePage)

			searchResults, _, err := nextEngine.Func(query, safe, lang, enginePage)
			if err != nil {
				printWarn("Error performing image search with %s: %v", nextEngine.Name, err)
				continue
			}
			results = processImageSearchResults(searchResults, query, safe, lang, page)
			if len(results) > 0 {
				break
			}
		}
	}

	// Final debug print to show the count of results fetched
	printInfo("Fetched %d image results for overall page %d", len(results), page)

	return results
}

// processImageSearchResults converts raw engine results into fully prepared
// ImageSearchResult values: it skips entries without a thumbnail, assigns a
// stable ID, registers the <id>_full / <id>_thumb proxy mappings, sets the
// proxy URLs, and (when the drive cache is enabled) starts asynchronous
// thumbnail caching. query/page/safe/lang identify the search so that a
// result whose thumbnail fails to cache can be evicted from the results
// cache. This logic was previously duplicated verbatim in the primary and
// fallback paths of fetchImageResults.
func processImageSearchResults(searchResults []SearchResult, query, safe, lang string, page int) []ImageSearchResult {
	var processed []ImageSearchResult

	for _, result := range searchResults {
		imageResult := result.(ImageSearchResult)

		// Skip image if thumbnail URL is empty
		if imageResult.Thumb == "" {
			printWarn("Skipping image with empty thumbnail URL. Full URL: %s", imageResult.Full)
			continue
		}

		// md5 here is only a short, stable identifier derived from the full
		// image URL for proxy lookup — not used for any security purpose.
		hasher := md5.New()
		hasher.Write([]byte(imageResult.Full))
		hash := hex.EncodeToString(hasher.Sum(nil))
		imageResult.ID = hash

		// Store mapping from imageID_full and imageID_thumb to URLs
		imageURLMapMu.Lock()
		imageURLMap[fmt.Sprintf("%s_full", hash)] = imageResult.Full
		imageURLMap[fmt.Sprintf("%s_thumb", hash)] = imageResult.Thumb
		imageURLMapMu.Unlock()

		// Set ProxyFull and ProxyThumb
		if config.DriveCacheEnabled {
			// Cache the thumbnail image asynchronously; on failure, evict
			// this result from the cached result set.
			go func(imgResult ImageSearchResult) {
				_, success, err := cacheImage(imgResult.Thumb, imgResult.ID, true)
				if err != nil || !success {
					printWarn("Failed to cache thumbnail image %s: %v", imgResult.Thumb, err)
					removeImageResultFromCache(query, page, safe == "active", lang, imgResult.ID)
				}
			}(imageResult)

			// Set ProxyThumb to the proxy URL (initially placeholder)
			imageResult.ProxyThumb = fmt.Sprintf("/image/%s_thumb.webp", hash)

			// Set ProxyFull to the proxy URL
			imageResult.ProxyFull = fmt.Sprintf("/image/%s_full", hash)
		} else {
			// Hard cache disabled, proxy both thumb and full images
			imageResult.ProxyThumb = fmt.Sprintf("/image/%s_thumb", hash)
			imageResult.ProxyFull = fmt.Sprintf("/image/%s_full", hash)
		}

		processed = append(processed, imageResult)
	}

	return processed
}

// wrapImageSearchFunc adapts an image-specific search function to the
// generic SearchEngine.Func signature by widening each ImageSearchResult
// into the SearchResult interface. Errors and the measured duration pass
// through unchanged.
func wrapImageSearchFunc(f func(string, string, string, int) ([]ImageSearchResult, time.Duration, error)) func(string, string, string, int) ([]SearchResult, time.Duration, error) {
	return func(query, safe, lang string, page int) ([]SearchResult, time.Duration, error) {
		imgs, took, err := f(query, safe, lang, page)
		if err != nil {
			return nil, took, err
		}
		wrapped := make([]SearchResult, 0, len(imgs))
		for _, r := range imgs {
			wrapped = append(wrapped, r)
		}
		return wrapped, took, nil
	}
}