package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"net/url"
	"time"
)

func PerformRedditSearch(query string, safe string, page int) ([]ForumSearchResult, error) {
	if !config.MetaSearchEnabled {
		printDebug("Crawler is disabled; skipping forum search.")
		return []ForumSearchResult{}, nil
	}

	const (
		pageSize = 25
		baseURL  = "https://www.reddit.com"
	)

	var results []ForumSearchResult
	// Pages are 1-indexed by the handler (see HasPrevPage there), so page 1
	// must map to offset 0.
	offset := (page - 1) * pageSize
	if offset < 0 {
		offset = 0
	}
	// NOTE: Reddit's listing API paginates with "after"/"before" tokens; a
	// numeric "start" offset is not part of the documented API and may be
	// ignored, so pages past the first can repeat results.
	searchURL := fmt.Sprintf("%s/search.json?q=%s&limit=%d&start=%d",
		baseURL,
		url.QueryEscape(query),
		pageSize,
		offset,
	)

	// Create request
	req, err := http.NewRequest(http.MethodGet, searchURL, nil)
	if err != nil {
		return nil, fmt.Errorf("creating request: %w", err)
	}

	// Set User-Agent
	userAgent, uaErr := GetUserAgent("Reddit-Forum-Search")
	if uaErr != nil {
		return nil, fmt.Errorf("getting user agent: %v", uaErr)
	}
	req.Header.Set("User-Agent", userAgent)

	// Make request using MetaProxy logic
	resp, err := DoMetaProxyRequest(req)
	if err != nil {
		return nil, fmt.Errorf("making request: %v", err)
	}
	defer resp.Body.Close()

	// Validate response status
	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
	}

	// Parse JSON response
	var searchResults map[string]interface{}
	if err := json.NewDecoder(resp.Body).Decode(&searchResults); err != nil {
		return nil, fmt.Errorf("decoding response: %v", err)
	}
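
	// Aside (sketch only): decoding into map[string]interface{} keeps this
	// code loose at the cost of the type assertions below. A typed decode of
	// the same listing shape (data.children[].data) would look roughly like:
	//
	//	var listing struct {
	//		Data struct {
	//			Children []struct {
	//				Data map[string]interface{} `json:"data"`
	//			} `json:"children"`
	//		} `json:"data"`
	//	}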

	data, ok := searchResults["data"].(map[string]interface{})
	if !ok {
		return nil, fmt.Errorf("no data field in response")
	}

	posts, ok := data["children"].([]interface{})
	if !ok {
		return nil, fmt.Errorf("no children field in data")
	}

	// Extract search results, using comma-ok assertions so a missing or
	// mistyped field skips the post instead of panicking.
	for _, post := range posts {
		wrapper, ok := post.(map[string]interface{})
		if !ok {
			continue
		}
		postData, ok := wrapper["data"].(map[string]interface{})
		if !ok {
			continue
		}

		if nsfw, ok := postData["over_18"].(bool); ok && safe == "active" && nsfw {
			continue
		}

		header, _ := postData["title"].(string)
		description, _ := postData["selftext"].(string)
		// Truncate long self-texts on a rune boundary so multi-byte
		// characters are not split.
		if runes := []rune(description); len(runes) > 500 {
			description = string(runes[:500]) + "..."
		}

		createdUTC, _ := postData["created_utc"].(float64)
		publishedDate := time.Unix(int64(createdUTC), 0)

		permalink, _ := postData["permalink"].(string)
		resultURL := fmt.Sprintf("%s%s", baseURL, permalink)

		result := ForumSearchResult{
			URL:           resultURL,
			Header:        header,
			Description:   description,
			PublishedDate: publishedDate,
		}

		// Reddit uses placeholder values such as "self", "default", and
		// "nsfw" when a post has no thumbnail; requiring a URL scheme
		// filters those out.
		if thumbnail, ok := postData["thumbnail"].(string); ok {
			if parsedURL, err := url.Parse(thumbnail); err == nil && parsedURL.Scheme != "" {
				if imgSrc, ok := postData["url"].(string); ok {
					result.ImgSrc = imgSrc
				}
				result.ThumbnailSrc = thumbnail
			}
		}

		results = append(results, result)
	}

	return results, nil
}
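
// Usage sketch (illustrative only; assumes package config and the
// user-agent source are already initialized):
//
//	results, err := PerformRedditSearch("golang generics", "active", 1)
//	if err != nil {
//		// handle/log the error
//	}
//	for _, r := range results {
//		fmt.Println(r.Header, r.URL)
//	}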

func handleForumsSearch(w http.ResponseWriter, settings UserSettings, query string, page int) {
	// Start measuring the time for fetching results
	startTime := time.Now()

	cacheKey := CacheKey{Query: query, Page: page, Safe: settings.SafeSearch == "active", Lang: settings.SearchLanguage, Type: "forum"}
	results := getForumResultsFromCacheOrFetch(cacheKey, query, settings.SafeSearch, settings.SearchLanguage, page)

	// Measure the elapsed time for fetching results
	elapsedTime := time.Since(startTime)

	// Prepare the data to pass to the template
	data := map[string]interface{}{
		"Query":           query,
		"Results":         results,
		"Page":            page,
		"Fetched":         FormatElapsedTime(elapsedTime),
		"HasPrevPage":     page > 1,
		"HasNextPage":     len(results) >= 25,
		"NoResults":       len(results) == 0,
		"LanguageOptions": languageOptions,
		"CurrentLang":     settings.SearchLanguage,
		"Theme":           settings.Theme,
		"Safe":            settings.SafeSearch,
		"IsThemeDark":     settings.IsThemeDark,
	}

	// Render the results template (render time is intentionally excluded
	// from the fetch measurement above)
	renderTemplate(w, "forums.html", data)
}
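
// Wiring sketch (hypothetical; actual route registration lives elsewhere,
// and loadUserSettings is an assumed helper, not part of this file):
//
//	http.HandleFunc("/forums", func(w http.ResponseWriter, r *http.Request) {
//		settings := loadUserSettings(r)
//		page, err := strconv.Atoi(r.URL.Query().Get("p"))
//		if err != nil || page < 1 {
//			page = 1
//		}
//		handleForumsSearch(w, settings, r.URL.Query().Get("q"), page)
//	})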

func getForumResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page int) []ForumSearchResult {
	// Buffer the channel so the cache-lookup goroutine can always send its
	// result and exit, even after the select below has timed out.
	cacheChan := make(chan []SearchResult, 1)
	var combinedResults []ForumSearchResult

	go func() {
		results, exists := resultsCache.Get(cacheKey)
		if exists {
			printDebug("Cache hit")
			cacheChan <- results
		} else {
			printDebug("Cache miss")
			cacheChan <- nil
		}
	}()

	select {
	case results := <-cacheChan:
		if results == nil {
			// Fetch only on a cache miss, and only when meta search is enabled
			if config.MetaSearchEnabled {
				combinedResults = fetchForumResults(query, safe, lang, page)
				if len(combinedResults) > 0 {
					resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
				}
			} else {
				printDebug("Crawler disabled; skipping fetching.")
			}
		} else {
			// Convert []SearchResult to []ForumSearchResult
			combinedResults = convertToForumResults(results)
		}
	case <-time.After(2 * time.Second):
		printDebug("Cache check timeout")
		if config.MetaSearchEnabled {
			combinedResults = fetchForumResults(query, safe, lang, page)
			if len(combinedResults) > 0 {
				resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
			}
		} else {
			printDebug("Crawler disabled; skipping fetching.")
		}
	}

	return combinedResults
}

func convertToForumResults(results []SearchResult) []ForumSearchResult {
	var forumResults []ForumSearchResult
	for _, r := range results {
		if res, ok := r.(ForumSearchResult); ok {
			forumResults = append(forumResults, res)
		}
	}
	return forumResults
}
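
// Test sketch (illustrative; a nil element exercises the failed type
// assertion path, since asserting on a nil interface yields ok == false):
//
//	func TestConvertToForumResults(t *testing.T) {
//		in := []SearchResult{ForumSearchResult{Header: "a"}, nil}
//		out := convertToForumResults(in)
//		if len(out) != 1 {
//			t.Fatalf("want 1 result, got %d", len(out))
//		}
//	}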