package main

import (
	"encoding/json"
	"fmt"
	"log"
	"math"
	"net/http"
	"net/url"
	"time"
)

// PerformRedditSearch queries Reddit's public search endpoint and converts
// the returned posts into ForumSearchResult values.
//
// query is the raw user query (escaped before use), safe == "active" filters
// out posts flagged over_18, and page selects a zero-based page of 25 results.
// Rate-limited responses (HTTP 429) are retried with exponential backoff;
// malformed posts in the response are skipped rather than causing a panic.
func PerformRedditSearch(query string, safe string, page int) ([]ForumSearchResult, error) {
	const (
		pageSize       = 25
		baseURL        = "https://www.reddit.com"
		maxRetries     = 5
		initialBackoff = 2 * time.Second
	)
	var results []ForumSearchResult

	// NOTE(review): Reddit's listing API paginates with an "after" cursor;
	// the "start" offset below may be ignored server-side — confirm against
	// the API before relying on deep pagination.
	searchURL := fmt.Sprintf("%s/search.json?q=%s&limit=%d&start=%d", baseURL, url.QueryEscape(query), pageSize, page*pageSize)
	var resp *http.Response
	var err error

	// Retry with exponential backoff while the server rate-limits us.
	for i := 0; i <= maxRetries; i++ {
		resp, err = http.Get(searchURL)
		if err != nil {
			return nil, fmt.Errorf("making request: %v", err)
		}
		if resp.StatusCode != http.StatusTooManyRequests {
			break
		}

		// Close the body before retrying; the original code leaked one
		// response body per retry iteration.
		resp.Body.Close()

		backoff := time.Duration(math.Pow(2, float64(i))) * initialBackoff
		time.Sleep(backoff)
	}
	// err is always nil here (the loop returns on error), so resp is valid.
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
	}

	var searchResults map[string]interface{}
	if err := json.NewDecoder(resp.Body).Decode(&searchResults); err != nil {
		return nil, fmt.Errorf("decoding response: %v", err)
	}

	data, ok := searchResults["data"].(map[string]interface{})
	if !ok {
		return nil, fmt.Errorf("no data field in response")
	}

	posts, ok := data["children"].([]interface{})
	if !ok {
		return nil, fmt.Errorf("no children field in data")
	}

	for _, post := range posts {
		// Every type assertion below is checked: a malformed or partial
		// post is skipped instead of panicking on server-controlled JSON.
		wrapper, ok := post.(map[string]interface{})
		if !ok {
			continue
		}
		postData, ok := wrapper["data"].(map[string]interface{})
		if !ok {
			continue
		}

		if safe == "active" {
			if over18, ok := postData["over_18"].(bool); ok && over18 {
				continue
			}
		}

		header, ok := postData["title"].(string)
		if !ok {
			continue
		}
		// selftext may legitimately be absent/empty; default to "".
		description, _ := postData["selftext"].(string)
		if len(description) > 500 {
			cut := 500
			// Back off past any UTF-8 continuation bytes so we never slice
			// a multi-byte rune in half.
			for cut > 0 && description[cut]&0xC0 == 0x80 {
				cut--
			}
			description = description[:cut] + "..."
		}
		createdUTC, ok := postData["created_utc"].(float64)
		if !ok {
			continue
		}
		publishedDate := time.Unix(int64(createdUTC), 0)
		permalink, ok := postData["permalink"].(string)
		if !ok {
			continue
		}
		resultURL := fmt.Sprintf("%s%s", baseURL, permalink)

		result := ForumSearchResult{
			URL:           resultURL,
			Header:        header,
			Description:   description,
			PublishedDate: publishedDate,
		}

		// Only attach media when the thumbnail parses as an absolute URL;
		// Reddit uses placeholder strings ("self", "default") otherwise.
		if thumbnail, ok := postData["thumbnail"].(string); ok {
			if parsedURL, err := url.Parse(thumbnail); err == nil && parsedURL.Scheme != "" {
				if imgSrc, ok := postData["url"].(string); ok {
					result.ImgSrc = imgSrc
				}
				result.ThumbnailSrc = thumbnail
			}
		}

		results = append(results, result)
	}

	return results, nil
}

// handleForumsSearch serves the forum-search results page: it runs the
// primary Reddit search, falls back to other nodes when the primary search
// errors or returns nothing, and renders forums.html with the results plus
// the user's display settings.
func handleForumsSearch(w http.ResponseWriter, settings UserSettings, query string, page int) {
	// Must match PerformRedditSearch's page size for HasNextPage to be right.
	const pageSize = 25

	// Time only the fetch, not the template rendering below.
	startTime := time.Now()

	results, err := PerformRedditSearch(query, settings.SafeSearch, page)
	if err != nil || len(results) == 0 {
		// Log the underlying error (nil when the search merely came back
		// empty) so fallback causes are diagnosable.
		log.Printf("No results from primary search (err: %v), trying other nodes", err)
		results = tryOtherNodesForForumSearch(query, settings.SafeSearch, settings.SearchLanguage, page)
	}

	elapsedTime := time.Since(startTime)

	// Data passed through to the forums.html template.
	data := map[string]interface{}{
		"Query":   query,
		"Results": results,
		"Page":    page,
		// Human-readable fetch duration, e.g. "0.42 seconds" (localized).
		"Fetched":     fmt.Sprintf("%.2f %s", elapsedTime.Seconds(), Translate("seconds")),
		"HasPrevPage": page > 1,
		// A full page suggests more results exist; a short page is the last.
		"HasNextPage":     len(results) == pageSize,
		"LanguageOptions": languageOptions,
		"CurrentLang":     settings.SearchLanguage,
		"Theme":           settings.Theme,
		"Safe":            settings.SafeSearch,
		"IsThemeDark":     settings.IsThemeDark,
	}

	renderTemplate(w, "forums.html", data)
}