map improvements + added forums search
This commit is contained in:
parent
c848c72aea
commit
8da387f8e9
12 changed files with 424 additions and 65 deletions
149 forums.go Normal file
@@ -0,0 +1,149 @@
// forums.go
package main

import (
    "encoding/json"
    "fmt"
    "html/template"
    "math"
    "net/http"
    "net/url"
    "time"
)

type ForumSearchResult struct {
    URL           string    `json:"url"`
    Header        string    `json:"header"`
    Description   string    `json:"description"`
    PublishedDate time.Time `json:"publishedDate"`
    ImgSrc        string    `json:"imgSrc,omitempty"`
    ThumbnailSrc  string    `json:"thumbnailSrc,omitempty"`
}

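// PerformRedditSearch queries Reddit's public search endpoint (search.json) for
// the given query and page, retrying with exponential backoff when Reddit
// answers with HTTP 429, and maps each returned post to a ForumSearchResult.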
func PerformRedditSearch(query string, safe string, page int) ([]ForumSearchResult, error) {
    const (
        pageSize       = 25
        baseURL        = "https://www.reddit.com/"
        maxRetries     = 5
        initialBackoff = 2 * time.Second
    )
    var results []ForumSearchResult

    searchURL := fmt.Sprintf("%ssearch.json?q=%s&limit=%d&start=%d", baseURL, url.QueryEscape(query), pageSize, page*pageSize)
    var resp *http.Response
    var err error

    // Retry with exponential backoff while Reddit responds with 429 Too Many Requests.
    for i := 0; i <= maxRetries; i++ {
        resp, err = http.Get(searchURL)
        if err != nil {
            return nil, fmt.Errorf("making request: %v", err)
        }
        if resp.StatusCode != http.StatusTooManyRequests {
            break
        }

        // Close the rate-limited response, then back off before retrying.
        resp.Body.Close()
        backoff := time.Duration(math.Pow(2, float64(i))) * initialBackoff
        time.Sleep(backoff)
    }

    if err != nil {
        return nil, fmt.Errorf("making request: %v", err)
    }
    defer resp.Body.Close()

    if resp.StatusCode != http.StatusOK {
        return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
    }

    var searchResults map[string]interface{}
    if err := json.NewDecoder(resp.Body).Decode(&searchResults); err != nil {
        return nil, fmt.Errorf("decoding response: %v", err)
    }

    data, ok := searchResults["data"].(map[string]interface{})
    if !ok {
        return nil, fmt.Errorf("no data field in response")
    }

    posts, ok := data["children"].([]interface{})
    if !ok {
        return nil, fmt.Errorf("no children field in data")
    }

    for _, post := range posts {
        postData := post.(map[string]interface{})["data"].(map[string]interface{})

        // Skip NSFW posts when safe search is active.
        if safe == "active" && postData["over_18"].(bool) {
            continue
        }

        header := postData["title"].(string)
        description := postData["selftext"].(string)
        if len(description) > 500 {
            description = description[:500] + "..."
        }
        publishedDate := time.Unix(int64(postData["created_utc"].(float64)), 0)
        permalink := postData["permalink"].(string)
        resultURL := baseURL + permalink

        result := ForumSearchResult{
            URL:           resultURL,
            Header:        header,
            Description:   description,
            PublishedDate: publishedDate,
        }

        // Only attach image fields when the thumbnail is a real URL; Reddit's
        // placeholder values ("self", "default", "") have no scheme and are skipped.
        thumbnail := postData["thumbnail"].(string)
        if parsedURL, err := url.Parse(thumbnail); err == nil && parsedURL.Scheme != "" {
            result.ImgSrc = postData["url"].(string)
            result.ThumbnailSrc = thumbnail
        }

        results = append(results, result)
    }

    return results, nil
}

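// handleForumsSearch runs the Reddit search for the requested page and renders
// templates/forums.html with the results, language options, and pagination state.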
func handleForumsSearch(w http.ResponseWriter, query, safe, lang string, page int) {
    results, err := PerformRedditSearch(query, safe, page)
    if err != nil {
        http.Error(w, fmt.Sprintf("Error performing search: %v", err), http.StatusInternalServerError)
        return
    }

    data := struct {
        Query           string
        Results         []ForumSearchResult
        LanguageOptions []LanguageOption
        CurrentLang     string
        Page            int
        HasPrevPage     bool
        HasNextPage     bool
    }{
        Query:           query,
        Results:         results,
        LanguageOptions: languageOptions,
        CurrentLang:     lang,
        Page:            page,
        HasPrevPage:     page > 1,
        HasNextPage:     len(results) == 25, // a full page suggests more results may follow
    }

    funcMap := template.FuncMap{
        "sub": func(a, b int) int { return a - b },
        "add": func(a, b int) int { return a + b },
    }

    tmpl, err := template.New("forums.html").Funcs(funcMap).ParseFiles("templates/forums.html")
    if err != nil {
        http.Error(w, fmt.Sprintf("Error loading template: %v", err), http.StatusInternalServerError)
        return
    }

    if err := tmpl.Execute(w, data); err != nil {
        http.Error(w, fmt.Sprintf("Error rendering template: %v", err), http.StatusInternalServerError)
    }
}
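
For context, a minimal sketch of how the new handler might be wired into the server's router. The /forums path and the q, safe, lang, and p parameter names are illustrative assumptions, not taken from this commit, and strconv would need to be added to the imports; only handleForumsSearch comes from this file.

// Hypothetical wiring for handleForumsSearch; route path and parameter names
// are assumptions, not part of this commit.
func registerForumsRoute(mux *http.ServeMux) {
    mux.HandleFunc("/forums", func(w http.ResponseWriter, r *http.Request) {
        q := r.URL.Query()
        page, err := strconv.Atoi(q.Get("p"))
        if err != nil || page < 1 {
            page = 1
        }
        handleForumsSearch(w, q.Get("q"), q.Get("safe"), q.Get("lang"), page)
    })
}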