added safe search

partisan 2024-03-30 21:59:16 +01:00
parent 7bf2b8e609
commit dfc3600664
3 changed files with 124 additions and 52 deletions

main.go (84 changes)

@@ -42,14 +42,53 @@ func main() {
}
func handleSearch(w http.ResponseWriter, r *http.Request) {
var query, safe, lang string
// Differentiate between GET and POST requests to correctly extract query, safe, and lang.
if r.Method == "GET" {
// Serve the search page if no query is provided for a GET request
// Or extract the query parameters directly from the URL if present
query = r.URL.Query().Get("q")
if query == "" {
http.ServeFile(w, r, "static/search.html")
return
}
safe = r.URL.Query().Get("safe")
lang = r.URL.Query().Get("lang")
} else if r.Method == "POST" {
// For a POST request, extract form values
query = r.FormValue("q")
safe = r.FormValue("safe")
lang = r.FormValue("lang")
}
// Early return if query is empty
if query == "" {
http.ServeFile(w, r, "static/search.html")
return
}
-query := r.FormValue("q")
-if query == "" {
-http.ServeFile(w, r, "static/search.html")
// Time
start := time.Now()
// Adjust the search URL based on safe search and language settings
var safeParam string
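// Google's "safe" parameter controls SafeSearch: "active" filters explicit results, "off" disables the filter.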
if safe == "active" {
safeParam = "&safe=active"
} else {
safeParam = "&safe=off"
}
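// Google's "lr" parameter restricts results to a language (for example lr=lang_en).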
var langParam string
if lang != "" {
langParam = "&lr=" + lang
}
searchURL := "https://www.google.com/search?q=" + url.QueryEscape(query) + safeParam + langParam
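// Debug output: print the final search URL before sending the request.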
print(searchURL+"\n")
req, err := http.NewRequest("GET", searchURL, nil)
if err != nil {
http.Error(w, "Failed to create request", http.StatusInternalServerError)
return
}
@@ -65,16 +104,6 @@ func handleSearch(w http.ResponseWriter, r *http.Request) {
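// Choose a random User-Agent from the userAgents pool for this request.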
randIndex := rand.Intn(len(userAgents))
userAgent := userAgents[randIndex]
-// Time
-start := time.Now()
-// Search query
-searchURL := "https://www.google.com/search?q=" + url.QueryEscape(query)
-req, err := http.NewRequest("GET", searchURL, nil)
-if err != nil {
-http.Error(w, "Failed to create request", http.StatusInternalServerError)
-return
-}
req.Header.Set("User-Agent", userAgent)
resp, err := http.DefaultClient.Do(req)
if err != nil {
@@ -95,12 +124,9 @@ func handleSearch(w http.ResponseWriter, r *http.Request) {
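// Each ".yuRUbf" block in Google's result markup wraps one organic result's title link.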
doc.Find(".yuRUbf").Each(func(i int, s *goquery.Selection) {
link := s.Find("a")
href, _ := link.Attr("href")
// Extract header/title
header := link.Find("h3").Text()
// Remove the unwanted text from the header/title
header = strings.TrimSpace(strings.TrimSuffix(header, ""))
// Retrieve corresponding description
descSelection := doc.Find(".VwiC3b").Eq(i)
description := ""
if descSelection.Length() > 0 {
@@ -114,44 +140,18 @@ func handleSearch(w http.ResponseWriter, r *http.Request) {
})
})
// Retrieve kno-rdesc
kno := ""
knoLink := ""
rdesc := doc.Find(".kno-rdesc")
if rdesc.Length() > 0 {
span := rdesc.Find("span")
kno = span.Text()
descLink := rdesc.Find("a")
knoLink, _ = descLink.Attr("href")
}
// Retrieve featured snippet
snip := ""
snipSpan := doc.Find(".hgKElc")
if snipSpan.Length() > 0 {
snip = snipSpan.Text()
}
elapsed := time.Since(start)
// Prepare data for rendering template
data := struct {
Results []SearchResult
Title string
Query string
Fetched string
Snippet string
KnoRdesc string
RdescLink string
ElapsedTime string
}{
Results: results,
Title: query + " - TailsGo",
Query: query,
Fetched: fmt.Sprintf("Fetched the results in %.2f seconds", elapsed.Seconds()),
Snippet: snip,
KnoRdesc: kno,
RdescLink: knoLink,
ElapsedTime: strconv.FormatFloat(elapsed.Seconds(), 'f', 2, 64),
}