// Search/files.go

package main

import (
"fmt"
"net/http"
"net/url"
"regexp"
"sort"
"strconv"
"strings"
"time"
)
type Settings struct {
UxLang string
Safe string
}
type TorrentSite interface {
Name() string
Search(query string, category string) ([]TorrentResult, error)
}
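
// Illustrative sketch only: any scraper that satisfies TorrentSite can be
// wired up in init() alongside the real sites. The type below is hypothetical
// and not registered anywhere; it just shows the expected shape of an
// implementation (TorrentResult is assumed to carry at least Magnet and
// Seeders, as used elsewhere in this file).
type exampleSite struct{}

func (exampleSite) Name() string { return "example" }

func (exampleSite) Search(query string, category string) ([]TorrentResult, error) {
	// A real implementation would query the site, parse the result rows and
	// map them into TorrentResult values; returning an error makes
	// fetchFileResults skip this site and log a warning.
	return nil, fmt.Errorf("exampleSite: not implemented")
}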
var (
torrentGalaxy TorrentSite
nyaa TorrentSite
thePirateBay TorrentSite
rutor TorrentSite
)
var fileResultsChan = make(chan []TorrentResult)
func init() {
torrentGalaxy = NewTorrentGalaxy()
// nyaa = NewNyaa()
thePirateBay = NewThePirateBay()
// rutor = NewRutor()
}
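
// handleFileSearch renders the file/torrent search page: it pulls results from
// the cache or the torrent sites, sorts them by seeders and hands them to the
// files.html template together with the current user settings.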
func handleFileSearch(w http.ResponseWriter, settings UserSettings, query string, page int) {
startTime := time.Now()
cacheKey := CacheKey{Query: query, Page: page, Safe: settings.SafeSearch == "active", Lang: settings.SearchLanguage, Type: "file"}
combinedResults := getFileResultsFromCacheOrFetch(cacheKey, query, settings.SafeSearch, settings.SearchLanguage, page)
// Sort the results by the number of seeders
sort.Slice(combinedResults, func(i, j int) bool { return combinedResults[i].Seeders > combinedResults[j].Seeders })
elapsedTime := time.Since(startTime)
// Prepare the data to pass to the template
data := map[string]interface{}{
"Results": combinedResults,
"Query": query,
"Fetched": fmt.Sprintf("%.2f %s", elapsedTime.Seconds(), Translate("seconds")), // Time for fetching results
"Category": "all",
"Sort": "seed",
"Page": page,
"HasPrevPage": page >= 1,
"HasNextPage": len(combinedResults) > 0,
"LanguageOptions": languageOptions,
"CurrentLang": settings.SearchLanguage,
"Theme": settings.Theme,
"Safe": settings.SafeSearch,
"IsThemeDark": settings.IsThemeDark,
}
// Render the template without measuring the time
renderTemplate(w, "files.html", data)
}
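
// getFileResultsFromCacheOrFetch returns cached results when available and
// otherwise fetches fresh ones (if the crawler/meta search is enabled). The
// cache lookup runs in a goroutine and is abandoned after two seconds so a
// slow cache cannot stall the request.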
func getFileResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page int) []TorrentResult {
cacheChan := make(chan []SearchResult)
var combinedResults []TorrentResult
go func() {
results, exists := resultsCache.Get(cacheKey)
if exists {
printDebug("Cache hit")
cacheChan <- results
} else {
printDebug("Cache miss")
cacheChan <- nil
}
}()
select {
case results := <-cacheChan:
if results == nil {
// Fetch only if the cache miss occurs and Crawler is enabled
if config.MetaSearchEnabled {
combinedResults = fetchFileResults(query, safe, lang, page)
if len(combinedResults) > 0 {
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
}
} else {
printDebug("Crawler disabled; skipping fetching.")
}
} else {
_, torrentResults, _, _ := convertToSpecificResults(results)
combinedResults = torrentResults
}
case <-time.After(2 * time.Second):
printDebug("Cache check timeout")
if config.MetaSearchEnabled {
combinedResults = fetchFileResults(query, safe, lang, page)
if len(combinedResults) > 0 {
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
}
} else {
printDebug("Crawler disabled; skipping fetching.")
}
}
return combinedResults
}
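
// fetchFileResults queries every configured torrent site in turn, strips the
// "magnet:" prefix from each result and, when nothing is found locally, asks
// other nodes for results.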
func fetchFileResults(query, safe, lang string, page int) []TorrentResult {
// If Crawler is disabled, skip fetching from torrent sites
if !config.MetaSearchEnabled {
printInfo("Crawler is disabled; skipping torrent site fetching.")
return []TorrentResult{}
}
sites := []TorrentSite{torrentGalaxy, nyaa, thePirateBay, rutor}
results := []TorrentResult{}
for _, site := range sites {
if site == nil {
continue
}
res, err := site.Search(query, "all")
if err != nil {
printWarn("Error searching with %s: %v", site.Name(), err)
continue
}
for _, r := range res {
r.Magnet = removeMagnetLink(r.Magnet) // strip the "magnet:" prefix before storing the result
results = append(results, r)
}
}
if len(results) == 0 {
printWarn("No file results found for query: %s, trying other nodes", query)
results = tryOtherNodesForFileSearch(query, safe, lang, page, []string{hostID})
}
return results
}
func removeMagnetLink(magnet string) string {
// Strip the "magnet:" prefix if present; other strings are returned unchanged
return strings.TrimPrefix(magnet, "magnet:")
}
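
// parseInt converts s to an int, returning 0 when it cannot be parsed.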
func parseInt(s string) int {
i, err := strconv.Atoi(s)
if err != nil {
return 0
}
return i
}
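
// parseSize converts a human-readable size such as "1.5 GB" into bytes.
// A rough sketch of the expected behaviour (values assume the 1024-based
// multipliers below):
//
//	parseSize("700 MB") == 734003200
//	parseSize("1.5 GB") == 1610612736
//	parseSize("banana") == 0 // logged as a parse warning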
func parseSize(sizeStr string) int64 {
sizeStr = strings.TrimSpace(sizeStr)
if sizeStr == "" {
return 0
}
// Use regex to extract numeric value and unit separately
re := regexp.MustCompile(`(?i)([\d.]+)\s*([KMGT]?B)`)
matches := re.FindStringSubmatch(sizeStr)
if len(matches) < 3 {
printWarn("Error parsing size: invalid format %s", sizeStr)
return 0
}
sizeStr = matches[1]
unit := strings.ToUpper(matches[2])
var multiplier int64 = 1
switch unit {
case "B":
multiplier = 1 // plain bytes; the regex above also matches sizes without a K/M/G/T prefix
case "KB":
multiplier = 1024
case "MB":
multiplier = 1024 * 1024
case "GB":
multiplier = 1024 * 1024 * 1024
case "TB":
multiplier = 1024 * 1024 * 1024 * 1024
default:
printWarn("Unknown unit: %s", unit)
return 0
}
size, err := strconv.ParseFloat(sizeStr, 64)
if err != nil {
printWarn("Error parsing size: %v", err)
return 0
}
return int64(size * float64(multiplier))
}
// applyTrackers appends a set of public trackers to a magnet link so clients
// can announce it and peers start seeding. Injecting trackers on purpose is a
// known downside of this approach.
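//
// For example (hypothetical info hash), a link such as
// "magnet:?xt=urn:btih:HASH" comes back with
// "&tr=udp%3A%2F%2Ftracker.openbittorrent.com%3A80%2Fannounce" appended once
// per tracker in the list below.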
func applyTrackers(magnetLink string) string {
if magnetLink == "" {
return ""
}
trackers := []string{
"udp://tracker.openbittorrent.com:80/announce",
"udp://tracker.opentrackr.org:1337/announce",
"udp://tracker.coppersurfer.tk:6969/announce",
"udp://tracker.leechers-paradise.org:6969/announce",
}
for _, tracker := range trackers {
magnetLink += "&tr=" + url.QueryEscape(tracker)
}
return magnetLink
}
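
// formatSize is the inverse-ish of parseSize: it renders a byte count with the
// largest fitting 1024-based unit, e.g. formatSize(1610612736) == "1.50 GB"
// and formatSize(512) == "512 B".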
func formatSize(size int64) string {
if size >= 1024*1024*1024*1024 {
return fmt.Sprintf("%.2f TB", float64(size)/(1024*1024*1024*1024))
} else if size >= 1024*1024*1024 {
return fmt.Sprintf("%.2f GB", float64(size)/(1024*1024*1024))
} else if size >= 1024*1024 {
return fmt.Sprintf("%.2f MB", float64(size)/(1024*1024))
} else if size >= 1024 {
return fmt.Sprintf("%.2f KB", float64(size)/1024)
}
return fmt.Sprintf("%d B", size)
}
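
// sanitizeFileName replaces whitespace with dashes and drops anything that is
// not alphanumeric, a dash or parentheses, e.g. "Some Movie (2024) [1080p]"
// becomes "Some-Movie-(2024)-1080p".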
func sanitizeFileName(name string) string {
// Replace spaces with dashes
sanitized := regexp.MustCompile(`\s+`).ReplaceAllString(name, "-")
// Remove any characters that are not alphanumeric, dashes, or parentheses
sanitized = regexp.MustCompile(`[^a-zA-Z0-9\-\(\)]`).ReplaceAllString(sanitized, "")
return sanitized
}
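
// contains reports whether slice contains item.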
func contains(slice []string, item string) bool {
for _, v := range slice {
if v == item {
return true
}
}
return false
}