package main

import (
	"fmt"
	"net/http"
	"net/url"
	"regexp"
	"sort"
	"strconv"
	"strings"
	"time"
)

type Settings struct {
	UxLang string
	Safe   string
}

type TorrentSite interface {
	Name() string
	Search(query string, category string) ([]TorrentResult, error)
}

var (
	torrentGalaxy TorrentSite
	nyaa          TorrentSite
	thePirateBay  TorrentSite
	rutor         TorrentSite
)

func initFileEngines() {
	torrentGalaxy = nil
	thePirateBay = nil
	nyaa = nil
	// rutor = nil

	for _, engineName := range config.MetaSearch.Files {
		switch engineName {
		case "TorrentGalaxy":
			torrentGalaxy = NewTorrentGalaxy()
		case "ThePirateBay":
			thePirateBay = NewThePirateBay()
		case "Nyaa":
			nyaa = NewNyaa()
		// case "Rutor":
		// 	rutor = NewRutor()
		}
	}
}

func handleFileSearch(w http.ResponseWriter, settings UserSettings, query string, page int) {
	startTime := time.Now()

	cacheKey := CacheKey{Query: query, Page: page, Safe: settings.SafeSearch == "active", Lang: settings.SearchLanguage, Type: "file"}
	combinedResults := getFileResultsFromCacheOrFetch(cacheKey, query, settings.SafeSearch, settings.SearchLanguage, page)

	// Sort the results by the number of seeders, highest first
	sort.Slice(combinedResults, func(i, j int) bool {
		return combinedResults[i].Seeders > combinedResults[j].Seeders
	})

	elapsedTime := time.Since(startTime)

	// Prepare the data to pass to the template
	data := map[string]interface{}{
		"Results":         combinedResults,
		"Query":           query,
		"Fetched":         FormatElapsedTime(elapsedTime),
		"Category":        "all",
		"Sort":            "seed",
		"Page":            page,
		"HasPrevPage":     page > 1,
		"HasNextPage":     len(combinedResults) > 0,
		"LanguageOptions": languageOptions,
		"CurrentLang":     settings.SearchLanguage,
		"Theme":           settings.Theme,
		"Safe":            settings.SafeSearch,
		"IsThemeDark":     settings.IsThemeDark,
	}

	// Render the template; rendering time is not included in the "Fetched" measurement
	renderTemplate(w, "files.html", data)
}

func getFileResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page int) []TorrentResult {
	// Buffered so the lookup goroutine can always send, even if the timeout path wins
	cacheChan := make(chan []SearchResult, 1)
	var combinedResults []TorrentResult

	go func() {
		results, exists := resultsCache.Get(cacheKey)
		if exists {
			printDebug("Cache hit")
			cacheChan <- results
		} else {
			printDebug("Cache miss")
			cacheChan <- nil
		}
	}()

	select {
	case results := <-cacheChan:
		if results == nil {
			// Fetch only on a cache miss, and only if the crawler is enabled
			if config.MetaSearchEnabled {
				combinedResults = fetchFileResults(query, safe, lang, page)
				if len(combinedResults) > 0 {
					resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
				}
			} else {
				printDebug("Crawler disabled; skipping fetching.")
			}
		} else {
			_, torrentResults, _, _, _ := convertToSpecificResults(results)
			combinedResults = torrentResults
		}
	case <-time.After(2 * time.Second):
		printDebug("Cache check timeout")
		if config.MetaSearchEnabled {
			combinedResults = fetchFileResults(query, safe, lang, page)
			if len(combinedResults) > 0 {
				resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
			}
		} else {
			printDebug("Crawler disabled; skipping fetching.")
		}
	}

	return combinedResults
}

func fetchFileResults(query, safe, lang string, page int) []TorrentResult {
	// If the crawler is disabled, skip fetching from torrent sites
	if !config.MetaSearchEnabled {
		printInfo("Crawler is disabled; skipping torrent site fetching.")
		return []TorrentResult{}
	}

	sites := []TorrentSite{torrentGalaxy, nyaa, thePirateBay, rutor}
	var results []TorrentResult

	for _, site := range sites {
		if site == nil {
			continue
		}
		res, err := site.Search(query, "all")
		if err != nil {
			printWarn("Error searching with %s: %v", site.Name(), err)
			continue
		}
		for _, r := range res {
			r.Magnet = removeMagnetLink(r.Magnet) // Strip the "magnet:" prefix; possibly redundant now
			results = append(results, r)
		}
	}

	return results
}

func removeMagnetLink(magnet string) string {
	// Remove the "magnet:" prefix unconditionally
	return strings.TrimPrefix(magnet, "magnet:")
}

func parseInt(s string) int {
	i, err := strconv.Atoi(s)
	if err != nil {
		return 0
	}
	return i
}

func parseSize(sizeStr string) int64 {
	sizeStr = strings.TrimSpace(sizeStr)
	if sizeStr == "" {
		return 0
	}

	// Match a number followed by a unit such as B, KB, MiB, GB, TiB, etc.
	re := regexp.MustCompile(`(?i)([\d.]+)\s*(K?M?G?T?i?B)`)
	matches := re.FindStringSubmatch(sizeStr)
	if len(matches) < 3 {
		printWarn("Error parsing size: invalid format %s", sizeStr)
		return 0
	}

	numStr := matches[1]
	unit := strings.ToUpper(matches[2])

	var multiplier int64 = 1
	switch unit {
	case "B":
		multiplier = 1
	case "KB", "KIB":
		multiplier = 1024
	case "MB", "MIB":
		multiplier = 1024 * 1024
	case "GB", "GIB":
		multiplier = 1024 * 1024 * 1024
	case "TB", "TIB":
		multiplier = 1024 * 1024 * 1024 * 1024
	default:
		printWarn("Unknown unit: %s", unit)
		return 0
	}

	size, err := strconv.ParseFloat(numStr, 64)
	if err != nil {
		printWarn("Error parsing size: %v", err)
		return 0
	}

	return int64(size * float64(multiplier))
}

// Trackers are appended so the magnet link can announce itself and peers start
// seeding it. Adding trackers on purpose is not ideal, but the links are of
// little use without them.
func applyTrackers(magnetLink string) string {
	if magnetLink == "" {
		return ""
	}
	trackers := []string{
		"udp://tracker.openbittorrent.com:80/announce",
		"udp://tracker.opentrackr.org:1337/announce",
		"udp://tracker.coppersurfer.tk:6969/announce",
		"udp://tracker.leechers-paradise.org:6969/announce",
	}
	for _, tracker := range trackers {
		magnetLink += "&tr=" + url.QueryEscape(tracker)
	}
	return magnetLink
}

func formatSize(size int64) string {
	const unit = 1024
	if size < unit {
		return fmt.Sprintf("%d B", size)
	}
	div, exp := int64(unit), 0
	for n := size / unit; n >= unit; n /= unit {
		div *= unit
		exp++
	}
	return fmt.Sprintf("%.1f %siB", float64(size)/float64(div), []string{"K", "M", "G", "T", "P", "E"}[exp])
}

func sanitizeFileName(name string) string {
	// Replace runs of whitespace with dashes
	sanitized := regexp.MustCompile(`\s+`).ReplaceAllString(name, "-")
	// Remove any characters that are not alphanumeric, dashes, or parentheses
	sanitized = regexp.MustCompile(`[^a-zA-Z0-9\-\(\)]`).ReplaceAllString(sanitized, "")
	return sanitized
}