caching for "files" and images + moved search result types
parent 5a66f61a4c
commit 7668ee5eca

7 changed files with 253 additions and 124 deletions
files.go
@@ -21,17 +21,6 @@ type TorrentSite interface {
 	Search(query string, category string) ([]TorrentResult, error)
 }
 
-type TorrentResult struct {
-	URL      string
-	Seeders  int
-	Leechers int
-	Magnet   string
-	Views    int
-	Size     string
-	Title    string
-	Error    string
-}
-
 var (
 	torrentGalaxy TorrentSite
 	nyaa          TorrentSite
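Per the commit title, the TorrentResult type removed above (together with the other search result types) is moved into a shared location, which is also where the generic SearchResult type and the convertToSearchResults / convertToSpecificResults helpers used further down must be defined. Those files are not shown on this page, so the following is only a rough sketch under that assumption: SearchResult as an empty interface is a guess, and the TextSearchResult / ImageSearchResult names and fields are hypothetical placeholders; only the helper names and the []TorrentResult return slot come from this diff.

// Hypothetical placeholder types; the real definitions live in the moved file.
type SearchResult interface{}
type TextSearchResult struct{ Title, URL, Description string }
type ImageSearchResult struct{ Title, Source string }

// Wraps torrent results in the generic slice stored in the cache.
func convertToSearchResults(results []TorrentResult) []SearchResult {
	generic := make([]SearchResult, 0, len(results))
	for _, r := range results {
		generic = append(generic, r)
	}
	return generic
}

// Splits cached generic results back into concrete slices. The diff only uses
// the second ([]TorrentResult) return value; the other two slots are guesses.
func convertToSpecificResults(results []SearchResult) ([]TextSearchResult, []TorrentResult, []ImageSearchResult) {
	var texts []TextSearchResult
	var torrents []TorrentResult
	var images []ImageSearchResult
	for _, r := range results {
		switch v := r.(type) {
		case TextSearchResult:
			texts = append(texts, v)
		case TorrentResult:
			torrents = append(torrents, v)
		case ImageSearchResult:
			images = append(images, v)
		}
	}
	return texts, torrents, images
}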
@@ -49,35 +38,10 @@ func initializeTorrentSites() {
 func handleFileSearch(w http.ResponseWriter, query, safe, lang string, page int) {
 	startTime := time.Now()
 
-	settings := Settings{UxLang: lang, Safe: safe}
-	sites := []TorrentSite{torrentGalaxy, nyaa, thePirateBay, rutor}
-	results := []TorrentResult{}
-	allErrors := true
+	cacheKey := CacheKey{Query: query, Page: page, Safe: safe == "true", Lang: lang, Type: "file"}
+	combinedResults := getFileResultsFromCacheOrFetch(cacheKey, query, safe, lang, page)
 
-	for _, site := range sites {
-		if site == nil {
-			continue
-		}
-		res, err := site.Search(query, "all")
-		if err != nil {
-			continue
-		}
-		if len(res) > 0 {
-			allErrors = false
-		}
-		for _, r := range res {
-			r.Magnet = url.QueryEscape(removeMagnetLink(r.Magnet)) // Remove "magnet:" and encode url
-			results = append(results, r)
-		}
-	}
-
-	if allErrors || len(results) == 0 || results[len(results)-1].Title == "" || results[len(results)-1].Title == " " {
-		results = []TorrentResult{
-			{Error: "Results are currently unavailable, sorry. Please try again later."},
-		}
-	}
-
-	sort.Slice(results, func(i, j int) bool { return results[i].Seeders > results[j].Seeders })
+	sort.Slice(combinedResults, func(i, j int) bool { return combinedResults[i].Seeders > combinedResults[j].Seeders })
 
 	elapsedTime := time.Since(startTime)
 	funcMap := template.FuncMap{
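The hunk above only swaps handleFileSearch's body over to the cache-aware path; the route wiring is unchanged and not shown here. A hypothetical caller, assuming plain net/http and made-up query parameter names, just to illustrate how the signature is meant to be used (needs the "net/http" and "strconv" imports):

// Hypothetical wiring; the project's actual router and parameter names may differ.
func fileSearchHandler(w http.ResponseWriter, r *http.Request) {
	q := r.URL.Query()
	page, err := strconv.Atoi(q.Get("p"))
	if err != nil || page < 1 {
		page = 1 // fall back to the first page on a missing or invalid value
	}
	handleFileSearch(w, q.Get("q"), q.Get("safe"), q.Get("lang"), page)
}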
@@ -102,19 +66,19 @@ func handleFileSearch(w http.ResponseWriter, query, safe, lang string, page int)
 		Page        int
 		Settings    Settings
 	}{
-		Results:     results,
+		Results:     combinedResults,
 		Query:       query,
 		Fetched:     fmt.Sprintf("%.2f", elapsedTime.Seconds()),
 		Category:    "all",
 		Sort:        "seed",
 		HasPrevPage: page > 1,
-		HasNextPage: len(results) > 0,
+		HasNextPage: len(combinedResults) > 0,
 		Page:        page,
-		Settings:    settings,
+		Settings:    Settings{UxLang: lang, Safe: safe},
 	}
 
 	// Debugging: Print results before rendering template
-	for _, result := range results {
+	for _, result := range combinedResults {
 		fmt.Printf("Title: %s, Magnet: %s\n", result.Title, result.Magnet)
 	}
 
@@ -124,13 +88,75 @@ func handleFileSearch(w http.ResponseWriter, query, safe, lang string, page int)
 	}
 }
 
-//this is so fucking stupid, but it does not work otherwise
-func removeMagnetLink(magnet string) string {
-	// Remove the magnet: prefix if it exists
-	if strings.HasPrefix(magnet, "magnet:?") {
-		magnet = strings.TrimPrefix(magnet, "magnet:?")
-	}
-	return magnet
+func getFileResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page int) []TorrentResult {
+	cacheChan := make(chan []SearchResult)
+	var combinedResults []TorrentResult
+
+	go func() {
+		results, exists := resultsCache.Get(cacheKey)
+		if exists {
+			log.Println("Cache hit")
+			cacheChan <- results
+		} else {
+			log.Println("Cache miss")
+			cacheChan <- nil
+		}
+	}()
+
+	select {
+	case results := <-cacheChan:
+		if results == nil {
+			combinedResults = fetchAndCacheFileResults(query, safe, lang, page)
+		} else {
+			_, torrentResults, _ := convertToSpecificResults(results)
+			combinedResults = torrentResults
+		}
+	case <-time.After(2 * time.Second):
+		log.Println("Cache check timeout")
+		combinedResults = fetchAndCacheFileResults(query, safe, lang, page)
+	}
+
+	return combinedResults
+}
+
+func fetchAndCacheFileResults(query, safe, lang string, page int) []TorrentResult {
+	sites := []TorrentSite{torrentGalaxy, nyaa, thePirateBay, rutor}
+	results := []TorrentResult{}
+	allErrors := true
+
+	for _, site := range sites {
+		if site == nil {
+			continue
+		}
+		res, err := site.Search(query, "all")
+		if err != nil {
+			continue
+		}
+		if len(res) > 0 {
+			allErrors = false
+		}
+		for _, r := range res {
+			r.Magnet = url.QueryEscape(removeMagnetLink(r.Magnet)) // Remove "magnet:" and encode url
+			results = append(results, r)
+		}
+	}
+
+	if allErrors || len(results) == 0 || results[len(results)-1].Title == "" || results[len(results)-1].Title == " " {
+		return []TorrentResult{
+			{Error: "Results are currently unavailable, sorry. Please try again later."},
+		}
+	}
+
+	// Cache the valid results
+	cacheKey := CacheKey{Query: query, Page: page, Safe: safe == "true", Lang: lang, Type: "file"}
+	resultsCache.Set(cacheKey, convertToSearchResults(results))
+
+	return results
+}
+
+func removeMagnetLink(magnet string) string {
+	// Remove the magnet: prefix unconditionally
+	return strings.TrimPrefix(magnet, "magnet:?")
 }
 
 func subtract(a, b int) int {
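The resultsCache used by the Get/Set calls above is defined in the commit's other changed files, which are not shown on this page. A minimal sketch of what it could be backed by, assuming a plain mutex-guarded in-memory map with no eviction: the CacheKey field list is taken from the literal used in this diff, while the ResultsCache container itself is an assumption (needs the "sync" import).

// CacheKey fields mirror the literal in the diff:
// CacheKey{Query: query, Page: page, Safe: safe == "true", Lang: lang, Type: "file"}
type CacheKey struct {
	Query string
	Page  int
	Safe  bool
	Lang  string
	Type  string
}

// Hypothetical backing store; the real cache may add TTLs or eviction.
type ResultsCache struct {
	mu      sync.Mutex
	entries map[CacheKey][]SearchResult
}

func NewResultsCache() *ResultsCache {
	return &ResultsCache{entries: make(map[CacheKey][]SearchResult)}
}

func (c *ResultsCache) Get(key CacheKey) ([]SearchResult, bool) {
	c.mu.Lock()
	defer c.mu.Unlock()
	results, ok := c.entries[key]
	return results, ok
}

func (c *ResultsCache) Set(key CacheKey, results []SearchResult) {
	c.mu.Lock()
	defer c.mu.Unlock()
	c.entries[key] = results
}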