updated 'config.ini'

This commit is contained in:
partisan 2024-11-26 07:46:03 +01:00
parent 28f71271d7
commit be4f86580e
13 changed files with 635 additions and 208 deletions

41
text.go
View file

@@ -73,9 +73,14 @@ func getTextResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string,
select {
case results := <-cacheChan:
if results == nil {
combinedResults = fetchTextResults(query, safe, lang, page)
if len(combinedResults) > 0 {
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
// Fetch only if the cache miss occurs and Crawler is enabled
if config.CrawlerEnabled {
combinedResults = fetchTextResults(query, safe, lang, page)
if len(combinedResults) > 0 {
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
}
} else {
printInfo("Crawler disabled; skipping fetching.")
}
} else {
textResults, _, _ := convertToSpecificResults(results)
@@ -83,9 +88,13 @@ func getTextResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string,
}
case <-time.After(2 * time.Second):
printInfo("Cache check timeout")
combinedResults = fetchTextResults(query, safe, lang, page)
if len(combinedResults) > 0 {
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
if config.CrawlerEnabled {
combinedResults = fetchTextResults(query, safe, lang, page)
if len(combinedResults) > 0 {
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
}
} else {
printInfo("Crawler disabled; skipping fetching.")
}
}
@@ -96,9 +105,13 @@ func prefetchPage(query, safe, lang string, page int) {
cacheKey := CacheKey{Query: query, Page: page, Safe: safe == "active", Lang: lang, Type: "text"}
if _, exists := resultsCache.Get(cacheKey); !exists {
printInfo("Page %d not cached, caching now...", page)
pageResults := fetchTextResults(query, safe, lang, page)
if len(pageResults) > 0 {
resultsCache.Set(cacheKey, convertToSearchResults(pageResults))
if config.CrawlerEnabled {
pageResults := fetchTextResults(query, safe, lang, page)
if len(pageResults) > 0 {
resultsCache.Set(cacheKey, convertToSearchResults(pageResults))
}
} else {
printInfo("Crawler disabled; skipping prefetch for page %d", page)
}
} else {
printInfo("Page %d already cached", page)
@@ -107,6 +120,13 @@ func prefetchPage(query, safe, lang string, page int) {
func fetchTextResults(query, safe, lang string, page int) []TextSearchResult {
var results []TextSearchResult
// If Crawler is disabled, do not fetch from search engines
if !config.CrawlerEnabled {
printDebug("Crawler is disabled; skipping search engine fetching.")
return results // Return an empty list
}
engineCount := len(textSearchEngines)
// Determine which engine to use for the current page
@@ -117,7 +137,7 @@ func fetchTextResults(query, safe, lang string, page int) []TextSearchResult {
enginePage := (page-1)/engineCount + 1
// Debug print to verify engine and page number being fetched
printInfo("Fetching results for overall page %d using engine: %s (engine page %d)", page, engine.Name, enginePage)
printDebug("Fetching results for overall page %d using engine: %s (engine page %d)", page, engine.Name, enginePage)
// Fetch results from the selected engine
searchResults, _, err := engine.Func(query, safe, lang, enginePage)
@@ -146,7 +166,6 @@ func fetchTextResults(query, safe, lang string, page int) []TextSearchResult {
}
}
// Final debug print to display results count and source
printInfo("Fetched %d results for overall page %d", len(results), page)
return results