cleanup

parent 3d47c80446
commit 49f613ddeb

15 changed files with 98 additions and 234 deletions

@@ -116,7 +116,7 @@ func cacheImage(imageURL, filename string) (string, error) {
 	}
 
 	if err != nil {
-		return "", err
+		return "", fmt.Errorf("failed to decode image: %v", err)
 	}
 
 	// Ensure the cache directory exists
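
A side note on the new error return: fmt.Errorf with %v flattens the decode error into a plain string. If callers ever need to inspect the cause with errors.Is or errors.As, the %w verb would keep it unwrappable; a minimal alternative sketch (not what the commit does):

    // Alternative: wrap instead of format, so the original decode
    // error stays inspectable upstream via errors.Is / errors.As.
    return "", fmt.Errorf("failed to decode image: %w", err)
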
@@ -206,12 +206,23 @@ func handleImageStatus(w http.ResponseWriter, r *http.Request) {
 	printDebug("Status map: %v", statusMap)
 
 	for _, id := range ids {
-		filename := id + ".webp"
-		cachedImagePath := filepath.Join(cacheDir, filename)
+		// Check for different possible extensions
+		extensions := []string{".webp", ".svg"}
+		var cachedImagePath string
+		var found bool
 
-		if _, err := os.Stat(cachedImagePath); err == nil {
-			// Image is cached and ready
-			statusMap[id] = "/image_cache/" + filename
+		for _, ext := range extensions {
+			filename := id + ext
+			path := filepath.Join(cacheDir, filename)
+			if _, err := os.Stat(path); err == nil {
+				cachedImagePath = "/image_cache/" + filename
+				found = true
+				break
+			}
+		}
+
+		if found {
+			statusMap[id] = cachedImagePath
 		} else {
 			// Image is not ready
 			statusMap[id] = ""
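
The new lookup probes each known extension in order (.webp first, then .svg) and stops at the first cache hit. Extracted as a standalone helper it would look roughly like this; the helper name is hypothetical, not part of the commit:

    // firstCachedPath returns the public cache URL for the first extension
    // found on disk, or "" when the image is not cached yet.
    func firstCachedPath(cacheDir, id string, extensions []string) string {
        for _, ext := range extensions {
            if _, err := os.Stat(filepath.Join(cacheDir, id+ext)); err == nil {
                return "/image_cache/" + id + ext
            }
        }
        return ""
    }
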

cache.go (9 changed lines)
@@ -26,13 +26,14 @@ type TextSearchResult struct {
 
 type ImageSearchResult struct {
 	ID         string
-	Thumbnail  string
 	Title      string
-	Media      string
+	Full       string // Full-size image URL
+	Thumb      string // Thumbnail image URL
+	ProxyFull  string // Proxied full-size image URL
+	ProxyThumb string // Proxied thumbnail image URL (from cache)
+	Source     string // Source webpage URL
 	Width      int
 	Height     int
-	Source     string
-	ThumbProxy string
 }
 
 type VideoResult struct {
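
The rename splits the old Media/Thumbnail/ThumbProxy trio into explicit direct and proxied URL pairs. A minimal sketch of populating the new layout, with placeholder URLs (the /imgproxy?url= format is taken from the engine hunks below):

    result := ImageSearchResult{
        Title:      "Example",                       // placeholder values throughout
        Full:       "https://example.com/full.jpg",  // was Media
        Thumb:      "https://example.com/thumb.jpg", // was Thumbnail
        ProxyFull:  "/imgproxy?url=https://example.com/full.jpg",
        ProxyThumb: "/imgproxy?url=https://example.com/thumb.jpg", // was ThumbProxy
        Source:     "https://example.com/page.html",
        Width:      800,
        Height:     600,
    }
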

@@ -7,6 +7,7 @@ import (
 	"html/template"
 	"net/http"
 	"strings"
+	"time"
 )
 
 var (
@@ -26,8 +27,14 @@ var (
 			return string(jsonBytes), nil
 		},
 	}
+	searchEngines []SearchEngine
 )
 
+type SearchEngine struct {
+	Name string
+	Func func(string, string, string, int) ([]SearchResult, time.Duration, error)
+}
+
 // Helper function to render templates without elapsed time measurement
 func renderTemplate(w http.ResponseWriter, tmplName string, data map[string]interface{}) {
 	// Parse the template with common functions (including translate)
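
With the weighted-selection engine removed (see the deleted search-engine.go further down), SearchEngine shrinks to a name plus a search function that still reports its duration. The wrapTextSearchFunc/wrapImageSearchFunc adapters used in text.go and images.go are not shown in this diff; presumably they time the call and lift the concrete result type into []SearchResult, roughly like this sketch (both signatures are assumptions):

    // Sketch only: assumes PerformGoogleTextSearch and friends return
    // ([]TextSearchResult, error) and that SearchResult is an interface
    // satisfied by TextSearchResult.
    func wrapTextSearchFunc(f func(string, string, string, int) ([]TextSearchResult, error)) func(string, string, string, int) ([]SearchResult, time.Duration, error) {
        return func(query, safe, lang string, page int) ([]SearchResult, time.Duration, error) {
            start := time.Now()
            textResults, err := f(query, safe, lang, page)
            duration := time.Since(start)
            if err != nil {
                return nil, duration, err
            }
            results := make([]SearchResult, 0, len(textResults))
            for _, r := range textResults {
                results = append(results, r)
            }
            return results, duration, nil
        }
    }
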

@@ -68,13 +68,15 @@ func PerformBingImageSearch(query, safe, lang string, page int) ([]ImageSearchRe
 		mediaURL, ok := data["murl"].(string)
 		if ok {
 			// Apply the image proxy
-			proxiedURL := "/imgproxy?url=" + mediaURL
+			proxiedFullURL := "/imgproxy?url=" + imgSrc
+			proxiedThumbURL := "/imgproxy?url=" + mediaURL
 			results = append(results, ImageSearchResult{
-				Thumbnail:  imgSrc,
+				Thumb:      imgSrc,
 				Title:      strings.TrimSpace(title),
-				Media:      mediaURL,
+				Full:       imgSrc,
 				Source:     mediaURL,
-				ThumbProxy: proxiedURL, // Use the proxied URL
+				ProxyFull:  proxiedFullURL,  // Proxied full-size image URL
+				ProxyThumb: proxiedThumbURL, // Proxied thumbnail URL
 				Width:      width,
 				Height:     height,
 			})

@@ -152,11 +152,12 @@ func PerformDeviantArtImageSearch(query, safe, lang string, page int) ([]ImageSe
 			if isValidImageURL(imgSrc, DeviantArtImageUserAgent, resultURL) {
 				resultsChan <- ImageSearchResult{
 					Title:      strings.TrimSpace(title),
-					Media:      imgSrc,
+					Full:       imgSrc,
 					Width:      0,
 					Height:     0,
 					Source:     resultURL,
-					ThumbProxy: "/imgproxy?url=" + imgSrc,
+					ProxyThumb: "/imgproxy?url=" + imgSrc, // Proxied thumbnail
+					ProxyFull:  "/imgproxy?url=" + imgSrc, // Proxied full-size image
 				}
 			}
 		}(imgSrc, resultURL, title)

@@ -64,14 +64,19 @@ func PerformImgurImageSearch(query, safe, lang string, page int) ([]ImageSearchR
 		width, _ := strconv.Atoi(s.Find("a img").AttrOr("width", "0"))
 		height, _ := strconv.Atoi(s.Find("a img").AttrOr("height", "0"))
 
+		// Generate proxied URLs
+		proxyFullURL := "/imgproxy?url=" + url.QueryEscape(imgSrc)
+		proxyThumbURL := "/imgproxy?url=" + url.QueryEscape(thumbnailSrc)
+
 		results = append(results, ImageSearchResult{
-			Thumbnail:  thumbnailSrc,
+			Thumb:      thumbnailSrc,
 			Title:      strings.TrimSpace(title),
-			Media:      imgSrc,
+			Full:       imgSrc,
 			Width:      width,
 			Height:     height,
 			Source:     "https://imgur.com" + urlPath,
-			ThumbProxy: imgSrc, //"/img_proxy?url=" + url.QueryEscape(imgSrc)
+			ProxyFull:  proxyFullURL,
+			ProxyThumb: proxyThumbURL,
 		})
 	})
 

@@ -14,12 +14,12 @@ type QwantAPIResponse struct {
 	Data struct {
 		Result struct {
 			Items []struct {
 				Media     string `json:"media"`
-				Thumbnail string `json:"thumbnail"`
+				//Thumbnail string `json:"thumbnail"`
 				Title     string `json:"title"`
 				Url       string `json:"url"`
 				Width     int    `json:"width"`
 				Height    int    `json:"height"`
 			} `json:"items"`
 		} `json:"result"`
 	} `json:"data"`
@@ -125,7 +125,7 @@ func PerformQwantImageSearch(query, safe, lang string, page int) ([]ImageSearchR
 		return nil, 0, err
 	}
 
-	req.Header.Set("User-Agent", ImageUserAgent)
+	req.Header.Set("User-Agent", ImageUserAgent) // Quant seems to not like some specific User-Agent strings
 
 	resp, err := client.Do(req)
 	if err != nil {
@@ -148,22 +148,23 @@ func PerformQwantImageSearch(query, safe, lang string, page int) ([]ImageSearchR
 	for i, item := range apiResp.Data.Result.Items {
 		wg.Add(1)
 		go func(i int, item struct {
 			Media     string `json:"media"`
-			Thumbnail string `json:"thumbnail"`
+			//Thumbnail string `json:"thumbnail"`
 			Title     string `json:"title"`
 			Url       string `json:"url"`
 			Width     int    `json:"width"`
 			Height    int    `json:"height"`
 		}) {
 			defer wg.Done()
 
 			// Populate the result
 			results[i] = ImageSearchResult{
-				Thumbnail:  item.Thumbnail,
+				Thumb:      item.Media, // item.Thumbnail is not working
 				Title:      item.Title,
-				Media:      item.Media,
+				Full:       item.Media,
 				Source:     item.Url,
-				ThumbProxy: "/imgproxy?url=" + item.Media,
+				ProxyFull:  "/imgproxy?url=" + item.Media,
+				ProxyThumb: "/imgproxy?url=" + item.Media,
 				Width:      item.Width,
 				Height:     item.Height,
 			}

images.go (23 changed lines)
@@ -12,9 +12,9 @@ var imageSearchEngines []SearchEngine
 
 func init() {
 	imageSearchEngines = []SearchEngine{
-		{Name: "Qwant", Func: wrapImageSearchFunc(PerformQwantImageSearch), Weight: 1},
-		{Name: "Bing", Func: wrapImageSearchFunc(PerformBingImageSearch), Weight: 2},
-		{Name: "DeviantArt", Func: wrapImageSearchFunc(PerformDeviantArtImageSearch), Weight: 3},
+		{Name: "Qwant", Func: wrapImageSearchFunc(PerformQwantImageSearch)},
+		{Name: "Bing", Func: wrapImageSearchFunc(PerformBingImageSearch)},
+		{Name: "DeviantArt", Func: wrapImageSearchFunc(PerformDeviantArtImageSearch)},
 		//{Name: "Imgur", Func: wrapImageSearchFunc(PerformImgurImageSearch), Weight: 4}, // Image proxy not working
 	}
 }
@@ -90,8 +90,7 @@ func fetchImageResults(query, safe, lang string, page int) []ImageSearchResult {
 	for _, engine := range imageSearchEngines {
 		printInfo("Using image search engine: %s", engine.Name)
 
-		searchResults, duration, err := engine.Func(query, safe, lang, page)
-		updateEngineMetrics(&engine, duration, err == nil)
+		searchResults, _, err := engine.Func(query, safe, lang, page)
 		if err != nil {
 			printWarn("Error performing image search with %s: %v", engine.Name, err)
 			continue
@@ -100,19 +99,15 @@ func fetchImageResults(query, safe, lang string, page int) []ImageSearchResult {
 		for _, result := range searchResults {
 			imageResult := result.(ImageSearchResult)
 			if config.HardCacheDuration > 0 {
-				// Save the original Media URL before overwriting
-				originalMediaURL := imageResult.Media
-
-				// Generate hash from the original media URL
+				// Generate hash from the original full-size image URL
 				hasher := md5.New()
-				hasher.Write([]byte(originalMediaURL))
+				hasher.Write([]byte(imageResult.Full))
 				hash := hex.EncodeToString(hasher.Sum(nil))
 				filename := hash + ".webp"
 
-				// Set the Media URL to point to the cached image path
+				// Set the Full URL to point to the cached image path
 				cacheURL := "/image_cache/" + filename
-				imageResult.Media = cacheURL
-				imageResult.ThumbProxy = cacheURL
+				imageResult.ProxyFull = cacheURL
 
 				// Assign the ID
 				imageResult.ID = hash
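
The cache key is just the MD5 hex digest of the original full-size URL, so the same remote image always maps to the same cached file. The same derivation as a standalone sketch, with a placeholder URL:

    hasher := md5.New()
    hasher.Write([]byte("https://example.com/full.jpg")) // imageResult.Full
    hash := hex.EncodeToString(hasher.Sum(nil))          // 32 hex characters
    filename := hash + ".webp"                           // served as /image_cache/<hash>.webp
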
@@ -123,7 +118,7 @@ func fetchImageResults(query, safe, lang string, page int) []ImageSearchResult {
 					if err != nil {
 						printWarn("Failed to cache image %s: %v", originalURL, err)
 					}
-				}(originalMediaURL, filename)
+				}(imageResult.Full, filename)
 			}
 			results = append(results, imageResult)
 		}

init.go (6 changed lines)
@@ -15,6 +15,7 @@ type Config struct {
 	WebsiteEnabled    bool
 	LogLevel          int
 	HardCacheDuration time.Duration
+	HardCacheEnabled  bool
 }
 
 var defaultConfig = Config{
@@ -27,6 +28,7 @@ var defaultConfig = Config{
 	WebsiteEnabled:    true,
 	LogLevel:          1,
 	HardCacheDuration: 0,
+	HardCacheEnabled:  false,
 }
 
 const configFilePath = "config.ini"
@@ -62,6 +64,10 @@ func main() {
 		startElection()
 	}
 
+	if config.HardCacheDuration > 0 {
+		config.HardCacheEnabled = true
+	}
+
 	go startNodeClient()
 
 	runServer()
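
HardCacheEnabled is derived from HardCacheDuration at startup rather than read from config.ini. Since the field is a time.Duration, a loader would typically go through time.ParseDuration; a sketch under that assumption (the actual config loader is not part of this diff):

    // Assumed shape of the loader for this field:
    if d, err := time.ParseDuration("24h"); err == nil { // e.g. value read from config.ini
        config.HardCacheDuration = d
    }
    // main() then flips the switch:
    if config.HardCacheDuration > 0 {
        config.HardCacheEnabled = true
    }
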

run.bat (2 changed lines)
@@ -5,7 +5,7 @@ rem Directory where the Go files are located
 set GO_DIR=C:\path\to\your\go\files
 
 rem Explicitly list the main files in the required order
-set FILES=main.go init.go search-engine.go text.go text-google.go text-librex.go text-brave.go text-duckduckgo.go common.go cache.go agent.go files.go files-thepiratebay.go files-torrentgalaxy.go forums.go get-searchxng.go imageproxy.go images.go images-imgur.go images-quant.go map.go node.go open-search.go video.go
+set FILES=main.go init.go text.go text-google.go text-librex.go text-brave.go text-duckduckgo.go common.go cache.go agent.go files.go files-thepiratebay.go files-torrentgalaxy.go forums.go get-searchxng.go imageproxy.go images.go images-imgur.go images-quant.go map.go node.go open-search.go video.go
 
 rem Change to the directory with the Go files
 pushd %GO_DIR%

run.sh (1 changed line)
@@ -4,7 +4,6 @@
 FILES="
 ./main.go
 ./init.go
-./search-engine.go
 ./text.go
 ./text-google.go
 ./text-librex.go

search-engine.go (151 changed lines, file deleted)
@@ -1,151 +0,0 @@
-package main
-
-import (
-	"encoding/json"
-	"fmt"
-	"log"
-	"math/rand"
-	"net/http"
-	"sync"
-	"time"
-)
-
-var (
-	searchEngineLock sync.Mutex
-	searchEngines    []SearchEngine // Ensure this variable is defined
-)
-
-// SearchEngine struct now includes metrics for calculating reputation.
-type SearchEngine struct {
-	Name               string
-	Func               func(string, string, string, int) ([]SearchResult, time.Duration, error)
-	Weight             int
-	TotalRequests      int
-	TotalTime          time.Duration
-	SuccessfulSearches int
-	FailedSearches     int
-	IsCrawler          bool   // Indicates if this search engine is a crawler
-	Host               string // Host of the crawler
-	Port               int    // Port of the crawler
-	AuthCode           string // Auth code for the crawler
-}
-
-// init function seeds the random number generator.
-func init() {
-	rand.Seed(time.Now().UnixNano())
-	// Initialize the searchEngines list
-	searchEngines = []SearchEngine{
-		{Name: "Google", Func: wrapTextSearchFunc(PerformGoogleTextSearch), Weight: 1},
-		{Name: "LibreX", Func: wrapTextSearchFunc(PerformLibreXTextSearch), Weight: 2},
-		{Name: "Brave", Func: wrapTextSearchFunc(PerformBraveTextSearch), Weight: 2},
-		{Name: "DuckDuckGo", Func: wrapTextSearchFunc(PerformDuckDuckGoTextSearch), Weight: 5},
-		// {Name: "SearXNG", Func: wrapTextSearchFunc(PerformSearXNGTextSearch), Weight: 2}, // Uncomment when implemented
-	}
-}
-
-// Selects a search engine based on weighted random selection with dynamic weighting.
-func selectSearchEngine(engines []SearchEngine) SearchEngine {
-	searchEngineLock.Lock()
-	defer searchEngineLock.Unlock()
-
-	// Recalculate weights based on average response time and success rate.
-	for i := range engines {
-		engines[i].Weight = calculateReputation(engines[i])
-	}
-
-	totalWeight := 0
-	for _, engine := range engines {
-		totalWeight += engine.Weight
-	}
-
-	randValue := rand.Intn(totalWeight)
-	for _, engine := range engines {
-		if randValue < engine.Weight {
-			return engine
-		}
-		randValue -= engine.Weight
-	}
-
-	return engines[0] // fallback to the first engine
-}
-
-// Updates the engine's performance metrics.
-func updateEngineMetrics(engine *SearchEngine, responseTime time.Duration, success bool) {
-	searchEngineLock.Lock()
-	defer searchEngineLock.Unlock()
-
-	engine.TotalRequests++
-	engine.TotalTime += responseTime
-	if success {
-		engine.SuccessfulSearches++
-	} else {
-		engine.FailedSearches++
-	}
-	engine.Weight = calculateReputation(*engine)
-}
-
-// Calculates the reputation of the search engine based on average response time and success rate.
-func calculateReputation(engine SearchEngine) int {
-	const referenceTime = time.Second // 1 second reference time in nanoseconds (1000 ms)
-
-	if engine.TotalRequests == 0 {
-		return 10 // Default weight for new engines
-	}
-
-	// Calculate average response time in seconds.
-	avgResponseTime := engine.TotalTime.Seconds() / float64(engine.TotalRequests)
-
-	// Calculate success rate.
-	successRate := float64(engine.SuccessfulSearches) / float64(engine.TotalRequests)
-
-	// Combine response time and success rate into a single reputation score.
-	// The formula can be adjusted to weigh response time and success rate differently.
-	reputation := (referenceTime.Seconds() / avgResponseTime) * successRate
-
-	// Scale reputation for better interpretability (e.g., multiply by 10)
-	return int(reputation * 10)
-}
-
-func fetchSearchResults(query, safe, lang, searchType string, page int) []SearchResult {
-	var results []SearchResult
-
-	engine := selectSearchEngine(searchEngines)
-	log.Printf("Using search engine: %s", engine.Name)
-
-	if engine.IsCrawler {
-		searchResults, duration, err := fetchSearchFromCrawler(engine, query, safe, lang, searchType, page)
-		updateEngineMetrics(&engine, duration, err == nil)
-		if err != nil {
-			log.Printf("Error performing search with crawler %s: %v", engine.Name, err)
-			return nil
-		}
-		results = append(results, searchResults...)
-	} else {
-		searchResults, duration, err := engine.Func(query, safe, lang, page)
-		updateEngineMetrics(&engine, duration, err == nil)
-		if err != nil {
-			log.Printf("Error performing search with %s: %v", engine.Name, err)
-			return nil
-		}
-		results = append(results, searchResults...)
-	}
-
-	return results
-}
-
-func fetchSearchFromCrawler(engine SearchEngine, query, safe, lang, searchType string, page int) ([]SearchResult, time.Duration, error) {
-	url := fmt.Sprintf("http://%s:%d/search?q=%s&safe=%s&lang=%s&t=%s&p=%d", engine.Host, engine.Port, query, safe, lang, searchType, page)
-	start := time.Now()
-	resp, err := http.Get(url)
-	if err != nil {
-		return nil, 0, err
-	}
-	defer resp.Body.Close()
-
-	var results []SearchResult
-	if err := json.NewDecoder(resp.Body).Decode(&results); err != nil {
-		return nil, 0, err
-	}
-
-	return results, time.Since(start), nil
-}

@@ -1,17 +1,2 @@
-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
-<svg width="800px" height="800px" viewBox="0 0 32 32" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:sketch="http://www.bohemiancoding.com/sketch/ns">
-
-    <title>image-picture</title>
-    <desc>Created with Sketch Beta.</desc>
-    <defs>
-
-    </defs>
-    <g id="Page-1" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd" sketch:type="MSPage">
-        <g id="Icon-Set-Filled" sketch:type="MSLayerGroup" transform="translate(-362.000000, -101.000000)" fill="#000000">
-            <path d="M392,129 C392,130.104 391.104,131 390,131 L384.832,131 L377.464,123.535 L386,114.999 L392,120.999 L392,129 L392,129 Z M366,131 C364.896,131 364,130.104 364,129 L364,128.061 L371.945,120.945 L382.001,131 L366,131 L366,131 Z M370,105 C372.209,105 374,106.791 374,109 C374,111.209 372.209,113 370,113 C367.791,113 366,111.209 366,109 C366,106.791 367.791,105 370,105 L370,105 Z M390,101 L366,101 C363.791,101 362,102.791 362,105 L362,129 C362,131.209 363.791,133 366,133 L390,133 C392.209,133 394,131.209 394,129 L394,105 C394,102.791 392.209,101 390,101 L390,101 Z M370,111 C371.104,111 372,110.104 372,109 C372,107.896 371.104,107 370,107 C368.896,107 368,107.896 368,109 C368,110.104 368.896,111 370,111 L370,111 Z" id="image-picture" sketch:type="MSShapeGroup">
-
-            </path>
-        </g>
-    </g>
-</svg>
+<?xml version="1.0" encoding="utf-8"?>
+<svg width="800px" height="800px" viewBox="0 0 24 24" id="image" data-name="multi color" xmlns="http://www.w3.org/2000/svg" class="icon multi-color"><polygon id="tertiary-fill" points="3.29 19.71 9 14 11 16 14 13 20.71 19.71 3.29 19.71" style="fill: #b7b7b7; stroke-width: 2;"></polygon><path id="primary-stroke" d="M20,20H4a1,1,0,0,1-1-1V5A1,1,0,0,1,4,4H20a1,1,0,0,1,1,1V19A1,1,0,0,1,20,20Zm.71-.29L14,13l-3,3L9,14,3.29,19.71Z" style="fill: none; stroke: rgb(0, 0, 0); stroke-linecap: round; stroke-linejoin: round; stroke-width: 2;"></path><circle id="secondary-fill" cx="11" cy="9" r="1" style="fill: rgb(44, 169, 188); stroke: rgb(246, 146, 30); stroke-linecap: round; stroke-linejoin: round; stroke-width: 2;"></circle></svg>
Before size: 1.5 KiB, after size: 769 B

@@ -78,9 +78,10 @@
 	src="/static/images/placeholder.svg"
 	data-id="{{ $result.ID }}"
 	alt="{{ .Title }}"
-	data-media="{{ .Media }}"
+	data-full="{{ .ProxyFull }}"
+	data-proxy-full="{{ .ProxyThumb }}"
 	class="clickable"
->
+/>
 <div class="resolution">{{ .Width }} × {{ .Height }}</div>
 <div class="details">
 	<span class="img_title clickable">{{ .Title }}</span>
@@ -159,12 +160,12 @@
 	if (!parentImageDiv) return;
 
 	const imgElement = parentImageDiv.querySelector('img.clickable');
-	const mediaUrl = imgElement.dataset.media; // Full-size image URL
-	const proxyMediaUrl = imgElement.dataset.proxyMedia || imgElement.src; // Proxied full-size image URL or thumbnail proxy
+	const fullImageUrl = imgElement.dataset.proxyFull; // Use data-proxy-full for ProxyFull
+	const thumbnailUrl = imgElement.src; // Use ProxyThumb for the thumbnail
 	const title = imgElement.alt;
-	const sourceUrl = parentImageDiv.querySelector('.img_source').href; // Source website URL
+	const sourceUrl = parentImageDiv.querySelector('.img_source').href; // Source webpage URL
 
-	if (!mediaUrl || viewerOpen) {
+	if (!fullImageUrl || viewerOpen) {
 		return; // Don't open if data is missing or viewer is already open
 	}
 	viewerOpen = true;
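
Note on the dataset mapping: the DOM camel-cases data-* attribute names, so dataset.proxyFull reads the data-proxy-full attribute, which the template hunk above fills with .ProxyThumb, while data-full (.ProxyFull) would be read as dataset.full. A two-line reminder in the script's own language:

    // <img data-full="..." data-proxy-full="...">
    imgElement.dataset.full;      // value of data-full (ProxyFull in the template)
    imgElement.dataset.proxyFull; // value of data-proxy-full (ProxyThumb in the template)
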
@@ -175,11 +176,12 @@
 	const fullSizeLink = imageView.querySelector('.full-size');
 	const proxySizeLink = imageView.querySelector('.proxy-size');
 
-	viewerImage.src = mediaUrl;
+	// Set the viewer image to ProxyFull
+	viewerImage.src = fullImageUrl;
 	viewerTitle.textContent = title;
 	viewerSourceButton.href = sourceUrl;
 	fullSizeLink.href = sourceUrl; // Link to the source website
-	proxySizeLink.href = proxyMediaUrl; // Link to the proxied full-size image
+	proxySizeLink.href = fullImageUrl; // Link to the proxied full-size image
 
 	viewerOverlay.style.display = 'flex';
 	imageView.classList.remove('image_hide');
@@ -221,6 +223,7 @@
 		}
 	});
 });
+
 </script>
 <!-- JavaScript to Load Images -->
 <script>

text.go (11 changed lines)
@@ -10,10 +10,10 @@ var textSearchEngines []SearchEngine
 
 func init() {
 	textSearchEngines = []SearchEngine{
-		{Name: "Google", Func: wrapTextSearchFunc(PerformGoogleTextSearch), Weight: 1},
-		{Name: "LibreX", Func: wrapTextSearchFunc(PerformLibreXTextSearch), Weight: 2},
-		{Name: "Brave", Func: wrapTextSearchFunc(PerformBraveTextSearch), Weight: 2},
-		{Name: "DuckDuckGo", Func: wrapTextSearchFunc(PerformDuckDuckGoTextSearch), Weight: 5},
+		{Name: "Google", Func: wrapTextSearchFunc(PerformGoogleTextSearch)},
+		{Name: "LibreX", Func: wrapTextSearchFunc(PerformLibreXTextSearch)},
+		{Name: "Brave", Func: wrapTextSearchFunc(PerformBraveTextSearch)},
+		{Name: "DuckDuckGo", Func: wrapTextSearchFunc(PerformDuckDuckGoTextSearch)},
 		// {Name: "SearXNG", Func: wrapTextSearchFunc(PerformSearXNGTextSearch), Weight: 2}, // Uncomment when implemented
 	}
 }
@@ -111,8 +111,7 @@ func fetchTextResults(query, safe, lang string, page int) []TextSearchResult {
 	for _, engine := range textSearchEngines {
 		printInfo("Using search engine: %s", engine.Name)
 
-		searchResults, duration, err := engine.Func(query, safe, lang, page)
-		updateEngineMetrics(&engine, duration, err == nil)
+		searchResults, _, err := engine.Func(query, safe, lang, page)
 		if err != nil {
 			printWarn("Error performing search with %s: %v", engine.Name, err)
 			continue