diff --git a/.forgejo/workflows/release.yaml b/.forgejo/workflows/release.yaml
new file mode 100644
index 0000000..c106815
--- /dev/null
+++ b/.forgejo/workflows/release.yaml
@@ -0,0 +1,158 @@
+name: QGato CLI Release Build
+
+on:
+ workflow_dispatch: {}
+
+jobs:
+ build:
+ runs-on: debian
+
+ steps:
+ - name: Checkout source
+ uses: actions/checkout@v4
+
+ - name: Install latest Go
+ run: |
+ echo "📡 Fetching latest Go version string from go.dev..."
+ GO_FULL=$(curl -sSL https://go.dev/VERSION?m=text | head -n1)
+ echo "🔍 Latest Go version: $GO_FULL"
+
+ ARCHIVE="${GO_FULL}.linux-amd64.tar.gz"
+ curl -LO "https://go.dev/dl/${ARCHIVE}"
+
+ mkdir -p "$HOME/.local/go"
+ tar -C "$HOME/.local/go" --strip-components=1 -xzf "$ARCHIVE"
+
+ echo "$HOME/.local/go/bin" >> $GITHUB_PATH
+ echo "✅ Go installed locally to $HOME/.local/go"
+
+ - name: Check Go version
+ run: go version
+
+ - name: Extract version from version.txt
+ id: version
+ run: |
+ VERSION=$(cat version.txt)
+ VERSION="v${VERSION#v}"
+ echo "$VERSION" > version.txt
+ echo "✅ Detected version: $VERSION"
+
+ - name: Build all targets
+ run: |
+ mkdir -p bundles
+
+ PLATFORMS=(
+ "linux/amd64"
+ "linux/arm64"
+ "linux/arm/v7"
+ "linux/arm/v6"
+ "linux/riscv64"
+ "windows/amd64"
+ "windows/arm64"
+ )
+
+ for TARGET in "${PLATFORMS[@]}"; do
+ OS=$(echo "$TARGET" | cut -d/ -f1)
+ ARCH=$(echo "$TARGET" | cut -d/ -f2)
+ VARIANT=$(echo "$TARGET" | cut -d/ -f3)
+
+ OUT="qgato-${OS}-${ARCH}"
+ [ -n "$VARIANT" ] && OUT="${OUT}${VARIANT}"
+ BIN="$OUT"
+ [ "$OS" = "windows" ] && BIN="${OUT}.exe"
+
+ echo "🔨 Building $BIN"
+
+ # Disable CGO for cross-compiled targets (everything except native linux/amd64)
+ if [ "$TARGET" = "linux/amd64" ]; then
+ export CGO_ENABLED=1
+ else
+ export CGO_ENABLED=0
+ fi
+
+ if [ "$ARCH" = "arm" ]; then
+ case "$VARIANT" in
+ v7) GOARM=7 ;;
+ v6) GOARM=6 ;;
+ *) GOARM=7 ;;
+ esac
+ GOOS=$OS GOARCH=arm GOARM=$GOARM \
+ go build -ldflags="-s -w" -o "$BIN" ./.
+ else
+ GOOS=$OS GOARCH=$ARCH \
+ go build -ldflags="-s -w" -o "$BIN" ./.
+ fi
+
+ echo "📦 Packaging $BIN with required files..."
+
+ PKG_DIR="bundle-$OUT"
+ mkdir "$PKG_DIR"
+ cp "$BIN" "$PKG_DIR/"
+ cp -r lang static templates config.ini "$PKG_DIR/" 2>/dev/null || true
+
+ if [ "$OS" = "windows" ]; then
+ zip -r "bundles/$OUT.zip" "$PKG_DIR"
+ else
+ tar -czf "bundles/$OUT.tar.gz" "$PKG_DIR"
+ fi
+
+ rm -rf "$PKG_DIR" "$BIN"
+ done
+
+ - name: Create Forgejo release
+ run: |
+ TAG_NAME=$(cat version.txt)
+ echo "📦 Creating release for tag: $TAG_NAME"
+
+ DOWNLOAD_BASE="https://weforge.xyz/spitfire/Search/releases/download/$TAG_NAME"
+
+ echo "| Arch | Linux Bundle (.tar.gz) | Windows Bundle (.zip) |" > release.md
+ echo "|---------|---------------------------------------------------|--------------------------------------------------|" >> release.md
+ echo "| amd64 | [qgato-linux-amd64.tar.gz]($DOWNLOAD_BASE/qgato-linux-amd64.tar.gz) | [qgato-windows-amd64.zip]($DOWNLOAD_BASE/qgato-windows-amd64.zip) |" >> release.md
+ echo "| arm64 | [qgato-linux-arm64.tar.gz]($DOWNLOAD_BASE/qgato-linux-arm64.tar.gz) | [qgato-windows-arm64.zip]($DOWNLOAD_BASE/qgato-windows-arm64.zip) |" >> release.md
+ echo "| armv7 | [qgato-linux-armv7.tar.gz]($DOWNLOAD_BASE/qgato-linux-armv7.tar.gz) | — |" >> release.md
+ echo "| armv6 | [qgato-linux-armv6.tar.gz]($DOWNLOAD_BASE/qgato-linux-armv6.tar.gz) | — |" >> release.md
+ echo "| riscv64 | [qgato-linux-riscv64.tar.gz]($DOWNLOAD_BASE/qgato-linux-riscv64.tar.gz) | — |" >> release.md
+
+ RELEASE_BODY=$(cat release.md | jq -Rs .)
+
+ curl -sSL -X POST "$FORGEJO_API/repos/${OWNER}/${REPO}/releases" \
+ -H "Authorization: token $FORGEJO_TOKEN" \
+ -H "Content-Type: application/json" \
+ -d "{
+ \"tag_name\": \"$TAG_NAME\",
+ \"name\": \"$TAG_NAME\",
+ \"body\": $RELEASE_BODY,
+ \"draft\": false,
+ \"prerelease\": false
+ }"
+ env:
+ FORGEJO_API: https://weforge.xyz/api/v1
+ OWNER: spitfire
+ REPO: Search
+ FORGEJO_TOKEN: ${{ secrets.FORGEJO_TOKEN }}
+
+ - name: Upload all bundles
+ run: |
+ TAG_NAME=$(cat version.txt)
+ RELEASE_ID=$(curl -s -H "Authorization: token $FORGEJO_TOKEN" \
+ "$FORGEJO_API/repos/${OWNER}/${REPO}/releases/tags/$TAG_NAME" | jq -r .id)
+
+ for FILE in bundles/*; do
+ NAME=$(basename "$FILE")
+ echo "📤 Uploading $NAME"
+
+ CONTENT_TYPE="application/octet-stream"
+ [[ "$FILE" == *.zip ]] && CONTENT_TYPE="application/zip"
+ [[ "$FILE" == *.tar.gz ]] && CONTENT_TYPE="application/gzip"
+
+ curl -sSL -X POST "$FORGEJO_API/repos/${OWNER}/${REPO}/releases/${RELEASE_ID}/assets?name=$NAME" \
+ -H "Authorization: token $FORGEJO_TOKEN" \
+ -H "Content-Type: $CONTENT_TYPE" \
+ --data-binary "@$FILE"
+ done
+ env:
+ FORGEJO_API: https://weforge.xyz/api/v1
+ OWNER: spitfire
+ REPO: Search
+ FORGEJO_TOKEN: ${{ secrets.FORGEJO_TOKEN }}
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 5f5aeab..c731c6b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -6,4 +6,5 @@ cache/
*.min.js
*.min.css
qgato
+qgato.exe
test.py
\ No newline at end of file
diff --git a/README.md b/README.md
index 5ad3337..af48ace 100644
--- a/README.md
+++ b/README.md
@@ -47,11 +47,11 @@ A self-hosted private search engine designed to be scalable and more resource-ef
### For Self-Hosting
-- **Self-hosted option** - Run on your own server for even more privacy.
+- **[Easy to Set Up](https://weforge.xyz/Spitfire/Search/wiki/Setup-Other)** - Quick and straightforward setup process for anyone.
- **Lightweight** - Low memory footprint (15-30MiB) even during searches.
- **Decentralized** - No single point of failure.
- **Results caching in RAM** - Faster response times through caching.
-- **Configurable** - Tweak features via `config.ini`.
+- **[Configurable](https://weforge.xyz/Spitfire/Search/wiki/Config)** - Fully customizable via the `config.ini` file.
- **Flexible media support** - Images optionally stored on HDD/SSD for caching and improved response time.
### Results Sources
@@ -73,30 +73,20 @@ A self-hosted private search engine designed to be scalable and more resource-ef
### Running the QGato
-Linux:
-
```bash
git clone https://weforge.xyz/Spitfire/Search.git
cd Search
-chmod +x ./run.sh
-./run.sh
-```
-
-Windows:
-
-```powershell
-git clone https://weforge.xyz/Spitfire/Search.git
-cd Search
-.\run.bat
+go run .
```
*Its that easy!*
### Configuring
-Configuration is done via the ``config.ini`` file.
-On first start, you will be guided through the basic setup.
-More advanced setup and all options will be listed here later, as this is still being updated.
+- Configuration is done via the `config.ini` file.
+- On first start, you will be guided through the basic setup.
+- For more advanced configuration options, visit the [Wiki Configuration Page](https://weforge.xyz/Spitfire/Search/wiki/Config).
+
## License
diff --git a/agent.go b/agent.go
index 6333102..5028c3a 100755
--- a/agent.go
+++ b/agent.go
@@ -11,11 +11,13 @@ import (
"time"
)
+// BrowserVersion represents the version & global usage from the caniuse data
type BrowserVersion struct {
Version string `json:"version"`
Global float64 `json:"global"`
}
+// BrowserData holds sets of versions for Firefox and Chromium
type BrowserData struct {
Firefox []BrowserVersion `json:"firefox"`
Chromium []BrowserVersion `json:"chrome"`
@@ -28,6 +30,7 @@ var (
}{
data: make(map[string]string),
}
+
browserCache = struct {
sync.RWMutex
data BrowserData
@@ -37,26 +40,19 @@ var (
}
)
+// fetchLatestBrowserVersions retrieves usage data from caniuse.com’s fulldata JSON.
func fetchLatestBrowserVersions() (BrowserData, error) {
- url := "https://raw.githubusercontent.com/Fyrd/caniuse/master/fulldata-json/data-2.0.json"
-
- // // Optional: skip TLS verification to avoid certificate errors
- // transport := &http.Transport{
- // TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
- // }
-
- // Increase the HTTP client timeout
+ const urlCaniuse = "https://raw.githubusercontent.com/Fyrd/caniuse/master/fulldata-json/data-2.0.json"
client := &http.Client{
Timeout: 30 * time.Second,
- // Transport: transport,
}
- // Build the request manually to set headers
- req, err := http.NewRequest("GET", url, nil)
+ req, err := http.NewRequest("GET", urlCaniuse, nil)
if err != nil {
return BrowserData{}, err
}
- // Custom user agent and English language preference
+
+ // Set a simple custom User-Agent and language
req.Header.Set("User-Agent", "MyCustomAgent/1.0 (compatible; +https://example.com)")
req.Header.Set("Accept-Language", "en-US,en;q=0.9")
@@ -71,36 +67,42 @@ func fetchLatestBrowserVersions() (BrowserData, error) {
return BrowserData{}, err
}
- var rawData map[string]interface{}
+ var rawData map[string]any
if err := json.Unmarshal(body, &rawData); err != nil {
return BrowserData{}, err
}
- stats := rawData["agents"].(map[string]interface{})
+ stats, ok := rawData["agents"].(map[string]any)
+ if !ok {
+ return BrowserData{}, fmt.Errorf("unexpected JSON structure (no 'agents' field)")
+ }
var data BrowserData
- if firefoxData, ok := stats["firefox"].(map[string]interface{}); ok {
- for version, usage := range firefoxData["usage_global"].(map[string]interface{}) {
- data.Firefox = append(data.Firefox, BrowserVersion{
- Version: version,
- Global: usage.(float64),
- })
+ // Extract Firefox data
+ if firefoxData, ok := stats["firefox"].(map[string]any); ok {
+ if usageMap, ok := firefoxData["usage_global"].(map[string]any); ok {
+ for version, usage := range usageMap {
+ val, _ := usage.(float64)
+ data.Firefox = append(data.Firefox, BrowserVersion{Version: version, Global: val})
+ }
}
}
- if chromeData, ok := stats["chrome"].(map[string]interface{}); ok {
- for version, usage := range chromeData["usage_global"].(map[string]interface{}) {
- data.Chromium = append(data.Chromium, BrowserVersion{
- Version: version,
- Global: usage.(float64),
- })
+ // Extract Chrome data
+ if chromeData, ok := stats["chrome"].(map[string]any); ok {
+ if usageMap, ok := chromeData["usage_global"].(map[string]any); ok {
+ for version, usage := range usageMap {
+ val, _ := usage.(float64)
+ data.Chromium = append(data.Chromium, BrowserVersion{Version: version, Global: val})
+ }
}
}
return data, nil
}
+// getLatestBrowserVersions checks the cache and fetches new data if expired
func getLatestBrowserVersions() (BrowserData, error) {
browserCache.RLock()
if time.Now().Before(browserCache.expires) {
@@ -117,37 +119,36 @@ func getLatestBrowserVersions() (BrowserData, error) {
browserCache.Lock()
browserCache.data = data
- browserCache.expires = time.Now().Add(24 * time.Hour)
+ browserCache.expires = time.Now().Add(24 * time.Hour) // Refresh daily
browserCache.Unlock()
return data, nil
}
+// randomUserAgent picks a random browser (Firefox/Chromium), selects a version based on usage,
+// picks an OS string, and composes a User-Agent header.
func randomUserAgent() (string, error) {
browsers, err := getLatestBrowserVersions()
if err != nil {
return "", err
}
- rand := rand.New(rand.NewSource(time.Now().UnixNano()))
+ r := rand.New(rand.NewSource(time.Now().UnixNano()))
- // Simulated browser usage statistics (in percentages)
+ // Overall usage: 85% chance for Chromium, 15% for Firefox
usageStats := map[string]float64{
- "Firefox": 30.0,
- "Chromium": 70.0,
+ "Firefox": 15.0,
+ "Chromium": 85.0,
}
- // Calculate the probabilities for the versions
- probabilities := []float64{0.5, 0.25, 0.125, 0.0625, 0.03125, 0.015625, 0.0078125, 0.00390625}
-
- // Select a browser based on usage statistics
+ // Weighted random selection of the browser type
browserType := ""
- randVal := rand.Float64() * 100
+ randVal := r.Float64() * 100
cumulative := 0.0
- for browser, usage := range usageStats {
+ for bType, usage := range usageStats {
cumulative += usage
if randVal < cumulative {
- browserType = browser
+ browserType = bType
break
}
}
@@ -164,14 +165,16 @@ func randomUserAgent() (string, error) {
return "", fmt.Errorf("no versions found for browser: %s", browserType)
}
- // Sort versions by usage (descending order)
+ // Sort by global usage descending
sort.Slice(versions, func(i, j int) bool {
return versions[i].Global > versions[j].Global
})
- // Select a version based on the probabilities
+ // Probability distribution for top few versions
+ probabilities := []float64{0.5, 0.25, 0.125, 0.0625, 0.03125, 0.015625, 0.0078125, 0.00390625}
+
version := ""
- randVal = rand.Float64()
+ randVal = r.Float64()
cumulative = 0.0
for i, p := range probabilities {
cumulative += p
@@ -181,68 +184,72 @@ func randomUserAgent() (string, error) {
}
}
- // Fallback to the last version if none matched
+ // Fallback to the least used version if none matched
if version == "" {
version = versions[len(versions)-1].Version
}
- // Generate the user agent string
- userAgent := generateUserAgent(browserType, version)
+ userAgent := generateUserAgent(browserType, version, r)
return userAgent, nil
}
-func generateUserAgent(browser, version string) string {
+// generateUserAgent composes the final UA string given the browser, version, and OS.
+func generateUserAgent(browser, version string, r *rand.Rand) string {
oses := []struct {
os string
probability float64
}{
{"Windows NT 10.0; Win64; x64", 44.0},
- {"Windows NT 11.0; Win64; x64", 44.0},
- {"X11; Linux x86_64", 1.0},
- {"X11; Ubuntu; Linux x86_64", 1.0},
+ {"X11; Linux x86_64", 2.0},
+ {"X11; Ubuntu; Linux x86_64", 2.0},
{"Macintosh; Intel Mac OS X 10_15_7", 10.0},
}
- // Select an OS based on probabilities
- randVal := rand.Float64() * 100
+ // Weighted random selection for OS
+ randVal := r.Float64() * 100
cumulative := 0.0
- selectedOS := ""
- for _, os := range oses {
- cumulative += os.probability
+ selectedOS := oses[0].os // Default in case distribution is off
+ for _, entry := range oses {
+ cumulative += entry.probability
if randVal < cumulative {
- selectedOS = os.os
+ selectedOS = entry.os
break
}
}
switch browser {
case "Firefox":
+ // Example: Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:117.0) Gecko/20100101 Firefox/117.0
return fmt.Sprintf("Mozilla/5.0 (%s; rv:%s) Gecko/20100101 Firefox/%s", selectedOS, version, version)
case "Chromium":
+ // Example: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.5938.132 Safari/537.36
return fmt.Sprintf("Mozilla/5.0 (%s) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36", selectedOS, version)
+ default:
+ return ""
}
- return ""
}
+// updateCachedUserAgents refreshes each cached UA independently with 50% probability
func updateCachedUserAgents(newVersions BrowserData) {
cache.Lock()
defer cache.Unlock()
+
+ r := rand.New(rand.NewSource(time.Now().UnixNano()))
for key, userAgent := range cache.data {
- randVal := rand.Float64()
- if randVal < 0.5 {
- updatedUserAgent := updateUserAgentVersion(userAgent, newVersions)
+ if r.Float64() < 0.5 {
+ updatedUserAgent := updateUserAgentVersion(userAgent, newVersions, r)
cache.data[key] = updatedUserAgent
}
}
}
-func updateUserAgentVersion(userAgent string, newVersions BrowserData) string {
- // Parse the current user agent to extract browser and version
+// updateUserAgentVersion tries to parse the old UA, detect its browser, and update the version
+func updateUserAgentVersion(userAgent string, newVersions BrowserData, r *rand.Rand) string {
var browserType, version string
+
+ // Attempt to detect old UA patterns (Chromium or Firefox)
if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36", &version); err == nil {
browserType = "Chromium"
- } else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (Windows NT 11.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36", &version); err == nil {
- browserType = "Chromium"
} else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36", &version); err == nil {
browserType = "Chromium"
} else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (X11; Ubuntu; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36", &version); err == nil {
@@ -251,8 +258,6 @@ func updateUserAgentVersion(userAgent string, newVersions BrowserData) string {
browserType = "Chromium"
} else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:%s) Gecko/20100101 Firefox/%s", &version, &version); err == nil {
browserType = "Firefox"
- } else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (Windows NT 11.0; Win64; x64; rv:%s) Gecko/20100101 Firefox/%s", &version, &version); err == nil {
- browserType = "Firefox"
} else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (X11; Linux x86_64; rv:%s) Gecko/20100101 Firefox/%s", &version, &version); err == nil {
browserType = "Firefox"
} else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:%s) Gecko/20100101 Firefox/%s", &version, &version); err == nil {
@@ -261,22 +266,37 @@ func updateUserAgentVersion(userAgent string, newVersions BrowserData) string {
browserType = "Firefox"
}
- // Get the latest version for that browser
+ // Grab the newest version from the fetched data
var latestVersion string
if browserType == "Firefox" && len(newVersions.Firefox) > 0 {
+ // Sort by usage descending
+ sort.Slice(newVersions.Firefox, func(i, j int) bool {
+ return newVersions.Firefox[i].Global > newVersions.Firefox[j].Global
+ })
latestVersion = newVersions.Firefox[0].Version
} else if browserType == "Chromium" && len(newVersions.Chromium) > 0 {
+ // Sort by usage descending
+ sort.Slice(newVersions.Chromium, func(i, j int) bool {
+ return newVersions.Chromium[i].Global > newVersions.Chromium[j].Global
+ })
latestVersion = newVersions.Chromium[0].Version
}
- // Update the user agent string with the new version
- return generateUserAgent(browserType, latestVersion)
+ // If we failed to detect the browser or have no data, just return the old UA
+ if browserType == "" || latestVersion == "" {
+ return userAgent
+ }
+
+ // Create a new random OS-based UA string with the latest version
+ return generateUserAgent(browserType, latestVersion, r)
}
+// periodicAgentUpdate periodically refreshes browser data and user agents
func periodicAgentUpdate() {
for {
- // Sleep for a random interval between 1 and 2 days
- time.Sleep(time.Duration(24+rand.Intn(24)) * time.Hour)
+ // Sleep a random interval between 1 and 2 days
+ r := rand.New(rand.NewSource(time.Now().UnixNano()))
+ time.Sleep(time.Duration(24+r.Intn(24)) * time.Hour)
// Fetch the latest browser versions
newVersions, err := fetchLatestBrowserVersions()
@@ -296,6 +316,7 @@ func periodicAgentUpdate() {
}
}
+// GetUserAgent returns a cached UA for the given key or creates one if none exists.
func GetUserAgent(cacheKey string) (string, error) {
cache.RLock()
userAgent, found := cache.data[cacheKey]
@@ -314,9 +335,11 @@ func GetUserAgent(cacheKey string) (string, error) {
cache.data[cacheKey] = userAgent
cache.Unlock()
+ printDebug("Generated (cached or new) user agent: %s", userAgent)
return userAgent, nil
}
+// GetNewUserAgent always returns a newly generated UA, overwriting the cache.
func GetNewUserAgent(cacheKey string) (string, error) {
userAgent, err := randomUserAgent()
if err != nil {
@@ -327,6 +350,7 @@ func GetNewUserAgent(cacheKey string) (string, error) {
cache.data[cacheKey] = userAgent
cache.Unlock()
+ printDebug("Generated new user agent: %s", userAgent)
return userAgent, nil
}
diff --git a/cache-images.go b/cache-images.go
index 4e551cd..4e75b58 100644
--- a/cache-images.go
+++ b/cache-images.go
@@ -19,6 +19,7 @@ import (
"time"
"github.com/chai2010/webp"
+ "github.com/fyne-io/image/ico"
"golang.org/x/image/bmp"
"golang.org/x/image/tiff"
)
@@ -35,7 +36,7 @@ var (
imageURLMapMu sync.RWMutex
)
-func cacheImage(imageURL, imageID string, isThumbnail bool) (string, bool, error) {
+func cacheImage(imageURL, imageID string, imageType string) (string, bool, error) {
if imageURL == "" {
recordInvalidImageID(imageID)
return "", false, fmt.Errorf("empty image URL for image ID %s", imageID)
@@ -43,10 +44,15 @@ func cacheImage(imageURL, imageID string, isThumbnail bool) (string, bool, error
// Construct the filename based on the image ID and type
var filename string
- if isThumbnail {
+ switch imageType {
+ case "thumb":
filename = fmt.Sprintf("%s_thumb.webp", imageID)
- } else {
+ case "icon":
+ filename = fmt.Sprintf("%s_icon.webp", imageID)
+ case "full":
filename = fmt.Sprintf("%s_full.webp", imageID)
+ default:
+ return "", false, fmt.Errorf("unknown image type: %s", imageType)
}
// Make sure we store inside: config.DriveCache.Path / images
@@ -136,24 +142,12 @@ func cacheImage(imageURL, imageID string, isThumbnail bool) (string, bool, error
return cachedImagePath, true, nil
}
- // Decode the image based on the content type
- var img image.Image
- switch contentType {
- case "image/jpeg":
- img, err = jpeg.Decode(bytes.NewReader(data))
- case "image/png":
- img, err = png.Decode(bytes.NewReader(data))
- case "image/gif":
- img, err = gif.Decode(bytes.NewReader(data))
- case "image/webp":
- img, err = webp.Decode(bytes.NewReader(data))
- case "image/bmp":
- img, err = bmp.Decode(bytes.NewReader(data))
- case "image/tiff":
- img, err = tiff.Decode(bytes.NewReader(data))
- default:
+ // Decode image
+ img, err := safeDecodeImage(contentType, data)
+ if err != nil {
+ printErr("Failed to decode favicon: %s [%s] (%v)", imageURL, imageID, err)
recordInvalidImageID(imageID)
- return "", false, fmt.Errorf("unsupported image type: %s", contentType)
+ return "", false, err
}
if err != nil {
@@ -225,29 +219,23 @@ func handleImageServe(w http.ResponseWriter, r *http.Request) {
// Adjust to read from config.DriveCache.Path / images
cachedImagePath := filepath.Join(config.DriveCache.Path, "images", filename)
- if hasExtension && imageType == "thumb" {
- // Requesting cached image (thumbnail or full)
+ if hasExtension && (imageType == "thumb" || imageType == "icon") {
if _, err := os.Stat(cachedImagePath); err == nil {
- // Update the modification time to now
- err := os.Chtimes(cachedImagePath, time.Now(), time.Now())
- if err != nil {
- printWarn("Failed to update modification time for %s: %v", cachedImagePath, err)
- }
-
- // Determine content type based on file extension
- contentType := "image/webp"
- w.Header().Set("Content-Type", contentType)
+ // Update the modification time
+ _ = os.Chtimes(cachedImagePath, time.Now(), time.Now())
+ w.Header().Set("Content-Type", "image/webp")
w.Header().Set("Cache-Control", "public, max-age=31536000")
http.ServeFile(w, r, cachedImagePath)
return
} else {
- // Cached image not found
if config.DriveCacheEnabled {
- // Thumbnail should be cached, but not found
- serveMissingImage(w, r)
+ if imageType == "icon" {
+ serveGlobeImage(w, r)
+ } else {
+ serveMissingImage(w, r)
+ }
return
}
- // Else, proceed to proxy if caching is disabled
}
}
@@ -323,8 +311,12 @@ func handleImageStatus(w http.ResponseWriter, r *http.Request) {
invalidImageIDsMu.Unlock()
if isInvalid {
- // Image is invalid; inform the frontend by setting the missing image URL
- statusMap[id] = "/static/images/missing.svg"
+ // Image is invalid; provide appropriate fallback
+ if strings.HasSuffix(id, "_icon.webp") || strings.HasSuffix(id, "_icon") {
+ statusMap[id] = "/images/globe.svg"
+ } else {
+ statusMap[id] = "/images/missing.svg"
+ }
continue
}
@@ -332,11 +324,15 @@ func handleImageStatus(w http.ResponseWriter, r *http.Request) {
extensions := []string{"webp", "svg"} // Extensions without leading dots
imageReady := false
- // Check thumbnail first
for _, ext := range extensions {
- thumbFilename := fmt.Sprintf("%s_thumb.%s", id, ext)
- thumbPath := filepath.Join(config.DriveCache.Path, "images", thumbFilename)
+ thumbPath := filepath.Join(config.DriveCache.Path, "images", fmt.Sprintf("%s_thumb.%s", id, ext))
+ iconPath := filepath.Join(config.DriveCache.Path, "images", fmt.Sprintf("%s_icon.%s", id, ext))
+ if _, err := os.Stat(iconPath); err == nil {
+ statusMap[id] = fmt.Sprintf("/image/%s_icon.%s", id, ext)
+ imageReady = true
+ break
+ }
if _, err := os.Stat(thumbPath); err == nil {
statusMap[id] = fmt.Sprintf("/image/%s_thumb.%s", id, ext)
imageReady = true
@@ -360,11 +356,13 @@ func handleImageStatus(w http.ResponseWriter, r *http.Request) {
// If neither is ready and image is not invalid
if !imageReady {
- if !config.DriveCacheEnabled {
- // Hard cache is disabled; use the proxy URL
- statusMap[id] = fmt.Sprintf("/image/%s_thumb", id)
+ // Distinguish favicon vs image fallback
+ if strings.HasSuffix(id, "_icon.webp") || strings.HasSuffix(id, "_icon") {
+ statusMap[id] = "/images/globe.svg"
+ } else if !config.DriveCacheEnabled {
+ statusMap[id] = "/images/missing.svg"
}
- // Else, do not set statusMap[id]; the frontend will keep checking
+ // else: leave it unset — frontend will retry
}
}
@@ -510,6 +508,33 @@ func cleanupCache() {
}
}
+func safeDecodeImage(contentType string, data []byte) (img image.Image, err error) {
+ defer func() {
+ if r := recover(); r != nil {
+ err = fmt.Errorf("image decode panic: %v", r)
+ }
+ }()
+ switch contentType {
+ case "image/x-icon", "image/vnd.microsoft.icon":
+ img, err = ico.Decode(bytes.NewReader(data))
+ case "image/jpeg":
+ img, err = jpeg.Decode(bytes.NewReader(data))
+ case "image/png":
+ img, err = png.Decode(bytes.NewReader(data))
+ case "image/gif":
+ img, err = gif.Decode(bytes.NewReader(data))
+ case "image/webp":
+ img, err = webp.Decode(bytes.NewReader(data))
+ case "image/bmp":
+ img, err = bmp.Decode(bytes.NewReader(data))
+ case "image/tiff":
+ img, err = tiff.Decode(bytes.NewReader(data))
+ default:
+ err = fmt.Errorf("unsupported image type: %s", contentType)
+ }
+ return
+}
+
// Serve missing.svg
func serveMissingImage(w http.ResponseWriter, r *http.Request) {
missingImagePath := filepath.Join("static", "images", "missing.svg")
@@ -517,8 +542,25 @@ func serveMissingImage(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Cache-Control", "no-store, must-revalidate")
w.Header().Set("Pragma", "no-cache")
w.Header().Set("Expires", "0")
- if config.DriveCacheEnabled {
- w.WriteHeader(http.StatusNotFound)
- }
http.ServeFile(w, r, missingImagePath)
}
+
+func serveGlobeImage(w http.ResponseWriter, r *http.Request) {
+ globePath := filepath.Join("static", "images", "globe.svg")
+
+ // Set error code FIRST
+ w.WriteHeader(http.StatusNotFound)
+
+ // Now read the file and write it manually, to avoid conflict with http.ServeFile
+ data, err := os.ReadFile(globePath)
+ if err != nil {
+ http.Error(w, "globe.svg not found", http.StatusInternalServerError)
+ return
+ }
+
+ w.Header().Set("Content-Type", "image/svg+xml")
+ w.Header().Set("Cache-Control", "no-store, must-revalidate")
+ w.Header().Set("Pragma", "no-cache")
+ w.Header().Set("Expires", "0")
+ _, _ = w.Write(data)
+}
diff --git a/cache.go b/cache.go
index ac2902d..afdb4da 100644
--- a/cache.go
+++ b/cache.go
@@ -11,7 +11,6 @@ import (
// SearchResult is a generic interface for all types of search results.
type SearchResult interface{}
-// Define various search result types implementing SearchResult interface
type TextSearchResult struct {
URL string
Header string
@@ -62,6 +61,18 @@ type ForumSearchResult struct {
ThumbnailSrc string `json:"thumbnailSrc,omitempty"`
}
+type MusicResult struct {
+ URL string
+ Title string
+ Artist string
+ Description string
+ PublishedDate string
+ Thumbnail string
+ // AudioURL string
+ Source string
+ Duration string
+}
+
// GeocodeCachedItem represents a geocoding result stored in the cache.
type GeocodeCachedItem struct {
Latitude string
@@ -123,6 +134,11 @@ func NewGeocodeCache() *GeocodeCache {
// Get retrieves the results for a given key from the cache.
func (rc *ResultsCache) Get(key CacheKey) ([]SearchResult, bool) {
+ // Skip if RAM caching is disabled
+ if !config.RamCacheEnabled {
+ return nil, false
+ }
+
rc.mu.Lock()
defer rc.mu.Unlock()
@@ -143,6 +159,11 @@ func (rc *ResultsCache) Get(key CacheKey) ([]SearchResult, bool) {
// Set stores the results for a given key in the cache.
func (rc *ResultsCache) Set(key CacheKey, results []SearchResult) {
+ // Skip if RAM caching is disabled
+ if !config.RamCacheEnabled {
+ return
+ }
+
rc.mu.Lock()
defer rc.mu.Unlock()
@@ -162,6 +183,11 @@ func (rc *ResultsCache) keyToString(key CacheKey) string {
// checkAndCleanCache removes items if memory usage exceeds the limit.
func (rc *ResultsCache) checkAndCleanCache() {
+ // Skip if RAM caching is disabled
+ if !config.RamCacheEnabled {
+ return
+ }
+
if rc.currentMemoryUsage() > config.RamCache.MaxUsageBytes {
rc.cleanOldestItems()
}
@@ -179,6 +205,11 @@ func (rc *ResultsCache) currentMemoryUsage() uint64 {
// Get retrieves the geocoding result for a given query from the cache.
func (gc *GeocodeCache) Get(query string) (latitude, longitude string, found bool, exists bool) {
+ // Skip if RAM caching is disabled
+ if !config.RamCacheEnabled {
+ return "", "", false, false
+ }
+
gc.mu.Lock()
defer gc.mu.Unlock()
@@ -198,6 +229,11 @@ func (gc *GeocodeCache) Get(query string) (latitude, longitude string, found boo
}
func (gc *GeocodeCache) Set(query, latitude, longitude string, found bool) {
+ // Skip if RAM caching is disabled
+ if !config.RamCacheEnabled {
+ return
+ }
+
gc.mu.Lock()
defer gc.mu.Unlock()
@@ -259,15 +295,23 @@ func convertToSearchResults(results interface{}) []SearchResult {
genericResults[i] = r
}
return genericResults
+ case []MusicResult:
+ genericResults := make([]SearchResult, len(res))
+ for i, r := range res {
+ genericResults[i] = r
+ }
+ return genericResults
}
return nil
}
-func convertToSpecificResults(results []SearchResult) ([]TextSearchResult, []TorrentResult, []ImageSearchResult, []ForumSearchResult) {
+func convertToSpecificResults(results []SearchResult) ([]TextSearchResult, []TorrentResult, []ImageSearchResult, []ForumSearchResult, []MusicResult) {
var textResults []TextSearchResult
var torrentResults []TorrentResult
var imageResults []ImageSearchResult
var forumResults []ForumSearchResult
+ var musicResults []MusicResult
+
for _, r := range results {
switch res := r.(type) {
case TextSearchResult:
@@ -278,7 +322,9 @@ func convertToSpecificResults(results []SearchResult) ([]TextSearchResult, []Tor
imageResults = append(imageResults, res)
case ForumSearchResult:
forumResults = append(forumResults, res)
+ case MusicResult:
+ musicResults = append(musicResults, res)
}
}
- return textResults, torrentResults, imageResults, forumResults
+ return textResults, torrentResults, imageResults, forumResults, musicResults
}
diff --git a/common.go b/common.go
index 75f6b91..06c47bf 100755
--- a/common.go
+++ b/common.go
@@ -8,6 +8,7 @@ import (
"html/template"
mathrand "math/rand"
"net/http"
+ "net/url"
"strings"
"time"
)
@@ -28,14 +29,59 @@ var (
}
return string(jsonBytes), nil
},
+ "formatShortDate": func(date string) string {
+ t, _ := time.Parse("2006-01-02", date)
+ // return t.Format("Mon") // e.g. "Sat"
+ return t.Format("2.1.") // e.g. "29.6."
+ },
+ "weatherIcon": func(cur interface{}) string {
+ switch c := cur.(type) {
+ case map[string]interface{}:
+ if cond, ok := c["Condition"].(string); ok {
+ return iconForCond(cond)
+ }
+ case WeatherCurrent:
+ return iconForCond(c.Condition)
+ case *WeatherCurrent:
+ return iconForCond(c.Condition)
+ }
+ return "🌈"
+ },
}
)
+func iconForCond(cond string) string {
+ switch cond {
+ case "Clear":
+ return "☀️"
+ case "Partly cloudy":
+ return "⛅"
+ case "Cloudy":
+ return "☁️"
+ case "Rain":
+ return "🌧️"
+ case "Snow":
+ return "❄️"
+ case "Thunderstorm":
+ return "⛈️"
+ case "Fog":
+ return "🌫️"
+ default:
+ return "🌈"
+ }
+}
+
type SearchEngine struct {
Name string
Func func(string, string, string, int) ([]SearchResult, time.Duration, error)
}
+type LinkParts struct {
+ Domain template.HTML
+ Path template.HTML
+ RootURL string // used by getFaviconProxyURL()
+}
+
// Helper function to render templates without elapsed time measurement
func renderTemplate(w http.ResponseWriter, tmplName string, data map[string]interface{}) {
// Generate icon paths for SVG and PNG, including a 1/10 chance for an alternate icon
@@ -107,3 +153,71 @@ func GetIconPath() (string, string) {
// Default paths
return "/static/images/icon.svg", "/static/images/icon.png"
}
+
+// FormatElapsedTime formats elapsed time as a string,
+// using:
+// - "> 0.01 ms" if under 49µs
+// - "0.xx ms" if under 1ms
+// - "xxx ms" if under 300ms
+// - "x.xx seconds" otherwise
+func FormatElapsedTime(elapsed time.Duration) string {
+ if elapsed < 49*time.Microsecond {
+ return fmt.Sprintf("> 0.01 %s", Translate("milliseconds"))
+ } else if elapsed < time.Millisecond {
+ ms := float64(elapsed.Microseconds()) / 1000.0
+ return fmt.Sprintf("%.2f %s", ms, Translate("milliseconds"))
+ } else if elapsed < 300*time.Millisecond {
+ return fmt.Sprintf("%d %s", elapsed.Milliseconds(), Translate("milliseconds"))
+ }
+ return fmt.Sprintf("%.2f %s", elapsed.Seconds(), Translate("seconds"))
+}
+func FormatURLParts(rawURL string) (domain, path, rootURL string) {
+ parsed, err := url.Parse(rawURL)
+ if err != nil || parsed.Host == "" {
+ return "", "", ""
+ }
+
+ domain = parsed.Host
+ if strings.HasPrefix(domain, "www.") {
+ domain = domain[4:]
+ }
+
+ rootURL = parsed.Scheme + "://" + parsed.Host
+
+ path = strings.Trim(parsed.Path, "/")
+ pathSegments := strings.Split(path, "/")
+ var cleanSegments []string
+ for _, seg := range pathSegments {
+ if seg != "" {
+ cleanSegments = append(cleanSegments, seg)
+ }
+ }
+ path = strings.Join(cleanSegments, "/")
+ return domain, path, rootURL
+}
+
+func FormatLinkHTML(rawURL string) LinkParts {
+ domain, path, root := FormatURLParts(rawURL)
+
+ lp := LinkParts{
+ RootURL: root,
+ }
+
+ lp.Domain = template.HTML(fmt.Sprintf(`%s`, template.HTMLEscapeString(domain)))
+
+ if path != "" {
+ pathDisplay := strings.ReplaceAll(path, "/", " › ")
+ lp.Path = template.HTML(fmt.Sprintf(` › %s`, template.HTMLEscapeString(pathDisplay)))
+ }
+
+ return lp
+}
+
+// Converts any struct to a map[string]interface{} using JSON round-trip.
+// Useful for rendering templates with generic map input.
+func toMap(data interface{}) map[string]interface{} {
+ jsonBytes, _ := json.Marshal(data)
+ var result map[string]interface{}
+ _ = json.Unmarshal(jsonBytes, &result)
+ return result
+}
diff --git a/config.go b/config.go
index 18d83cf..9ceaa06 100644
--- a/config.go
+++ b/config.go
@@ -4,10 +4,8 @@ import (
"bufio"
"fmt"
"os"
- "path/filepath"
"strconv"
"strings"
- "syscall"
"time"
"github.com/shirou/gopsutil/mem"
@@ -22,24 +20,42 @@ type CacheConfig struct {
Path string
}
+type MetaSearchConfig struct {
+ Text []string
+ Image []string
+ Files []string
+ Video []string
+}
+
type Config struct {
- Port int // Added
- AuthCode string // Added
- PeerID string // Added
- Peers []string
- Domain string // Added
- NodesEnabled bool // Added
- CrawlerEnabled bool // Added
- IndexerEnabled bool // Added
- WebsiteEnabled bool // Added
- RamCacheEnabled bool
- DriveCacheEnabled bool // Added
- LogLevel int // Added
+ Port int
+ NodeID string
+ Nodes []string
+ Domain string
+ NodesEnabled bool
+ MetaSearchEnabled bool
+ IndexerEnabled bool
+ WebsiteEnabled bool
+ RamCacheEnabled bool
+ DriveCacheEnabled bool
+ MetaProxyEnabled bool
+ MetaProxyStrict bool
+ MetaProxyRetry int
+ MetaProxies []string
+ CrawlerProxyEnabled bool
+ CrawlerProxyStrict bool
+ CrawlerProxies []string
+ CrawlerProxyRetry int
+ // Maybe add Proxy support for Image Extraction?
+ LogLevel int
ConcurrentStandardCrawlers int
ConcurrentChromeCrawlers int
CrawlingInterval time.Duration // Refres crawled results in...
MaxPagesPerDomain int // Max pages to crawl per domain
IndexBatchSize int
+ LibreXInstances []string
+
+ MetaSearch MetaSearchConfig
DriveCache CacheConfig
RamCache CacheConfig
@@ -48,20 +64,54 @@ type Config struct {
var defaultConfig = Config{
Port: 5000,
Domain: "localhost",
- Peers: []string{},
- AuthCode: generateStrongRandomString(64),
+ Nodes: []string{},
NodesEnabled: false,
- CrawlerEnabled: true,
+ MetaSearchEnabled: true,
IndexerEnabled: false,
WebsiteEnabled: true,
RamCacheEnabled: true,
DriveCacheEnabled: false,
+ MetaProxyEnabled: false,
+ MetaProxyStrict: true,
+ MetaProxies: []string{},
+ MetaProxyRetry: 3,
+ CrawlerProxyEnabled: false,
+ CrawlerProxyStrict: true,
+ CrawlerProxies: []string{},
+ CrawlerProxyRetry: 1,
ConcurrentStandardCrawlers: 12,
ConcurrentChromeCrawlers: 4,
CrawlingInterval: 24 * time.Hour,
MaxPagesPerDomain: 10,
IndexBatchSize: 50,
LogLevel: 1,
+ LibreXInstances: []string{"librex.antopie.org"},
+ MetaSearch: MetaSearchConfig{
+ // For Text search (skip SearXNG and LibreX by default, as that would be mega stupid)
+ Text: []string{"Google", "Brave", "DuckDuckGo"},
+
+ // For Image search
+ Image: []string{"Qwant", "Bing", "DeviantArt"},
+
+ // For Files search
+ Files: []string{"TorrentGalaxy", "ThePirateBay", "Nyaa"},
+
+ // For Video (piped instances)
+ Video: []string{
+ "api.piped.yt",
+ "pipedapi.moomoo.me",
+ "pipedapi.darkness.services",
+ "pipedapi.kavin.rocks",
+ "piped-api.hostux.net",
+ "pipedapi.syncpundit.io",
+ "piped-api.cfe.re",
+ "pipedapi.in.projectsegfau.lt",
+ "piapi.ggtyler.dev",
+ "piped-api.codespace.cz",
+ "pipedapi.coldforge.xyz",
+ "pipedapi.osphost.fi",
+ },
+ },
DriveCache: CacheConfig{
Duration: 48 * time.Hour, // Added
Path: "./cache", // Added
@@ -216,12 +266,6 @@ func createConfig() error {
config = defaultConfig
}
- // Generate AuthCode if missing
- if config.AuthCode == "" {
- config.AuthCode = generateStrongRandomString(64)
- printMessage("Generated connection code: %s\n", config.AuthCode)
- }
-
saveConfig(config)
printInfo("Configuration saved successfully.")
return nil
@@ -236,26 +280,59 @@ func saveConfig(config Config) {
sec.Key("Domain").SetValue(config.Domain)
sec.Key("LogLevel").SetValue(strconv.Itoa(config.LogLevel))
- // Peers section
- peersSec := cfg.Section("Peers")
- peersSec.Key("AuthCode").SetValue(config.AuthCode)
- peersSec.Key("PeerID").SetValue(config.PeerID)
- peersSec.Key("Peers").SetValue(strings.Join(config.Peers, ","))
+ // Nodes section
+ nodesSec := cfg.Section("Nodes")
+ nodesSec.Key("NodeID").SetValue(config.NodeID)
+ nodesSec.Key("Nodes").SetValue(strings.Join(config.Nodes, ","))
// Features section
- featuresSec := cfg.Section("Features")
- featuresSec.Key("Nodes").SetValue(strconv.FormatBool(config.NodesEnabled))
- featuresSec.Key("Crawler").SetValue(strconv.FormatBool(config.CrawlerEnabled))
- featuresSec.Key("Indexer").SetValue(strconv.FormatBool(config.IndexerEnabled))
- featuresSec.Key("Website").SetValue(strconv.FormatBool(config.WebsiteEnabled))
+ if config.NodesEnabled != defaultConfig.NodesEnabled ||
+ config.MetaSearchEnabled != defaultConfig.MetaSearchEnabled ||
+ config.IndexerEnabled != defaultConfig.IndexerEnabled ||
+ config.WebsiteEnabled != defaultConfig.WebsiteEnabled ||
+ config.MetaProxyEnabled != defaultConfig.MetaProxyEnabled ||
+ config.CrawlerProxyEnabled != defaultConfig.CrawlerProxyEnabled {
+
+ featuresSec := cfg.Section("Features")
+ setBoolIfChanged(featuresSec, "Nodes", config.NodesEnabled, defaultConfig.NodesEnabled)
+ setBoolIfChanged(featuresSec, "Crawler", config.MetaSearchEnabled, defaultConfig.MetaSearchEnabled)
+ setBoolIfChanged(featuresSec, "Indexer", config.IndexerEnabled, defaultConfig.IndexerEnabled)
+ setBoolIfChanged(featuresSec, "Website", config.WebsiteEnabled, defaultConfig.WebsiteEnabled)
+ setBoolIfChanged(featuresSec, "MetaProxy", config.MetaProxyEnabled, defaultConfig.MetaProxyEnabled)
+ setBoolIfChanged(featuresSec, "CrawlerProxy", config.CrawlerProxyEnabled, defaultConfig.CrawlerProxyEnabled)
+ }
+
+ // Proxies section
+ proxiesSec := cfg.Section("Proxies")
+ proxiesSec.Key("MetaProxyStrict").SetValue(strconv.FormatBool(config.MetaProxyStrict))
+ proxiesSec.Key("MetaProxies").SetValue(strings.Join(config.MetaProxies, ","))
+ setBoolIfChanged(proxiesSec, "CrawlerProxyStrict", config.CrawlerProxyStrict, defaultConfig.CrawlerProxyStrict)
+ setSliceIfChanged(proxiesSec, "CrawlerProxies", config.CrawlerProxies, defaultConfig.CrawlerProxies)
+ proxiesSec.Key("MetaProxyRetry").SetValue(strconv.Itoa(config.MetaProxyRetry))
+ setIntIfChanged(proxiesSec, "CrawlerProxyRetry", config.CrawlerProxyRetry, defaultConfig.CrawlerProxyRetry)
+
+ // MetaSearch section
+ metaSec := cfg.Section("MetaSearch")
+ metaSec.Key("LibreXInstances").SetValue(strings.Join(config.LibreXInstances, ","))
+ metaSec.Key("Text").SetValue(strings.Join(config.MetaSearch.Text, ","))
+ metaSec.Key("Image").SetValue(strings.Join(config.MetaSearch.Image, ","))
+ metaSec.Key("Files").SetValue(strings.Join(config.MetaSearch.Files, ","))
+ metaSec.Key("Video").SetValue(strings.Join(config.MetaSearch.Video, ","))
// Indexer section
- indexerSec := cfg.Section("Indexer")
- indexerSec.Key("ConcurrentStandardCrawlers").SetValue(strconv.Itoa(config.ConcurrentStandardCrawlers))
- indexerSec.Key("ConcurrentChromeCrawlers").SetValue(strconv.Itoa(config.ConcurrentStandardCrawlers))
- indexerSec.Key("CrawlingInterval").SetValue(config.CrawlingInterval.String())
- indexerSec.Key("MaxPagesPerDomain").SetValue(strconv.Itoa(config.MaxPagesPerDomain))
- indexerSec.Key("IndexBatchSize").SetValue(strconv.Itoa(config.IndexBatchSize))
+ if config.ConcurrentStandardCrawlers != defaultConfig.ConcurrentStandardCrawlers ||
+ config.ConcurrentChromeCrawlers != defaultConfig.ConcurrentChromeCrawlers ||
+ config.CrawlingInterval != defaultConfig.CrawlingInterval ||
+ config.MaxPagesPerDomain != defaultConfig.MaxPagesPerDomain ||
+ config.IndexBatchSize != defaultConfig.IndexBatchSize {
+
+ indexerSec := cfg.Section("Indexer")
+ setIntIfChanged(indexerSec, "ConcurrentStandardCrawlers", config.ConcurrentStandardCrawlers, defaultConfig.ConcurrentStandardCrawlers)
+ setIntIfChanged(indexerSec, "ConcurrentChromeCrawlers", config.ConcurrentChromeCrawlers, defaultConfig.ConcurrentChromeCrawlers)
+ setIfChanged(indexerSec, "CrawlingInterval", config.CrawlingInterval.String(), defaultConfig.CrawlingInterval.String())
+ setIntIfChanged(indexerSec, "MaxPagesPerDomain", config.MaxPagesPerDomain, defaultConfig.MaxPagesPerDomain)
+ setIntIfChanged(indexerSec, "IndexBatchSize", config.IndexBatchSize, defaultConfig.IndexBatchSize)
+ }
// DriveCache section
driveSec := cfg.Section("DriveCache")
@@ -286,18 +363,48 @@ func loadConfig() Config {
domain := getConfigValueString(cfg.Section("Server").Key("Domain"), defaultConfig.Domain)
logLevel := getConfigValue(cfg.Section("Server").Key("LogLevel"), defaultConfig.LogLevel, strconv.Atoi)
- // Peers
- authCode := getConfigValueString(cfg.Section("Peers").Key("AuthCode"), defaultConfig.AuthCode)
- peers := strings.Split(getConfigValueString(cfg.Section("Peers").Key("Peers"), ""), ",")
+ // Nodes
+ nodeID := getConfigValueString(cfg.Section("Nodes").Key("NodeID"), defaultConfig.NodeID)
+ nodes := strings.Split(getConfigValueString(cfg.Section("Nodes").Key("Nodes"), ""), ",")
// Features
nodesEnabled := getConfigValueBool(cfg.Section("Features").Key("Nodes"), defaultConfig.NodesEnabled)
- crawlerEnabled := getConfigValueBool(cfg.Section("Features").Key("Crawler"), defaultConfig.CrawlerEnabled)
+ metaSearchEnabled := getConfigValueBool(cfg.Section("Features").Key("Crawler"), defaultConfig.MetaSearchEnabled)
indexerEnabled := getConfigValueBool(cfg.Section("Features").Key("Indexer"), defaultConfig.IndexerEnabled)
websiteEnabled := getConfigValueBool(cfg.Section("Features").Key("Website"), defaultConfig.WebsiteEnabled)
ramCacheEnabled := getConfigValueBool(cfg.Section("Features").Key("RamCache"), defaultConfig.RamCacheEnabled)
driveCacheEnabled := getConfigValueBool(cfg.Section("Features").Key("DriveCache"), defaultConfig.DriveCacheEnabled)
+ metaProxyEnabled := getConfigValueBool(cfg.Section("Features").Key("MetaProxy"), defaultConfig.MetaProxyEnabled)
+ crawlerProxyEnabled := getConfigValueBool(cfg.Section("Features").Key("CrawlerProxy"), defaultConfig.CrawlerProxyEnabled)
+ // Nodes
+ metaProxyStrict := getConfigValueBool(cfg.Section("Proxies").Key("MetaProxyStrict"), defaultConfig.MetaProxyStrict)
+ metaProxies := strings.Split(getConfigValueString(cfg.Section("Proxies").Key("MetaProxies"), ""), ",")
+ crawlerProxyStrict := getConfigValueBool(cfg.Section("Proxies").Key("CrawlerProxyStrict"), defaultConfig.CrawlerProxyStrict)
+ crawlerProxies := strings.Split(getConfigValueString(cfg.Section("Proxies").Key("CrawlerProxies"), ""), ",")
+ metaProxyRetry := getConfigValue(cfg.Section("Proxies").Key("MetaProxyRetry"), defaultConfig.MetaProxyRetry, strconv.Atoi)
+ crawlerProxyRetry := getConfigValue(cfg.Section("Proxies").Key("CrawlerProxyRetry"), defaultConfig.CrawlerProxyRetry, strconv.Atoi)
+
+ // MetaSearch
+ searchXInstances := strings.Split(getConfigValueString(cfg.Section("MetaSearch").Key("LibreXInstances"), strings.Join(defaultConfig.LibreXInstances, ",")), ",")
+ textList := strings.Split(getConfigValueString(cfg.Section("MetaSearch").Key("Text"), strings.Join(defaultConfig.MetaSearch.Text, ",")), ",")
+ imageList := strings.Split(getConfigValueString(cfg.Section("MetaSearch").Key("Image"), strings.Join(defaultConfig.MetaSearch.Image, ",")), ",")
+ filesList := strings.Split(getConfigValueString(cfg.Section("MetaSearch").Key("Files"), strings.Join(defaultConfig.MetaSearch.Files, ",")), ",")
+ videoList := strings.Split(getConfigValueString(cfg.Section("MetaSearch").Key("Video"), strings.Join(defaultConfig.MetaSearch.Video, ",")), ",")
+
+ // Load default values for MetaSearch if they are empty
+ if isEmptyList(textList) {
+ textList = defaultConfig.MetaSearch.Text
+ }
+ if isEmptyList(imageList) {
+ imageList = defaultConfig.MetaSearch.Image
+ }
+ if isEmptyList(filesList) {
+ filesList = defaultConfig.MetaSearch.Files
+ }
+ if isEmptyList(videoList) {
+ videoList = defaultConfig.MetaSearch.Video
+ }
// Indexing
concurrentStandardCrawlers := getConfigValue(cfg.Section("Indexer").Key("ConcurrentStandardCrawlers"), defaultConfig.ConcurrentStandardCrawlers, strconv.Atoi)
concurrentChromeCrawlers := getConfigValue(cfg.Section("Indexer").Key("ConcurrentChromeCrawlers"), defaultConfig.ConcurrentChromeCrawlers, strconv.Atoi)
@@ -322,19 +429,34 @@ func loadConfig() Config {
Port: port,
Domain: domain,
LogLevel: logLevel,
- AuthCode: authCode,
- Peers: peers,
+ NodeID: nodeID,
+ Nodes: nodes,
NodesEnabled: nodesEnabled,
- CrawlerEnabled: crawlerEnabled,
+ MetaSearchEnabled: metaSearchEnabled,
IndexerEnabled: indexerEnabled,
WebsiteEnabled: websiteEnabled,
RamCacheEnabled: ramCacheEnabled,
DriveCacheEnabled: driveCacheEnabled,
+ MetaProxyEnabled: metaProxyEnabled,
+ MetaProxyStrict: metaProxyStrict,
+ MetaProxies: metaProxies,
+ MetaProxyRetry: metaProxyRetry,
+ CrawlerProxyEnabled: crawlerProxyEnabled,
+ CrawlerProxyStrict: crawlerProxyStrict,
+ CrawlerProxies: crawlerProxies,
+ CrawlerProxyRetry: crawlerProxyRetry,
ConcurrentStandardCrawlers: concurrentStandardCrawlers,
ConcurrentChromeCrawlers: concurrentChromeCrawlers,
CrawlingInterval: crawlingInterval,
MaxPagesPerDomain: maxPagesPerDomain,
IndexBatchSize: indexBatchSize,
+ LibreXInstances: searchXInstances,
+ MetaSearch: MetaSearchConfig{
+ Text: textList,
+ Image: imageList,
+ Files: filesList,
+ Video: videoList,
+ },
DriveCache: CacheConfig{
Duration: driveDuration,
MaxUsageBytes: driveMaxUsage,
@@ -347,6 +469,34 @@ func loadConfig() Config {
}
}
+func isEmptyList(list []string) bool {
+ return len(list) == 0 || (len(list) == 1 && strings.TrimSpace(list[0]) == "")
+}
+
+func setIfChanged(sec *ini.Section, key string, value string, defaultValue string) {
+ if value != defaultValue {
+ sec.Key(key).SetValue(value)
+ }
+}
+
+func setBoolIfChanged(sec *ini.Section, key string, value bool, defaultValue bool) {
+ if value != defaultValue {
+ sec.Key(key).SetValue(strconv.FormatBool(value))
+ }
+}
+
+func setIntIfChanged(sec *ini.Section, key string, value int, defaultValue int) {
+ if value != defaultValue {
+ sec.Key(key).SetValue(strconv.Itoa(value))
+ }
+}
+
+func setSliceIfChanged(sec *ini.Section, key string, value, defaultValue []string) {
+ if strings.Join(value, ",") != strings.Join(defaultValue, ",") {
+ sec.Key(key).SetValue(strings.Join(value, ","))
+ }
+}
+
// getConfigValue retrieves a configuration value or returns a default value from defaultConfig.
func getConfigValue[T any](key *ini.Key, defaultValue T, parseFunc func(string) (T, error)) T {
if key == nil || key.String() == "" {
@@ -432,27 +582,6 @@ func parseMaxUsageDrive(value string, cachePath string) uint64 {
return 0
}
-// Get total disk space of the system where cachePath resides
-func getTotalDiskSpace(cachePath string) uint64 {
- var stat syscall.Statfs_t
-
- // Get filesystem stats for the cache path
- absPath, err := filepath.Abs(cachePath)
- if err != nil {
- printErr("Failed to resolve absolute path for: %s", cachePath)
- return 0
- }
-
- err = syscall.Statfs(absPath, &stat)
- if err != nil {
- printErr("Failed to retrieve filesystem stats for: %s", absPath)
- return 0
- }
-
- // Total disk space in bytes
- return stat.Blocks * uint64(stat.Bsize)
-}
-
// Helper to format bytes back to human-readable string
func formatMaxUsage(bytes uint64) string {
const GiB = 1024 * 1024 * 1024
diff --git a/crawler-extraction.go b/crawler-extraction.go
index 4ce8b9d..1297d27 100644
--- a/crawler-extraction.go
+++ b/crawler-extraction.go
@@ -1,3 +1,6 @@
+//go:build experimental
+// +build experimental
+
package main
import (
@@ -32,8 +35,12 @@ func fetchPageMetadataStandard(pageURL, userAgent string) (string, string, strin
// fetchPageMetadataChrome uses Chromedp to handle JavaScript-rendered pages.
func fetchPageMetadataChrome(pageURL, userAgent string) (string, string, string) {
- // Create context
- ctx, cancel := chromedp.NewContext(context.Background())
+ // Create a custom allocator context for Chromedp with proxy support if enabled
+ allocCtx, cancelAlloc := chromedp.NewExecAllocator(context.Background(), configureChromeOptions()...)
+ defer cancelAlloc()
+
+ // Create a browser context
+ ctx, cancel := chromedp.NewContext(allocCtx)
defer cancel()
var renderedHTML string
@@ -57,9 +64,36 @@ func fetchPageMetadataChrome(pageURL, userAgent string) (string, string, string)
return extractParsedDOM(doc)
}
+// configureChromeOptions sets up Chrome options and proxy if CrawlerProxy is enabled.
+func configureChromeOptions() []chromedp.ExecAllocatorOption {
+ options := chromedp.DefaultExecAllocatorOptions[:]
+
+ // This code is not using config.CrawlerProxyRetry
+ if config.CrawlerProxyEnabled && crawlerProxyClient != nil {
+ // Retrieve proxy settings from CrawlerProxy
+ proxy := crawlerProxyClient.GetProxy() // Ensure a `GetProxy` method is implemented for your proxy client
+ if proxy != "" {
+ options = append(options, chromedp.ProxyServer(proxy))
+ printDebug("Using CrawlerProxy for Chromedp: %s", proxy)
+ } else {
+ printWarn("CrawlerProxy is enabled but no valid proxy is available")
+ }
+ }
+
+ // // Add additional Chrome flags if needed
+ // options = append(options,
+ // chromedp.Flag("headless", true),
+ // chromedp.Flag("disable-gpu", true),
+ // chromedp.Flag("no-sandbox", true),
+ // chromedp.Flag("disable-setuid-sandbox", true),
+ // )
+
+ return options
+}
+
// extractStandard does the normal HTML parse with OG, Twitter, etc.
func extractStandard(pageURL, userAgent string) (title, desc, keywords string) {
- client := &http.Client{Timeout: 15 * time.Second}
+
req, err := http.NewRequest("GET", pageURL, nil)
if err != nil {
printDebug("Failed to create request for %s: %v", pageURL, err)
@@ -68,7 +102,8 @@ func extractStandard(pageURL, userAgent string) (title, desc, keywords string) {
req.Header.Set("User-Agent", userAgent)
req.Header.Set("Accept-Language", "en-US,en;q=0.9")
- resp, err := client.Do(req)
+ // Use CrawlerProxy if enabled
+ resp, err := DoCrawlerProxyRequest(req)
if err != nil {
printDebug("Failed to GET %s: %v", pageURL, err)
return
@@ -176,7 +211,6 @@ func fallbackReadability(pageURL, userAgent, title, desc, keywords string) (stri
return title, desc, keywords
}
- client := &http.Client{Timeout: 15 * time.Second}
readReq, err := http.NewRequest("GET", pageURL, nil)
if err != nil {
printDebug("Failed to create fallbackReadability request: %v", err)
@@ -185,14 +219,16 @@ func fallbackReadability(pageURL, userAgent, title, desc, keywords string) (stri
readReq.Header.Set("User-Agent", userAgent)
readReq.Header.Set("Accept-Language", "en-US,en;q=0.9")
- readResp, err := client.Do(readReq)
- if err != nil || readResp.StatusCode < 200 || readResp.StatusCode >= 300 {
- if err != nil {
- printDebug("go-readability GET error for %s: %v", pageURL, err)
- }
- if readResp != nil {
- readResp.Body.Close()
- }
+ // Use CrawlerProxy if enabled
+ readResp, err := DoCrawlerProxyRequest(readReq)
+ if err != nil {
+ printDebug("go-readability GET error for %s: %v", pageURL, err)
+ return title, desc, keywords
+ }
+
+ if readResp.StatusCode < 200 || readResp.StatusCode >= 300 {
+ printDebug("go-readability GET returned status %d for %s", readResp.StatusCode, pageURL)
+ readResp.Body.Close() // Safely close body
return title, desc, keywords
}
defer readResp.Body.Close()
diff --git a/crawler-visited.go b/crawler-visited.go
index bfa1af9..ddabb98 100644
--- a/crawler-visited.go
+++ b/crawler-visited.go
@@ -1,3 +1,6 @@
+//go:build experimental
+// +build experimental
+
package main
import (
diff --git a/crawler.go b/crawler.go
index 8caa073..795b189 100644
--- a/crawler.go
+++ b/crawler.go
@@ -1,3 +1,6 @@
+//go:build experimental
+// +build experimental
+
package main
import (
diff --git a/disk.go b/disk.go
new file mode 100644
index 0000000..0eafde9
--- /dev/null
+++ b/disk.go
@@ -0,0 +1,27 @@
+//go:build !windows
+// +build !windows
+
+package main
+
+import (
+ "path/filepath"
+ "syscall"
+)
+
+func getTotalDiskSpace(cachePath string) uint64 {
+ var stat syscall.Statfs_t
+
+ absPath, err := filepath.Abs(cachePath)
+ if err != nil {
+ printErr("Failed to resolve absolute path for: %s", cachePath)
+ return 0
+ }
+
+ err = syscall.Statfs(absPath, &stat)
+ if err != nil {
+ printErr("Failed to retrieve filesystem stats for: %s", absPath)
+ return 0
+ }
+
+ return stat.Blocks * uint64(stat.Bsize)
+}
diff --git a/disk_win.go b/disk_win.go
new file mode 100644
index 0000000..394549d
--- /dev/null
+++ b/disk_win.go
@@ -0,0 +1,36 @@
+//go:build windows
+// +build windows
+
+package main
+
+import (
+ "syscall"
+ "unsafe"
+)
+
+func getTotalDiskSpace(path string) uint64 {
+ kernel32 := syscall.NewLazyDLL("kernel32.dll")
+ getDiskFreeSpaceExW := kernel32.NewProc("GetDiskFreeSpaceExW")
+
+ lpDirectoryName, err := syscall.UTF16PtrFromString(path)
+ if err != nil {
+ printErr("Failed to encode path for Windows API: %v", err)
+ return 0
+ }
+
+ var freeBytesAvailable, totalNumberOfBytes, totalNumberOfFreeBytes uint64
+
+ r1, _, err := getDiskFreeSpaceExW.Call(
+ uintptr(unsafe.Pointer(lpDirectoryName)),
+ uintptr(unsafe.Pointer(&freeBytesAvailable)),
+ uintptr(unsafe.Pointer(&totalNumberOfBytes)),
+ uintptr(unsafe.Pointer(&totalNumberOfFreeBytes)),
+ )
+
+ if r1 == 0 {
+ printErr("GetDiskFreeSpaceExW failed: %v", err)
+ return 0
+ }
+
+ return totalNumberOfBytes
+}
diff --git a/favicon.go b/favicon.go
new file mode 100644
index 0000000..cd08682
--- /dev/null
+++ b/favicon.go
@@ -0,0 +1,544 @@
+package main
+
+import (
+ "crypto/md5"
+ "crypto/tls"
+ "encoding/base64"
+ "encoding/hex"
+ "fmt"
+ "image"
+ "io"
+ "net/http"
+ "net/url"
+ "os"
+ "path/filepath"
+ "strings"
+ "sync"
+ "time"
+
+ "github.com/chai2010/webp"
+ "golang.org/x/image/draw"
+ "golang.org/x/net/html"
+)
+
+var (
+ faviconCache = struct {
+ sync.RWMutex
+ m map[string]bool // tracks in-progress downloads
+ }{m: make(map[string]bool)}
+
+ // Common favicon paths to try
+ commonFaviconPaths = []string{
+ "/favicon.ico",
+ "/favicon.png",
+ "/favicon.jpg",
+ "/favicon.jpeg",
+ "/favicon.webp",
+ "/apple-touch-icon.png",
+ "/apple-touch-icon-precomposed.png",
+ }
+)
+
+// Add this near the top with other vars
+var (
+ faviconDownloadQueue = make(chan faviconDownloadRequest, 1000)
+)
+
+type faviconDownloadRequest struct {
+ faviconURL string
+ pageURL string
+ cacheID string
+}
+
+func init() {
+ // Start 5 worker goroutines to process favicon downloads
+ for i := 0; i < 5; i++ {
+ go faviconDownloadWorker()
+ }
+}
+
+func faviconDownloadWorker() {
+ for req := range faviconDownloadQueue {
+ cacheFavicon(req.faviconURL, req.cacheID)
+ }
+}
+
+// Generates a cache ID from URL
+func faviconIDFromURL(rawURL string) string {
+ hasher := md5.New()
+ hasher.Write([]byte(rawURL))
+ return hex.EncodeToString(hasher.Sum(nil))
+}
+
+// Resolves favicon URL using multiple methods
+func resolveFaviconURL(rawFavicon, pageURL string) (faviconURL, cacheID string) {
+ cacheID = faviconIDFromURL(pageURL)
+
+ // Handle data URLs first
+ if strings.HasPrefix(rawFavicon, "data:image") {
+ parts := strings.SplitN(rawFavicon, ";base64,", 2)
+ if len(parts) == 2 {
+ data, err := base64.StdEncoding.DecodeString(parts[1])
+ if err == nil {
+ hasher := md5.New()
+ hasher.Write(data)
+ return rawFavicon, hex.EncodeToString(hasher.Sum(nil))
+ }
+ }
+ return "", "" // Invalid data URL
+ }
+
+ // Existing URL handling logic
+ if rawFavicon != "" && strings.HasPrefix(rawFavicon, "http") {
+ cacheID = faviconIDFromURL(rawFavicon)
+ return rawFavicon, cacheID
+ }
+
+ parsedPage, err := url.Parse(pageURL)
+ if err != nil {
+ return "", ""
+ }
+
+ // Method 1: Parse HTML
+ if favicon := findFaviconInHTML(pageURL); favicon != "" {
+ if strings.HasPrefix(favicon, "http") {
+ return favicon, faviconIDFromURL(favicon)
+ }
+ resolved := resolveRelativeURL(parsedPage, favicon)
+ return resolved, faviconIDFromURL(resolved)
+ }
+
+ // Method 2: Common paths
+ for _, path := range commonFaviconPaths {
+ testURL := "https://" + parsedPage.Host + path
+ if checkURLExists(testURL) {
+ return testURL, faviconIDFromURL(testURL)
+ }
+ }
+
+ // Method 3: HTTP headers
+ if headerIcon := findFaviconInHeaders(pageURL); headerIcon != "" {
+ if strings.HasPrefix(headerIcon, "http") {
+ return headerIcon, faviconIDFromURL(headerIcon)
+ }
+ resolved := resolveRelativeURL(parsedPage, headerIcon)
+ return resolved, faviconIDFromURL(resolved)
+ }
+
+ // Fallback
+ fallbackURL := "https://" + parsedPage.Host + "/favicon.ico"
+ return fallbackURL, faviconIDFromURL(fallbackURL)
+}
+
+// Checks HTTP headers for favicon links
+func findFaviconInHeaders(pageURL string) string {
+ client := &http.Client{
+ Timeout: 3 * time.Second, // like 3 seconds for favicon should be enough
+ Transport: &http.Transport{
+ TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
+ },
+ }
+
+ req, err := http.NewRequest("HEAD", pageURL, nil)
+ if err != nil {
+ return ""
+ }
+
+ // Add User-Agent
+ userAgent, err := GetUserAgent("findFaviconInHeaders")
+ if err != nil {
+ printWarn("Error getting User-Agent: %v", err)
+ }
+ req.Header.Set("User-Agent", userAgent)
+
+ resp, err := client.Do(req)
+ if err != nil {
+ return ""
+ }
+ defer resp.Body.Close()
+
+ // Check Link headers (common for favicons)
+ if links, ok := resp.Header["Link"]; ok {
+ for _, link := range links {
+ parts := strings.Split(link, ";")
+ if len(parts) < 2 {
+ continue
+ }
+
+ urlPart := strings.TrimSpace(parts[0])
+ if !strings.HasPrefix(urlPart, "<") || !strings.HasSuffix(urlPart, ">") {
+ continue
+ }
+
+ urlPart = urlPart[1 : len(urlPart)-1] // Remove < and >
+ for _, part := range parts[1:] {
+ part = strings.TrimSpace(part)
+ if strings.EqualFold(part, `rel="icon"`) ||
+ strings.EqualFold(part, `rel=icon`) ||
+ strings.EqualFold(part, `rel="shortcut icon"`) ||
+ strings.EqualFold(part, `rel=shortcut icon`) {
+ return urlPart
+ }
+ }
+ }
+ }
+
+ return ""
+}
+
+// Helper to resolve relative URLs
+func resolveRelativeURL(base *url.URL, relative string) string {
+ if strings.HasPrefix(relative, "http") {
+ return relative
+ }
+ if strings.HasPrefix(relative, "//") {
+ return base.Scheme + ":" + relative
+ }
+ if strings.HasPrefix(relative, "/") {
+ return base.Scheme + "://" + base.Host + relative
+ }
+ return base.Scheme + "://" + base.Host + base.Path + "/" + relative
+}
+
+// Checks if a URL exists (returns 200 OK)
+func checkURLExists(url string) bool {
+ client := &http.Client{
+ Timeout: 5 * time.Second,
+ Transport: &http.Transport{
+ TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
+ },
+ }
+ req, err := http.NewRequest("HEAD", url, nil)
+ if err != nil {
+ return false
+ }
+
+ // Add User-Agent
+ userAgent, err := GetUserAgent("Text-Search-Brave")
+ if err != nil {
+ printWarn("Error getting User-Agent: %v", err)
+ }
+ req.Header.Set("checkURLExists", userAgent)
+
+ resp, err := client.Do(req)
+ if err != nil {
+ return false
+ }
+ resp.Body.Close()
+ return resp.StatusCode == http.StatusOK
+}
+
+// Fetches HTML and looks for favicon links
+func findFaviconInHTML(pageURL string) string {
+ client := &http.Client{
+ Timeout: 10 * time.Second,
+ Transport: &http.Transport{
+ TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
+ },
+ }
+
+ req, err := http.NewRequest("GET", pageURL, nil)
+ if err != nil {
+ return ""
+ }
+
+ // Add User-Agent
+ userAgent, err := GetUserAgent("findFaviconInHTML")
+ if err != nil {
+ printWarn("Error getting User-Agent: %v", err)
+ }
+ req.Header.Set("User-Agent", userAgent)
+
+ resp, err := client.Do(req)
+ if err != nil {
+ return ""
+ }
+ defer resp.Body.Close()
+
+ // Check if this is an AMP page
+ isAMP := false
+ for _, attr := range resp.Header["Link"] {
+ if strings.Contains(attr, "rel=\"amphtml\"") {
+ isAMP = true
+ break
+ }
+ }
+
+ // Parse HTML
+ doc, err := html.Parse(resp.Body)
+ if err != nil {
+ return ""
+ }
+
+ var faviconURL string
+ var findLinks func(*html.Node)
+ findLinks = func(n *html.Node) {
+ if n.Type == html.ElementNode && n.Data == "link" {
+ var rel, href string
+ for _, attr := range n.Attr {
+ switch attr.Key {
+ case "rel":
+ rel = attr.Val
+ case "href":
+ href = attr.Val
+ }
+ }
+
+ // Prioritize different favicon types
+ if href != "" {
+ switch rel {
+ case "icon", "shortcut icon", "apple-touch-icon", "apple-touch-icon-precomposed":
+ // For AMP pages, prefer the non-versioned URL if possible
+ if isAMP {
+ if u, err := url.Parse(href); err == nil {
+ u.RawQuery = "" // Remove query parameters
+ href = u.String()
+ }
+ }
+ if faviconURL == "" || // First found
+ rel == "apple-touch-icon" || // Prefer apple-touch-icon
+ rel == "icon" { // Then regular icon
+ faviconURL = href
+ }
+ }
+ }
+ }
+ for c := n.FirstChild; c != nil; c = c.NextSibling {
+ findLinks(c)
+ }
+ }
+ findLinks(doc)
+
+ return faviconURL
+}
+
+func getFaviconProxyURL(rawFavicon, pageURL string) string {
+ if pageURL == "" {
+ return "/static/images/globe.svg"
+ }
+
+ cacheID := faviconIDFromURL(pageURL)
+ filename := fmt.Sprintf("%s_icon.webp", cacheID)
+ cachedPath := filepath.Join(config.DriveCache.Path, "images", filename)
+
+ if _, err := os.Stat(cachedPath); err == nil {
+ return fmt.Sprintf("/image/%s_icon.webp", cacheID)
+ }
+
+ // Resolve URL
+ faviconURL, _ := resolveFaviconURL(rawFavicon, pageURL)
+ if faviconURL == "" {
+ recordInvalidImageID(cacheID)
+ return "/static/images/globe.svg"
+ }
+
+ // Check if already downloading
+ faviconCache.RLock()
+ downloading := faviconCache.m[cacheID]
+ faviconCache.RUnlock()
+
+ if !downloading {
+ faviconCache.Lock()
+ faviconCache.m[cacheID] = true
+ faviconCache.Unlock()
+
+ // Send to download queue instead of starting goroutine
+ faviconDownloadQueue <- faviconDownloadRequest{
+ faviconURL: faviconURL,
+ pageURL: pageURL,
+ cacheID: cacheID,
+ }
+ }
+
+ return fmt.Sprintf("/image/%s_icon.webp", cacheID)
+}
+
+// Caches favicon, always saving *_icon.webp
+func cacheFavicon(imageURL, imageID string) (string, bool, error) {
+ // if imageURL == "" {
+ // recordInvalidImageID(imageID)
+ // return "", false, fmt.Errorf("empty image URL for image ID %s", imageID)
+ // }
+
+ // Debug
+ printDebug("Downloading favicon ID: %s\n", imageID)
+
+ filename := fmt.Sprintf("%s_icon.webp", imageID)
+ imageCacheDir := filepath.Join(config.DriveCache.Path, "images")
+ if err := os.MkdirAll(imageCacheDir, 0755); err != nil {
+ return "", false, fmt.Errorf("couldn't create images folder: %v", err)
+ }
+ cachedImagePath := filepath.Join(imageCacheDir, filename)
+ tempImagePath := cachedImagePath + ".tmp"
+
+ // Already cached?
+ if _, err := os.Stat(cachedImagePath); err == nil {
+ return cachedImagePath, true, nil
+ }
+
+ cachingImagesMu.Lock()
+ if _, exists := cachingImages[imageURL]; !exists {
+ cachingImages[imageURL] = &sync.Mutex{}
+ }
+ mu := cachingImages[imageURL]
+ cachingImagesMu.Unlock()
+
+ mu.Lock()
+ defer mu.Unlock()
+
+ // Recheck after lock
+ if _, err := os.Stat(cachedImagePath); err == nil {
+ return cachedImagePath, true, nil
+ }
+
+ cachingSemaphore <- struct{}{}
+ defer func() { <-cachingSemaphore }()
+
+ var data []byte
+ var contentType string
+
+ // Handle data URLs
+ if strings.HasPrefix(imageURL, "data:") {
+ commaIndex := strings.Index(imageURL, ",")
+ if commaIndex == -1 {
+ recordInvalidImageID(imageID)
+ return "", false, fmt.Errorf("invalid data URL: no comma")
+ }
+ headerPart := imageURL[:commaIndex]
+ dataPart := imageURL[commaIndex+1:]
+
+ mediaType := "text/plain"
+ base64Encoded := false
+ if strings.HasPrefix(headerPart, "data:") {
+ mediaTypePart := headerPart[5:]
+ mediaTypeParts := strings.SplitN(mediaTypePart, ";", 2)
+ mediaType = mediaTypeParts[0]
+ if len(mediaTypeParts) > 1 {
+ for _, param := range strings.Split(mediaTypeParts[1], ";") {
+ param = strings.TrimSpace(param)
+ if param == "base64" {
+ base64Encoded = true
+ }
+ }
+ }
+ }
+
+ if base64Encoded {
+ data, _ = base64.StdEncoding.DecodeString(dataPart)
+ } else {
+ decodedStr, err := url.QueryUnescape(dataPart)
+ if err != nil {
+ data = []byte(dataPart)
+ } else {
+ data = []byte(decodedStr)
+ }
+ }
+
+ contentType = mediaType
+ } else {
+ // Download from HTTP URL
+ client := &http.Client{
+ Timeout: 15 * time.Second,
+ Transport: &http.Transport{
+ TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
+ },
+ }
+
+ req, err := http.NewRequest("GET", imageURL, nil)
+ if err != nil {
+ recordInvalidImageID(imageID)
+ return "", false, err
+ }
+
+ // Add User-Agent
+ userAgent, err := GetUserAgent("Text-Search-Brave")
+ if err != nil {
+ printWarn("Error getting User-Agent: %v", err)
+ }
+ req.Header.Set("User-Agent", userAgent)
+
+ resp, err := client.Do(req)
+ if err != nil {
+ recordInvalidImageID(imageID)
+ return "", false, err
+ }
+ defer resp.Body.Close()
+
+ data, err = io.ReadAll(resp.Body)
+ if err != nil {
+ recordInvalidImageID(imageID)
+ return "", false, err
+ }
+
+ contentType = http.DetectContentType(data)
+ }
+
+ if !strings.HasPrefix(contentType, "image/") {
+ recordInvalidImageID(imageID)
+ return "", false, fmt.Errorf("URL did not return an image: %s", imageURL)
+ }
+
+ // SVG special case
+ if contentType == "image/svg+xml" {
+ err := os.WriteFile(tempImagePath, data, 0644)
+ if err != nil {
+ recordInvalidImageID(imageID)
+ return "", false, err
+ }
+ err = os.Rename(tempImagePath, cachedImagePath)
+ if err != nil {
+ recordInvalidImageID(imageID)
+ return "", false, err
+ }
+ cachingImagesMu.Lock()
+ delete(cachingImages, imageURL)
+ cachingImagesMu.Unlock()
+ return cachedImagePath, true, nil
+ }
+
+ // Decode image
+ img, err := safeDecodeImage(contentType, data)
+ if err != nil {
+ printErr("Failed to decode favicon: %s [%s] (%v)", imageURL, imageID, err)
+ recordInvalidImageID(imageID)
+ return "", false, err
+ }
+
+ // Resize
+ maxSize := 16
+ width := img.Bounds().Dx()
+ height := img.Bounds().Dy()
+
+ if width > maxSize || height > maxSize {
+ dst := image.NewRGBA(image.Rect(0, 0, maxSize, maxSize))
+ draw.ApproxBiLinear.Scale(dst, dst.Bounds(), img, img.Bounds(), draw.Over, nil)
+ img = dst
+ }
+
+ // Save as WebP
+ outFile, err := os.Create(tempImagePath)
+ if err != nil {
+ recordInvalidImageID(imageID)
+ return "", false, err
+ }
+ defer outFile.Close()
+
+ options := &webp.Options{Lossless: false, Quality: 80}
+ err = webp.Encode(outFile, img, options)
+ if err != nil {
+ recordInvalidImageID(imageID)
+ return "", false, err
+ }
+
+ err = os.Rename(tempImagePath, cachedImagePath)
+ if err != nil {
+ recordInvalidImageID(imageID)
+ return "", false, err
+ }
+
+ cachingImagesMu.Lock()
+ delete(cachingImages, imageURL)
+ cachingImagesMu.Unlock()
+
+ return cachedImagePath, true, nil
+}
diff --git a/files-nyaa.go b/files-nyaa.go
new file mode 100644
index 0000000..db9f04a
--- /dev/null
+++ b/files-nyaa.go
@@ -0,0 +1,107 @@
+package main
+
+import (
+ "fmt"
+ "net/http"
+ "net/url"
+ "strings"
+
+ "github.com/PuerkitoBio/goquery"
+)
+
// NYAA_DOMAIN is the host used to build nyaa search URLs.
const NYAA_DOMAIN = "nyaa.si"

// Nyaa is the nyaa.si torrent search engine; it is assigned to a
// TorrentSite slot in initFileEngines and queried via Search.
type Nyaa struct{}

// NewNyaa returns a new nyaa.si engine instance.
func NewNyaa() *Nyaa {
	return &Nyaa{}
}

// Name returns the engine's identifier string.
func (n *Nyaa) Name() string {
	return "nyaa"
}
+
+func (n *Nyaa) getCategoryCode(cat string) string {
+ switch cat {
+ case "all":
+ return ""
+ case "anime":
+ return "&c=1_0"
+ case "music":
+ return "&c=2_0"
+ case "game":
+ return "&c=6_2"
+ case "software":
+ return "&c=6_1"
+ default:
+ return "ignore"
+ }
+}
+
// Search queries nyaa.si for torrents matching query in the given category
// and scrapes the HTML listing table into TorrentResult values. Categories
// this engine does not support yield an empty result set without issuing a
// request. The HTTP request goes through DoMetaProxyRequest so the MetaProxy
// configuration is honored.
func (n *Nyaa) Search(query string, category string) ([]TorrentResult, error) {
	categoryCode := n.getCategoryCode(category)
	if categoryCode == "ignore" {
		return []TorrentResult{}, nil
	}

	searchURL := fmt.Sprintf("https://%s/?f=0&q=%s%s", NYAA_DOMAIN, url.QueryEscape(query), categoryCode)

	userAgent, err := GetUserAgent("files-nyaa")
	if err != nil {
		return nil, err
	}

	req, err := http.NewRequest("GET", searchURL, nil)
	if err != nil {
		return nil, err
	}
	req.Header.Set("User-Agent", userAgent)

	resp, err := DoMetaProxyRequest(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("unexpected status: %d", resp.StatusCode)
	}

	doc, err := goquery.NewDocumentFromReader(resp.Body)
	if err != nil {
		return nil, err
	}

	var results []TorrentResult
	// Each result row carries one of these CSS classes; rows with fewer than
	// 7 cells are malformed and skipped.
	doc.Find(".default, .success, .danger").Each(func(i int, s *goquery.Selection) {
		tds := s.Find("td")
		if tds.Length() < 7 {
			return
		}

		// Column layout (0-based): 1 = title, 2 = links (magnet last),
		// 3 = size, 5 = seeders, 6 = leechers.
		title := tds.Eq(1).Find("a").Last().Text()
		magnet, _ := tds.Eq(2).Find("a").Last().Attr("href")
		sizeStr := strings.TrimSpace(tds.Eq(3).Text())
		byteSize := parseSize(sizeStr)

		seeders := parseInt(tds.Eq(5).Text())
		leechers := parseInt(tds.Eq(6).Text())

		results = append(results, TorrentResult{
			URL:      "https://" + NYAA_DOMAIN,
			Title:    title,
			Magnet:   applyTrackers(magnet),
			Size:     formatSize(byteSize),
			Seeders:  seeders,
			Leechers: leechers,
			Views:    0,
		})
	})

	// Reverse the slice so results run newest to oldest; the overall
	// ordering is still only approximate.
	for i, j := 0, len(results)-1; i < j; i, j = i+1, j-1 {
		results[i], results[j] = results[j], results[i]
	}

	return results, nil
}
diff --git a/files-thepiratebay.go b/files-thepiratebay.go
index b98ee27..3045bf6 100644
--- a/files-thepiratebay.go
+++ b/files-thepiratebay.go
@@ -57,31 +57,34 @@ func (t *ThePirateBay) Search(query string, category string) ([]TorrentResult, e
return []TorrentResult{}, nil
}
- url := fmt.Sprintf("https://%s/q.php?q=%s&cat=%s", PIRATEBAY_DOMAIN, url.QueryEscape(query), categoryCode)
+ searchURL := fmt.Sprintf("https://%s/q.php?q=%s&cat=%s", PIRATEBAY_DOMAIN, url.QueryEscape(query), categoryCode)
// User Agent generation
userAgent, err := GetUserAgent("files-tpb")
if err != nil {
- fmt.Println("Error:", err)
- return nil, err
+ return nil, fmt.Errorf("error generating User-Agent: %w", err)
}
- req, err := http.NewRequest("GET", url, nil)
+ req, err := http.NewRequest("GET", searchURL, nil)
if err != nil {
- return nil, err
+ return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("User-Agent", userAgent)
- client := &http.Client{}
- response, err := client.Do(req)
+ // Perform the request using MetaProxy if enabled
+ resp, err := DoMetaProxyRequest(req)
if err != nil {
- return nil, err
+ return nil, fmt.Errorf("error making request to The Pirate Bay: %w", err)
+ }
+ defer resp.Body.Close()
+
+ if resp.StatusCode != http.StatusOK {
+ return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
}
- defer response.Body.Close()
var torrentData []map[string]interface{}
- if err := json.NewDecoder(response.Body).Decode(&torrentData); err != nil {
- return nil, err
+ if err := json.NewDecoder(resp.Body).Decode(&torrentData); err != nil {
+ return nil, fmt.Errorf("error decoding response JSON: %w", err)
}
var results []TorrentResult
diff --git a/files-torrentgalaxy.go b/files-torrentgalaxy.go
index 51f51ca..5bcd05e 100644
--- a/files-torrentgalaxy.go
+++ b/files-torrentgalaxy.go
@@ -62,18 +62,17 @@ func (tg *TorrentGalaxy) Search(query string, category string) ([]TorrentResult,
// User Agent generation
userAgent, err := GetUserAgent("files-torrentgalaxy")
if err != nil {
- fmt.Println("Error:", err)
- return nil, err
+ return nil, fmt.Errorf("error generating User-Agent: %w", err)
}
req, err := http.NewRequest("GET", searchURL, nil)
if err != nil {
- return nil, err
+ return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("User-Agent", userAgent)
- client := &http.Client{}
- resp, err := client.Do(req)
+ // Perform the request using MetaProxy if enabled
+ resp, err := DoMetaProxyRequest(req)
if err != nil {
return nil, fmt.Errorf("error making request to TorrentGalaxy: %w", err)
}
diff --git a/files.go b/files.go
index d0c1ff1..1c71940 100755
--- a/files.go
+++ b/files.go
@@ -28,13 +28,25 @@ var (
rutor TorrentSite
)
-var fileResultsChan = make(chan []TorrentResult)
+func initFileEngines() {
-func init() {
- torrentGalaxy = NewTorrentGalaxy()
- // nyaa = NewNyaa()
- thePirateBay = NewThePirateBay()
- // rutor = NewRutor()
+ torrentGalaxy = nil
+ thePirateBay = nil
+ nyaa = nil
+ // rutor = nil
+
+ for _, engineName := range config.MetaSearch.Files {
+ switch engineName {
+ case "TorrentGalaxy":
+ torrentGalaxy = NewTorrentGalaxy()
+ case "ThePirateBay":
+ thePirateBay = NewThePirateBay()
+ case "Nyaa":
+ nyaa = NewNyaa()
+ // case "Rutor":
+ // rutor = NewRutor()
+ }
+ }
}
func handleFileSearch(w http.ResponseWriter, settings UserSettings, query string, page int) {
@@ -52,7 +64,7 @@ func handleFileSearch(w http.ResponseWriter, settings UserSettings, query string
data := map[string]interface{}{
"Results": combinedResults,
"Query": query,
- "Fetched": fmt.Sprintf("%.2f %s", elapsedTime.Seconds(), Translate("seconds")), // Time for fetching results
+ "Fetched": FormatElapsedTime(elapsedTime),
"Category": "all",
"Sort": "seed",
"Page": page,
@@ -88,7 +100,7 @@ func getFileResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string,
case results := <-cacheChan:
if results == nil {
// Fetch only if the cache miss occurs and Crawler is enabled
- if config.CrawlerEnabled {
+ if config.MetaSearchEnabled {
combinedResults = fetchFileResults(query, safe, lang, page)
if len(combinedResults) > 0 {
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
@@ -97,12 +109,12 @@ func getFileResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string,
printDebug("Crawler disabled; skipping fetching.")
}
} else {
- _, torrentResults, _, _ := convertToSpecificResults(results)
+ _, torrentResults, _, _, _ := convertToSpecificResults(results)
combinedResults = torrentResults
}
case <-time.After(2 * time.Second):
printDebug("Cache check timeout")
- if config.CrawlerEnabled {
+ if config.MetaSearchEnabled {
combinedResults = fetchFileResults(query, safe, lang, page)
if len(combinedResults) > 0 {
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
@@ -117,13 +129,13 @@ func getFileResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string,
func fetchFileResults(query, safe, lang string, page int) []TorrentResult {
// If Crawler is disabled, skip fetching from torrent sites
- if !config.CrawlerEnabled {
+ if !config.MetaSearchEnabled {
printInfo("Crawler is disabled; skipping torrent site fetching.")
return []TorrentResult{}
}
sites := []TorrentSite{torrentGalaxy, nyaa, thePirateBay, rutor}
- results := []TorrentResult{}
+ var results []TorrentResult
for _, site := range sites {
if site == nil {
@@ -140,11 +152,6 @@ func fetchFileResults(query, safe, lang string, page int) []TorrentResult {
}
}
- if len(results) == 0 {
- printWarn("No file results found for query: %s, trying other nodes", query)
- results = tryOtherNodesForFileSearch(query, safe, lang, page, []string{hostID})
- }
-
return results
}
@@ -167,33 +174,34 @@ func parseSize(sizeStr string) int64 {
return 0
}
- // Use regex to extract numeric value and unit separately
- re := regexp.MustCompile(`(?i)([\d.]+)\s*([KMGT]?B)`)
+ re := regexp.MustCompile(`(?i)([\d.]+)\s*(K?M?G?T?i?B)`)
matches := re.FindStringSubmatch(sizeStr)
if len(matches) < 3 {
printWarn("Error parsing size: invalid format %s", sizeStr)
return 0
}
- sizeStr = matches[1]
+ numStr := matches[1]
unit := strings.ToUpper(matches[2])
var multiplier int64 = 1
switch unit {
- case "KB":
+ case "B":
+ multiplier = 1
+ case "KB", "KIB":
multiplier = 1024
- case "MB":
+ case "MB", "MIB":
multiplier = 1024 * 1024
- case "GB":
+ case "GB", "GIB":
multiplier = 1024 * 1024 * 1024
- case "TB":
+ case "TB", "TIB":
multiplier = 1024 * 1024 * 1024 * 1024
default:
printWarn("Unknown unit: %s", unit)
return 0
}
- size, err := strconv.ParseFloat(sizeStr, 64)
+ size, err := strconv.ParseFloat(numStr, 64)
if err != nil {
printWarn("Error parsing size: %v", err)
return 0
@@ -219,16 +227,16 @@ func applyTrackers(magnetLink string) string {
}
func formatSize(size int64) string {
- if size >= 1024*1024*1024*1024 {
- return fmt.Sprintf("%.2f TB", float64(size)/(1024*1024*1024*1024))
- } else if size >= 1024*1024*1024 {
- return fmt.Sprintf("%.2f GB", float64(size)/(1024*1024*1024))
- } else if size >= 1024*1024 {
- return fmt.Sprintf("%.2f MB", float64(size)/(1024*1024))
- } else if size >= 1024 {
- return fmt.Sprintf("%.2f KB", float64(size)/1024)
+ const unit = 1024
+ if size < unit {
+ return fmt.Sprintf("%d B", size)
}
- return fmt.Sprintf("%d B", size)
+ div, exp := unit, 0
+ for n := size / unit; n >= unit; n /= unit {
+ div *= unit
+ exp++
+ }
+ return fmt.Sprintf("%.1f %siB", float64(size)/float64(div), []string{"K", "M", "G", "T", "P", "E"}[exp])
}
func sanitizeFileName(name string) string {
@@ -238,12 +246,3 @@ func sanitizeFileName(name string) string {
sanitized = regexp.MustCompile(`[^a-zA-Z0-9\-\(\)]`).ReplaceAllString(sanitized, "")
return sanitized
}
-
-func contains(slice []string, item string) bool {
- for _, v := range slice {
- if v == item {
- return true
- }
- }
- return false
-}
diff --git a/forums.go b/forums.go
index bd57e55..a32bb66 100755
--- a/forums.go
+++ b/forums.go
@@ -3,54 +3,57 @@ package main
import (
"encoding/json"
"fmt"
- "math"
"net/http"
"net/url"
"time"
)
func PerformRedditSearch(query string, safe string, page int) ([]ForumSearchResult, error) {
- if !config.CrawlerEnabled {
+ if !config.MetaSearchEnabled {
printDebug("Crawler is disabled; skipping forum search.")
return []ForumSearchResult{}, nil
}
const (
- pageSize = 25
- baseURL = "https://www.reddit.com"
- maxRetries = 5
- initialBackoff = 2 * time.Second
+ pageSize = 25
+ baseURL = "https://www.reddit.com"
)
+
var results []ForumSearchResult
+ offset := page * pageSize
+ searchURL := fmt.Sprintf("%s/search.json?q=%s&limit=%d&start=%d",
+ baseURL,
+ url.QueryEscape(query),
+ pageSize,
+ offset,
+ )
- searchURL := fmt.Sprintf("%s/search.json?q=%s&limit=%d&start=%d", baseURL, url.QueryEscape(query), pageSize, page*pageSize)
- var resp *http.Response
- var err error
-
- // Retry logic with exponential backoff
- for i := 0; i <= maxRetries; i++ {
- resp, err = http.Get(searchURL)
- if err != nil {
- return nil, fmt.Errorf("making request: %v", err)
- }
- if resp.StatusCode != http.StatusTooManyRequests {
- break
- }
-
- // Wait for some time before retrying
- backoff := time.Duration(math.Pow(2, float64(i))) * initialBackoff
- time.Sleep(backoff)
+ // Create request
+ req, err := http.NewRequest("GET", searchURL, nil)
+ if err != nil {
+ return nil, fmt.Errorf("creating request: %v", err)
}
+ // Set User-Agent
+ userAgent, uaErr := GetUserAgent("Reddit-Forum-Search")
+ if uaErr != nil {
+ return nil, fmt.Errorf("getting user agent: %v", uaErr)
+ }
+ req.Header.Set("User-Agent", userAgent)
+
+ // Make request using MetaProxy logic
+ resp, err := DoMetaProxyRequest(req)
if err != nil {
return nil, fmt.Errorf("making request: %v", err)
}
defer resp.Body.Close()
+ // Validate response status
if resp.StatusCode != http.StatusOK {
return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
}
+ // Parse JSON response
var searchResults map[string]interface{}
if err := json.NewDecoder(resp.Body).Decode(&searchResults); err != nil {
return nil, fmt.Errorf("decoding response: %v", err)
@@ -66,9 +69,9 @@ func PerformRedditSearch(query string, safe string, page int) ([]ForumSearchResu
return nil, fmt.Errorf("no children field in data")
}
+ // Extract search results
for _, post := range posts {
postData := post.(map[string]interface{})["data"].(map[string]interface{})
-
if safe == "active" && postData["over_18"].(bool) {
continue
}
@@ -78,6 +81,7 @@ func PerformRedditSearch(query string, safe string, page int) ([]ForumSearchResu
if len(description) > 500 {
description = description[:500] + "..."
}
+
publishedDate := time.Unix(int64(postData["created_utc"].(float64)), 0)
permalink := postData["permalink"].(string)
resultURL := fmt.Sprintf("%s%s", baseURL, permalink)
@@ -116,7 +120,7 @@ func handleForumsSearch(w http.ResponseWriter, settings UserSettings, query stri
"Query": query,
"Results": results,
"Page": page,
- "Fetched": fmt.Sprintf("%.2f %s", elapsedTime.Seconds(), Translate("seconds")), // Time for fetching results
+ "Fetched": FormatElapsedTime(elapsedTime),
"HasPrevPage": page > 1,
"HasNextPage": len(results) >= 25,
"NoResults": len(results) == 0,
@@ -131,6 +135,15 @@ func handleForumsSearch(w http.ResponseWriter, settings UserSettings, query stri
renderTemplate(w, "forums.html", data)
}
+func fetchForumResults(query, safe, lang string, page int) []ForumSearchResult {
+ results, err := PerformRedditSearch(query, safe, page)
+ if err != nil {
+ printWarn("Failed to fetch forum results: %v", err)
+ return nil
+ }
+ return results
+}
+
func getForumResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page int) []ForumSearchResult {
cacheChan := make(chan []SearchResult)
var combinedResults []ForumSearchResult
@@ -150,7 +163,7 @@ func getForumResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
case results := <-cacheChan:
if results == nil {
// Fetch only if the cache miss occurs and Crawler is enabled
- if config.CrawlerEnabled {
+ if config.MetaSearchEnabled {
combinedResults = fetchForumResults(query, safe, lang, page)
if len(combinedResults) > 0 {
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
@@ -164,7 +177,7 @@ func getForumResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
}
case <-time.After(2 * time.Second):
printDebug("Cache check timeout")
- if config.CrawlerEnabled {
+ if config.MetaSearchEnabled {
combinedResults = fetchForumResults(query, safe, lang, page)
if len(combinedResults) > 0 {
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
diff --git a/go.mod b/go.mod
index f7d89ad..b088e24 100644
--- a/go.mod
+++ b/go.mod
@@ -17,6 +17,7 @@ require (
github.com/blevesearch/bleve/v2 v2.4.4
github.com/chromedp/cdproto v0.0.0-20241022234722-4d5d5faf59fb
github.com/chromedp/chromedp v0.11.2
+ github.com/fyne-io/image v0.1.1
github.com/go-shiori/go-readability v0.0.0-20241012063810-92284fa8a71f
golang.org/x/net v0.33.0
)
@@ -55,11 +56,11 @@ require (
github.com/golang/snappy v0.0.4 // indirect
github.com/josharian/intern v1.0.0 // indirect
github.com/json-iterator/go v1.1.12 // indirect
+ github.com/jsummers/gobmp v0.0.0-20230614200233-a9de23ed2e25 // indirect
github.com/mailru/easyjson v0.7.7 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/mschoch/smat v0.2.0 // indirect
- github.com/stretchr/testify v1.9.0 // indirect
github.com/yusufpapurcu/wmi v1.2.4 // indirect
go.etcd.io/bbolt v1.3.11 // indirect
golang.org/x/sys v0.28.0 // indirect
diff --git a/go.sum b/go.sum
index 66cede6..752f0ed 100644
--- a/go.sum
+++ b/go.sum
@@ -56,6 +56,8 @@ github.com/chromedp/sysutil v1.1.0/go.mod h1:WiThHUdltqCNKGc4gaU50XgYjwjYIhKWoHG
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/fyne-io/image v0.1.1 h1:WH0z4H7qfvNUw5l4p3bC1q70sa5+YWVt6HCj7y4VNyA=
+github.com/fyne-io/image v0.1.1/go.mod h1:xrfYBh6yspc+KjkgdZU/ifUC9sPA5Iv7WYUBzQKK7JM=
github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0=
github.com/go-ole/go-ole v1.3.0 h1:Dt6ye7+vXGIKZ7Xtk4s6/xVdGDQynvom7xCFEdWr6uE=
github.com/go-ole/go-ole v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78=
@@ -84,6 +86,8 @@ github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8Hm
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
+github.com/jsummers/gobmp v0.0.0-20230614200233-a9de23ed2e25 h1:YLvr1eE6cdCqjOe972w/cYF+FjW34v27+9Vo5106B4M=
+github.com/jsummers/gobmp v0.0.0-20230614200233-a9de23ed2e25/go.mod h1:kLgvv7o6UM+0QSf0QjAse3wReFDsb9qbZJdfexWlrQw=
github.com/ledongthuc/pdf v0.0.0-20220302134840-0c2507a12d80 h1:6Yzfa6GP0rIo/kULo2bwGEkFvCePZ3qHDDTC3/J9Swo=
github.com/ledongthuc/pdf v0.0.0-20220302134840-0c2507a12d80/go.mod h1:imJHygn/1yfhB7XSJJKlFZKl/J+dCPAknuiaGOshXAs=
github.com/leonelquinteros/gotext v1.7.0 h1:jcJmF4AXqyamP7vuw2MMIKs+O3jAEmvrc5JQiI8Ht/8=
@@ -111,8 +115,8 @@ github.com/shirou/gopsutil v3.21.11+incompatible/go.mod h1:5b4v6he4MtMOwMlS0TUMT
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
-github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
-github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
+github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
+github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0=
github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0=
diff --git a/ia-calc.go b/ia-calc.go
new file mode 100644
index 0000000..1e629e8
--- /dev/null
+++ b/ia-calc.go
@@ -0,0 +1,82 @@
+package main
+
import (
	"fmt"
	"math"
	"regexp"
	"strconv"
	"strings"
)
+
+// Enhanced math expression parser
+func parseMathExpression(query string) (string, bool) {
+ // Clean and normalize the expression
+ query = strings.ReplaceAll(query, " ", "")
+ query = strings.ReplaceAll(query, ",", "")
+
+ // Regex to match valid math expressions
+ mathRegex := regexp.MustCompile(`^\d+(\.\d+)?([\+\-\*/\^]\d+(\.\d+)?)+$`)
+ if !mathRegex.MatchString(query) {
+ return "", false
+ }
+
+ // Operator precedence handling
+ operators := []struct {
+ symbol string
+ apply func(float64, float64) float64
+ }{
+ {"^", func(a, b float64) float64 {
+ result := 1.0
+ for i := 0; i < int(b); i++ {
+ result *= a
+ }
+ return result
+ }},
+ {"*", func(a, b float64) float64 { return a * b }},
+ {"/", func(a, b float64) float64 { return a / b }},
+ {"+", func(a, b float64) float64 { return a + b }},
+ {"-", func(a, b float64) float64 { return a - b }},
+ }
+
+ // Parse numbers and operators
+ var tokens []interface{}
+ current := ""
+ for _, char := range query {
+ if char >= '0' && char <= '9' || char == '.' {
+ current += string(char)
+ } else {
+ if current != "" {
+ num, _ := strconv.ParseFloat(current, 64)
+ tokens = append(tokens, num)
+ current = ""
+ }
+ tokens = append(tokens, string(char))
+ }
+ }
+ if current != "" {
+ num, _ := strconv.ParseFloat(current, 64)
+ tokens = append(tokens, num)
+ }
+
+ // Evaluate expression with operator precedence
+ for _, op := range operators {
+ for i := 1; i < len(tokens)-1; i += 2 {
+ if operator, ok := tokens[i].(string); ok && operator == op.symbol {
+ left := tokens[i-1].(float64)
+ right := tokens[i+1].(float64)
+ result := op.apply(left, right)
+
+ // Update tokens
+ tokens = append(tokens[:i-1], tokens[i+2:]...)
+ tokens = append(tokens[:i-1], append([]interface{}{result}, tokens[i-1:]...)...)
+ i -= 2 // Adjust index after modification
+ }
+ }
+ }
+
+ // Format result
+ result := tokens[0].(float64)
+ if result == float64(int(result)) {
+ return fmt.Sprintf("%d", int(result)), true
+ }
+ return fmt.Sprintf("%.2f", result), true
+}
diff --git a/ia-currency.go b/ia-currency.go
new file mode 100644
index 0000000..2f1de37
--- /dev/null
+++ b/ia-currency.go
@@ -0,0 +1,380 @@
+package main
+
+import (
+ "encoding/json"
+ "fmt"
+ "io"
+ "log"
+ "net/http"
+ "regexp"
+ "strconv"
+ "strings"
+ "sync"
+ "time"
+)
+
+// ExchangeRateCache holds currency rates with automatic refresh
+var (
+ exchangeRates = make(map[string]float64)
+ nextUpdateTime time.Time
+ exchangeCacheMutex sync.RWMutex
+ allCurrencies []string
+)
+
// CurrencyAPIResponse models the `rates` object of an exchange-rate API
// response, mapping ISO 4217 currency codes to their USD-relative rates.
// NOTE(review): UpdateExchangeRates decodes into its own inline struct and
// does not use this type — confirm whether it is still needed.
type CurrencyAPIResponse struct {
	Rates map[string]float64 `json:"rates"`
}
+
+// UpdateExchangeRates fetches and caches currency rates
+func UpdateExchangeRates() error {
+ exchangeCacheMutex.Lock()
+ defer exchangeCacheMutex.Unlock()
+
+ // Use a reliable free API with good rate limits
+ resp, err := http.Get("https://open.er-api.com/v6/latest/USD")
+ if err != nil {
+ return err
+ }
+ defer resp.Body.Close()
+
+ body, err := io.ReadAll(resp.Body)
+ if err != nil {
+ return err
+ }
+
+ var apiResponse struct {
+ Result string `json:"result"`
+ Rates map[string]float64 `json:"rates"`
+ TimeNextUpdateUnix int64 `json:"time_next_update_unix"`
+ }
+ if err := json.Unmarshal(body, &apiResponse); err != nil {
+ return err
+ }
+
+ if apiResponse.Result != "success" {
+ return fmt.Errorf("API error: %s", apiResponse.Result)
+ }
+
+ // Cache the rates
+ exchangeRates = apiResponse.Rates
+ nextUpdateTime = time.Unix(apiResponse.TimeNextUpdateUnix, 0)
+
+ // Update list of all currencies
+ allCurrencies = make([]string, 0, len(exchangeRates))
+ for currency := range exchangeRates {
+ allCurrencies = append(allCurrencies, currency)
+ }
+
+ printDebug("Updated currency rates: %d currencies cached", len(allCurrencies))
+ return nil
+}
+
+// PrecacheAllCurrencyPairs pre-caches conversion rates for all currency pairs
+func PrecacheAllCurrencyPairs() {
+ exchangeCacheMutex.RLock()
+ defer exchangeCacheMutex.RUnlock()
+
+ if len(exchangeRates) == 0 {
+ log.Println("Skipping precache - no rates available")
+ return
+ }
+
+ printDebug("Precaching all currency pairs (%d total)", len(exchangeRates))
+
+ for from := range exchangeRates {
+ for to := range exchangeRates {
+ if from == to {
+ continue
+ }
+ // Cache the cross-rate
+ GetExchangeRate(from, to)
+ }
+ }
+
+ printDebug("All currency pairs precached")
+}
+
+// GetExchangeRate gets the current exchange rate with caching
+func GetExchangeRate(from, to string) (float64, bool) {
+ // Auto-update cache if expired
+ if time.Now().After(nextUpdateTime) {
+ err := UpdateExchangeRates()
+ if err != nil {
+ printWarn("Currency update failed: %v", err)
+ // Postpone next attempt to avoid hammering API
+ nextUpdateTime = time.Now().Add(5 * time.Minute)
+ }
+ }
+
+ exchangeCacheMutex.RLock()
+ defer exchangeCacheMutex.RUnlock()
+
+ // Handle same currency
+ if from == to {
+ return 1, true
+ }
+
+ // Convert via USD if direct rate not available
+ fromRate, fromExists := exchangeRates[from]
+ toRate, toExists := exchangeRates[to]
+
+ if !fromExists || !toExists {
+ return 0, false
+ }
+
+ // Calculate cross rate: (1 USD / fromRate) * toRate
+ return toRate / fromRate, true
+}
+
// ParseCurrencyConversion detects a currency-conversion query such as
// "100 usd to eur" and extracts its parts.
//
// Returns (amount, fromCurrency, toCurrency, true) when the query matches a
// "<amount> <currency> <conversion phrase> <currency>" pattern and both
// normalized currency codes exist in the cached exchange-rate table;
// otherwise ok == false. Requires UpdateExchangeRates to have populated the
// cache — with an empty cache every query fails validation.
func ParseCurrencyConversion(query string) (float64, string, string, bool) {
	// Connector phrases accepted between the two currencies, across many
	// languages plus symbolic forms. Duplicates across languages are
	// harmless: they are joined into one regex alternation below.
	conversionPhrases := []string{
		// Universal/math
		"➞", "→", "⇒", ">", "->", "=", "≈", "~", ":", "≡",
		// English
		"to", "in", "into", "as", "equals", "equal to", "equals to", "is", "becomes", "be", "makes", "converted to", "convert to", "convert into", "converted into",
		"exchange for", "exchanged for", "value in", "as currency", "convert", "equivalent to", "same as", "is equal to", ">", "gives", "makes", "result is", "returns", "will be", "equals:", "is equivalent to", "≈", "~", ":",
		// German (DE)
		"auf", "in", "zu", "umrechnen in", "umrechnen zu", "als", "gleich", "ist", "ist gleich", "umwandeln in", "wird zu", "ergibt", "macht", "ist", "resultiert in", "gleichwertig mit",
		// Spanish (ES)
		"en", "a", "como", "igual a", "es", "es igual a", "es igual", "convertir a", "cambiar a", "valor en", "convierte en", "devuelve", "será", "equivale a", "es equivalente a",
		// French (FR)
		"vers", "en", "comme", "égal à", "est", "c'est", "convertir en", "changer en", "valeur en", "équivaut à", "sera", "fait", "rend", "est égal à", "équivalent à",
		// Italian (IT)
		"a", "in", "come", "uguale a", "è", "convertire in", "cambiare in", "valore in", "sarà", "fa", "equivale a", "è uguale a",
		// Portuguese (PT/BR)
		"para", "em", "como", "igual a", "é", "converter para", "trocar por", "valor em", "converte em", "vai ser", "faz", "equivale a", "é igual a", "é equivalente a",
		// Dutch (NL)
		"naar", "in", "als", "is gelijk aan", "is", "wordt", "omzetten naar", "waarde in", "gelijk aan", "is hetzelfde als",
		// Czech (CS)
		"na", "do", "jako", "rovná se", "je", "převést na", "výměna za", "hodnota v", "přepočet", "bude", "rovná", "je to", "je rovno", "je stejné jako",
		// Slovak (SK)
		"na", "do", "ako", "rovná sa", "je", "previesť na", "výměna za", "hodnota v", "prerátať", "bude", "rovná", "je to", "je rovné", "je rovnaké ako",
		// Polish (PL)
		"na", "w", "jako", "równa się", "jest", "przelicz na", "wymień na", "wartość w", "przelicza się na", "będzie", "to jest", "jest równy", "jest taki sam jak",
		// Russian (RU)
		"на", "в", "как", "равно", "есть", "конвертировать в", "обменять на", "значение в", "равняется", "будет", "это", "такое же как",
		// Ukrainian (UA)
		"на", "у", "як", "дорівнює", "є", "конвертувати у", "обміняти на", "значення в", "буде", "це", "таке саме як",
		// Croatian / Serbian / Bosnian / Slovenian (HR/SR/BS/SL)
		"na", "u", "za", "kao", "jednako", "je", "pretvori u", "zamijeniti za", "vrijednost u", "preračunaj u", "biti", "to je", "jednako kao", "je isto kao",
		"v", "kot", "je enako", "pretvoriti v", "zamenjati za", "vrednost v", "je isto kao", "je enakovredno",
		// Bulgarian (BG)
		"на", "в", "като", "равно на", "е", "преобразувай в", "обмени на", "стойност в", "ще бъде", "това е", "равностойно на",
		// Turkish (TR)
		"için", "olarak", "eşittir", "bu", "dönüştür to", "değiştir to", "değer olarak", "olur", "eşit", "bu olur", "aynı olarak",
		// Greek (EL)
		"σε", "ως", "ίσον", "είναι", "μετατροπή σε", "ανταλλαγή με", "τιμή σε", "θα είναι", "αυτό είναι", "ισοδυναμεί με", "ίσο με",
		// Chinese (Simplified and Traditional, ZH)
		"到", "变为", "換成", "转换为", "等于", "等於", "是", "为", "結果是", "相等於", "等同於", "一樣",
		// Japanese (JA)
		"に", "として", "等しい", "は", "に変換", "に交換", "の値", "は", "結果は", "となる", "同じ", "等価", "等しく",
		// Korean (KO)
		"으로", "같이", "같다", "이다", "로 변환", "교환하다", "값", "이 된다", "와 같다", "같음", "동일하다",
		// Arabic (AR)
		"إلى", "الى", "في", "كـ", "يساوي", "هو", "تحويل إلى", "قيمة في", "يصبح", "يساوي نفس", "تعادل", "تساوي",
		// Hebrew (HE)
		"ל", "ב", "בתור", "שווה ל", "הוא", "המר ל", "ערך ב", "יהיה", "אותו הדבר כמו", "זהה ל",
		// Romanian (RO)
		"la", "în", "ca", "egal cu", "este", "converti la", "schimbă în", "valoare în", "va fi", "este egal cu",
		// Hungarian (HU)
		"ra", "re", "ba", "be", "mint", "egyenlő", "az", "átvált", "értéke", "lesz", "ugyanaz mint",
		// Swedish (SE)
		"till", "i", "som", "är", "är lika med", "omvandla till", "värde i", "blir", "är samma som",
		// Danish (DK)
		"til", "i", "som", "er", "er lig med", "konverter til", "værdi i", "bliver", "er det samme som",
		// Norwegian (NO)
		"til", "i", "som", "er", "er lik", "konverter til", "verdi i", "blir", "er det samme som",
		// Finnish (FI)
		"ksi", "in", "kuin", "on", "on yhtä kuin", "muunna", "arvo", "tulee olemaan", "sama kuin",
		// Estonian (EE)
		"ks", "sisse", "nagu", "on", "on võrdne", "teisendada", "väärtus", "saab olema", "sama mis",
		// Latvian (LV)
		"uz", "iekš", "kā", "ir", "ir vienāds ar", "konvertēt uz", "vērtība", "būs", "tāpat kā",
		// Lithuanian (LT)
		"į", "kaip", "yra", "yra lygus", "konvertuoti į", "vertė", "bus", "tas pats kaip",
		// Persian (FA)
		"به", "در", "مثل", "برابر با", "است", "تبدیل به", "ارزش در", "خواهد بود", "همانند",
		// Hindi (HI)
		"को", "में", "के रूप में", "बराबर", "है", "में बदलें", "मूल्य में", "होगा", "के समान",
		// Thai (TH)
		"ไปที่", "ใน", "เป็น", "เท่ากับ", "คือ", "แปลงเป็น", "ค่าใน", "จะเป็น", "เท่ากัน",
		// Indonesian (ID)
		"ke", "dalam", "sebagai", "sama dengan", "adalah", "konversi ke", "nilai dalam", "akan menjadi", "sama dengan",
		// Vietnamese (VI)
		"thành", "trong", "là", "bằng", "là", "chuyển đổi thành", "giá trị trong", "sẽ là", "tương đương với",
		// Malay (MS)
		"kepada", "dalam", "sebagai", "sama dengan", "ialah", "tukar ke", "nilai dalam", "akan jadi", "setara dengan",
		// Filipino/Tagalog (TL)
		"sa", "sa loob ng", "bilang", "katumbas ng", "ay", "i-convert sa", "halaga sa", "magiging", "pareho sa",
	}

	// Build the OR group for all conversion phrases; each phrase is
	// regex-escaped so symbolic connectors like "->" match literally.
	var orGroup strings.Builder
	for i, phrase := range conversionPhrases {
		if i > 0 {
			orGroup.WriteString("|")
		}
		orGroup.WriteString(regexp.QuoteMeta(phrase))
	}
	// Capture groups: 1 = amount (digits, commas, optional decimals),
	// 2 = source-currency text, 3 = target-currency text.
	regexPattern := fmt.Sprintf(`(?i)([\d,]+(?:\.\d+)?)\s*([^\d,]+?)\s+(?:%s)\s+([^\d,]+)`, orGroup.String())
	re := regexp.MustCompile(regexPattern)
	matches := re.FindStringSubmatch(query)
	if len(matches) < 4 {
		return 0, "", "", false
	}

	// Clean and parse the amount ("1,234.5" -> 1234.5).
	amountStr := strings.ReplaceAll(matches[1], ",", "")
	amount, err := strconv.ParseFloat(amountStr, 64)
	if err != nil {
		return 0, "", "", false
	}

	// Lookup table from symbols, codes, and colloquial names (lowercase)
	// to ISO 4217 codes.
	currencyMap := map[string]string{
		// Major Global Currencies
		"$": "USD", "usd": "USD", "dollar": "USD", "dollars": "USD", "buck": "USD", "bucks": "USD", "us dollar": "USD", "american dollar": "USD", "freedom units": "USD",
		"€": "EUR", "eur": "EUR", "euro": "EUR", "euros": "EUR",
		"£": "GBP", "gbp": "GBP", "pound": "GBP", "pounds": "GBP", "sterling": "GBP", "quid": "GBP", "pound sterling": "GBP",
		"¥": "JPY", "jpy": "JPY", "yen": "JPY", "cn¥": "CNY", // Handle ¥ ambiguity with CN¥ for Chinese Yuan
		"₩": "KRW", "krw": "KRW", "won": "KRW", "korean won": "KRW",
		"₹": "INR", "inr": "INR", "rupee": "INR", "rupees": "INR", "indian rupee": "INR",
		"₽": "RUB", "rub": "RUB", "ruble": "RUB", "rubles": "RUB", "russian ruble": "RUB",

		// Americas
		"c$": "CAD", "cad": "CAD", "canadian dollar": "CAD", "loonie": "CAD",
		"a$": "AUD", "aud": "AUD", "australian dollar": "AUD", "aussie dollar": "AUD",
		"nz$": "NZD", "nzd": "NZD", "new zealand dollar": "NZD", "kiwi": "NZD", "kiwi dollar": "NZD",
		"r$": "BRL", "brl": "BRL", "real": "BRL", "reais": "BRL", "brazilian real": "BRL",
		"mx$": "MXN", "mxn": "MXN", "mexican peso": "MXN", "mexican pesos": "MXN",
		"col$": "COP", "cop": "COP", "colombian peso": "COP",
		"s/": "PEN", "pen": "PEN", "sol": "PEN", "soles": "PEN", "peruvian sol": "PEN",
		"clp$": "CLP", "clp": "CLP", "chilean peso": "CLP",
		"arg$": "ARS", "ars": "ARS", "argentine peso": "ARS",

		// Europe & CIS
		"chf": "CHF", "fr": "CHF", "swiss franc": "CHF", "franc suisse": "CHF",
		"sek": "SEK", "kr": "SEK", "swedish krona": "SEK", "swedish kronor": "SEK",
		"nok": "NOK", "norwegian krone": "NOK", "norwegian kroner": "NOK",
		"dkk": "DKK", "danish krone": "DKK", "danish kroner": "DKK",
		"zł": "PLN", "pln": "PLN", "zloty": "PLN", "polish zloty": "PLN",
		"tl": "TRY", "try": "TRY", "turkish lira": "TRY", "türk lirası": "TRY", "₺": "TRY",
		"huf": "HUF", "ft": "HUF", "forint": "HUF", "hungarian forint": "HUF",
		"czk": "CZK", "kč": "CZK", "czech koruna": "CZK",
		"ron": "RON", "lei": "RON", "romanian leu": "RON",
		"bgn": "BGN", "лв": "BGN", "bulgarian lev": "BGN",
		"uah": "UAH", "₴": "UAH", "hryvnia": "UAH", "ukrainian hryvnia": "UAH",
		"kzt": "KZT", "₸": "KZT", "tenge": "KZT", "kazakhstani tenge": "KZT",

		// Asia/Pacific
		"cny": "CNY", "rmb": "CNY", "yuan": "CNY", "renminbi": "CNY", "chinese yuan": "CNY",
		"hk$": "HKD", "hkd": "HKD", "hong kong dollar": "HKD",
		"s$": "SGD", "sgd": "SGD", "singapore dollar": "SGD",
		"nt$": "TWD", "twd": "TWD", "taiwan dollar": "TWD", "new taiwan dollar": "TWD",
		"฿": "THB", "thb": "THB", "baht": "THB", "thai baht": "THB",
		"rp": "IDR", "idr": "IDR", "rupiah": "IDR", "indonesian rupiah": "IDR",
		"₱": "PHP", "php": "PHP", "philippine peso": "PHP",
		"rm": "MYR", "myr": "MYR", "ringgit": "MYR", "malaysian ringgit": "MYR",
		"₫": "VND", "vnd": "VND", "dong": "VND", "vietnamese dong": "VND",
		"₭": "LAK", "lak": "LAK", "kip": "LAK", "lao kip": "LAK",
		"៛": "KHR", "khr": "KHR", "riel": "KHR", "cambodian riel": "KHR",

		// Middle East & Africa
		"₪": "ILS", "ils": "ILS", "shekel": "ILS", "new israeli shekel": "ILS",
		"﷼": "SAR", "sr": "SAR", "sar": "SAR", "riyal": "SAR", "saudi riyal": "SAR",
		"د.إ": "AED", "dh": "AED", "aed": "AED", "dirham": "AED", "uae dirham": "AED",
		"egp": "EGP", "e£": "EGP", "egyptian pound": "EGP",
		"zar": "ZAR", "r": "ZAR", "rand": "ZAR", "south african rand": "ZAR",
		"₦": "NGN", "ngn": "NGN", "naira": "NGN", "nigerian naira": "NGN",
	}

	// normalizeCurrency maps free-form currency text ("us dollar", "€",
	// "czk") to an ISO code, trying progressively looser matches.
	normalizeCurrency := func(input string) string {
		clean := strings.TrimSpace(strings.ToLower(input))
		clean = strings.Join(strings.Fields(clean), " ")
		// Direct map hit on the whole cleaned phrase.
		if mapped, ok := currencyMap[clean]; ok {
			return mapped
		}
		// Fuzzy match: suffix phrases ("the us dollar" -> "us dollar").
		words := strings.Fields(clean)
		for i := 0; i < len(words); i++ {
			sub := strings.Join(words[i:], " ")
			if mapped, ok := currencyMap[sub]; ok {
				return mapped
			}
		}
		// Fuzzy match: prefix phrases, reducing from the end.
		for i := len(words) - 1; i >= 0; i-- {
			sub := strings.Join(words[:i], " ")
			if mapped, ok := currencyMap[sub]; ok {
				return mapped
			}
		}
		// Trailing symbol fallback (e.g. "100usd" leaves "usd"… ).
		// NOTE(review): clean[len(clean)-1] indexes the last *byte*, so a
		// multi-byte symbol like "€" at the end will not match here —
		// confirm whether that path is ever taken.
		if len(clean) > 1 {
			if symbol, ok := currencyMap[string(clean[len(clean)-1])]; ok {
				return symbol
			}
		}
		// Three-letter strings: accept as an ISO code if the rate table
		// knows it. The deferred RUnlock runs when this closure returns,
		// before the caller's own RLock below.
		if len(clean) == 3 {
			upper := strings.ToUpper(clean)
			exchangeCacheMutex.RLock()
			defer exchangeCacheMutex.RUnlock()
			if _, exists := exchangeRates[upper]; exists {
				return upper
			}
		}
		return strings.ToUpper(input)
	}

	fromCurr := normalizeCurrency(matches[2])
	toCurr := normalizeCurrency(matches[3])

	// Validate both codes against the cached rate table; unknown codes
	// (or an unpopulated cache) reject the whole query.
	exchangeCacheMutex.RLock()
	defer exchangeCacheMutex.RUnlock()
	if _, fromExists := exchangeRates[fromCurr]; !fromExists {
		return 0, "", "", false
	}
	if _, toExists := exchangeRates[toCurr]; !toExists {
		return 0, "", "", false
	}

	return amount, fromCurr, toCurr, true
}
+
+// ConvertCurrency handles the actual conversion
+func ConvertCurrency(amount float64, from, to string) (float64, bool) {
+ if from == to {
+ return amount, true
+ }
+
+ rate, ok := GetExchangeRate(from, to)
+ if !ok {
+ // Try to find similar currencies
+ from = strings.ToUpper(from)
+ to = strings.ToUpper(to)
+
+ // Check if we have the currency in our list
+ exchangeCacheMutex.RLock()
+ defer exchangeCacheMutex.RUnlock()
+
+ _, fromExists := exchangeRates[from]
+ _, toExists := exchangeRates[to]
+
+ if !fromExists || !toExists {
+ return 0, false
+ }
+
+ // Shouldn't happen due to the check above, but just in case
+ return 0, false
+ }
+
+ return amount * rate, true
+}
diff --git a/ia-main.go b/ia-main.go
new file mode 100644
index 0000000..0e90477
--- /dev/null
+++ b/ia-main.go
@@ -0,0 +1,100 @@
+package main
+
+import (
+ "fmt"
+ "time"
+)
+
// InstantAnswer is a typed "instant answer" produced by detectInstantAnswer
// for rendering alongside regular search results.
type InstantAnswer struct {
	Type    string      // answer kind: "currency", "calc", "weather", "wiki", ...
	Title   string      // human-readable heading for the answer card
	Content interface{} // type-specific payload (map or string, keyed by Type)
}
+
// detectInstantAnswer runs the query through each instant-answer detector
// and returns the first match, or nil when none applies.
//
// Order matters and is most-specific first: currency conversion, then math
// evaluation, then weather, and finally a Wikipedia summary as the broadest
// (and most expensive — it performs an HTTP request) fallback.
func detectInstantAnswer(query string) *InstantAnswer {
	// Try currency conversion first (more specific)
	if amount, from, to, ok := ParseCurrencyConversion(query); ok {
		if result, ok := ConvertCurrency(amount, from, to); ok {
			return &InstantAnswer{
				Type:  "currency",
				Title: "Currency Conversion",
				Content: map[string]interface{}{
					"from":    from,
					"to":      to,
					"amount":  amount,
					"result":  result,
					"display": fmt.Sprintf("%.2f %s = %.2f %s", amount, from, result, to),
				},
			}
		}
	}

	// Try math expression
	if result, ok := parseMathExpression(query); ok {
		return &InstantAnswer{
			Type:    "calc",
			Title:   "Calculation Result",
			Content: result,
		}
	}

	// Try weather instant answer
	if city, forecast, ok := getWeatherForQuery(query); ok {
		return &InstantAnswer{
			Type:  "weather",
			Title: fmt.Sprintf("Weather in %s", city.Name),
			Content: map[string]interface{}{
				"city":     city.Name,
				"country":  city.Country,
				"lat":      city.Lat,
				"lon":      city.Lon,
				"current":  forecast.Current,
				"forecast": forecast.Forecast,
				// Display string assumes Celsius (Open-Meteo default units).
				"display": fmt.Sprintf("%.1f°C, %s", forecast.Current.Temperature, forecast.Current.Condition),
			},
		}
	}

	// Try Wikipedia search
	if title, text, link, ok := getWikipediaSummary(query); ok {
		return &InstantAnswer{
			Type:  "wiki",
			Title: title,
			Content: map[string]string{
				"text": text,
				"link": link,
			},
		}
	}

	// No detector matched.
	return nil
}
+
// initExchangeRates performs the initial synchronous exchange-rate load,
// warms related caches in the background, and starts a periodic refresh
// loop.
//
// Intended to be called once at startup: the 30-minute ticker is never
// stopped, so the refresh goroutine runs for the lifetime of the process.
func initExchangeRates() {
	// Initial synchronous load — callers can rely on the cache being
	// populated (or the failure logged) once this returns.
	if err := UpdateExchangeRates(); err != nil {
		printErr("Initial exchange rate update failed: %v", err)
	} else {
		PrecacheAllCurrencyPairs()
	}

	// Pre-cache common wiki terms in background. The return values are
	// discarded — presumably getWikipediaSummary caches internally or this
	// warms an HTTP-level cache; TODO confirm, otherwise this is wasted work.
	go func() {
		commonTerms := []string{"United States", "Europe", "Technology", "Science", "Mathematics"}
		for _, term := range commonTerms {
			getWikipediaSummary(term)
		}
	}()

	// Periodically update the cache for as long as the process lives.
	ticker := time.NewTicker(30 * time.Minute)
	go func() {
		for range ticker.C {
			if err := UpdateExchangeRates(); err != nil {
				printWarn("Periodic exchange rate update failed: %v", err)
			} else {
				PrecacheAllCurrencyPairs()
			}
		}
	}()
}
diff --git a/ia-weather.go b/ia-weather.go
new file mode 100644
index 0000000..5c9f532
--- /dev/null
+++ b/ia-weather.go
@@ -0,0 +1,367 @@
+package main
+
import (
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"net/url"
	"regexp"
	"strings"
	"unicode"

	"golang.org/x/text/unicode/norm"
)
+
// WeatherCity identifies a location resolved by the geocoding API.
type WeatherCity struct {
	Name    string  // resolved place name
	Country string  // country of the place
	Lat     float64 // latitude in decimal degrees
	Lon     float64 // longitude in decimal degrees
}

// WeatherCurrent holds the current conditions at a location.
type WeatherCurrent struct {
	Temperature float64 // temperature (°C per the display formatting elsewhere)
	Wind        float64 // wind speed (API field wind_speed_10m)
	Humidity    int     // relative humidity in percent
	Condition   string  // human-readable condition from weatherDescription
}

// WeatherDay is one day's min/max forecast.
type WeatherDay struct {
	Date      string  // ISO date string from the API's daily `time` array
	MinTemp   float64 // daily minimum temperature
	MaxTemp   float64 // daily maximum temperature
	Condition string  // human-readable condition from weatherDescription
}

// WeatherForecast bundles current conditions with the multi-day forecast.
type WeatherForecast struct {
	Current  WeatherCurrent
	Forecast []WeatherDay
}
+
// getWeatherForQuery decides whether query is a weather question, extracts
// the location, and fetches geocoded weather data for it.
//
// The query is lowercased and stripped of diacritics before matching, so
// keyword lists are compared in normalized form throughout. Returns
// ok == false when no weather keyword is present, no location can be
// extracted, or the geocoding/weather fetch fails.
func getWeatherForQuery(query string) (city WeatherCity, forecast WeatherForecast, ok bool) {
	// Expanded multi-language weather keywords (40+ languages/dialects)
	weatherWords := []string{
		// English
		"weather", "forecast", "temperature", "conditions", "meteorology", "outlook",
		// Czech/Slovak
		"počasí", "předpověď", "teplota", "vlhkost", "srážky", "vítr", "meteo",
		// German
		"wetter", "vorhersage", "temperatur", "wettervorhersage", "wetterbericht",
		// French
		"météo", "prévisions", "température", "conditions météo", "prévision météo",
		// Spanish
		"tiempo", "clima", "pronóstico", "temperatura", "meteorología", "previsión",
		// Italian
		"tempo", "meteo", "previsioni", "temperatura", "condizioni atmosferiche",
		// Portuguese
		"tempo", "clima", "previsão", "temperatura", "meteorologia",
		// Polish
		"pogoda", "prognoza", "temperatura", "warunki atmosferyczne",
		// Russian
		"погода", "прогноз", "температура", "метео", "метеопрогноз",
		// Ukrainian
		"погода", "прогноз", "температура", "метео",
		// Dutch
		"weer", "voorspelling", "temperatuur", "weersverwachting",
		// Scandinavian
		"väder", "prognos", "temperatur", // Swedish
		"vær", "prognose", "temperatur", // Norwegian/Danish
		"veður", "spá", "hitastig", // Icelandic
		// East Asian
		"天気", "予報", "気温", // Japanese (tenki, yohō, kion)
		"날씨", "예보", "기온", // Korean (nalssi, yebo, gion)
		"天气", "预报", "气温", // Chinese (tiānqì, yùbào, qìwēn)
		// South Asian
		"मौसम", "पूर्वानुमान", "तापमान", // Hindi (mausam, purvanumaan, taapmaan)
		"আবহাওয়া", "পূর্বাভাস", "তাপমাত্রা", // Bengali (ābhawāẏā, pūrbābhāsa, tāpamātrā)
		// Middle Eastern
		"طقس", "توقعات", "درجة الحرارة", // Arabic (ṭaqs, tawaqquʿāt, darajat al-ḥarāra)
		"آب و ہوا", "پیش گوئی", "درجہ حرارت", // Urdu (āb-o-hawā, peshgoī, daraja ḥarārat)
		// Turkish
		"hava", "tahmin", "sıcaklık", "hava durumu",
		// Greek
		"καιρός", "πρόβλεψη", "θερμοκρασία",
		// Hebrew
		"מזג אוויר", "תחזית", "טמפרטורה",
		// Other European
		"időkép", "előrejelzés", "hőmérséklet", // Hungarian
		"vreme", "prognoză", "temperatură", // Romanian
		"vrijeme", "prognoza", "temperatura", // Croatian/Serbian
		// Global/Internet slang
		"temp", "wx", "meteo", "wea", "forec",
	}

	// Enhanced multi-language prepositions
	prepositions := []string{
		// English
		"in", "at", "for", "around", "near",
		// Czech/Slovak
		"v", "ve", "na", "do", "u", "při", "blízko", "okolí",
		// German
		"in", "bei", "an", "für", "um", "nahe",
		// Romance
		"en", "a", "au", "aux", "dans", // French
		"en", "a", "de", // Spanish
		"a", "in", "da", // Italian
		"em", "no", "na", // Portuguese
		// Slavic
		"w", "we", "na", "dla", "pod", // Polish
		"в", "на", "у", "к", "под", // Russian/Ukrainian
		// Nordic
		"i", "på", "hos", // Swedish/Danish/Norwegian
		// Others
		"في", "عند", "قرب", // Arabic (fī, ʿind, qurb)
		"में", "पर", "के पास", // Hindi (mẽ, par, ke pās)
		"で", "に", "の近く", // Japanese (de, ni, no chikaku)
		"에서", "에", "근처", // Korean (eseo, e, geuncheo)
		"在", "于", "附近", // Chinese (zài, yú, fùjìn)
	}

	// Always normalize query (lowercase + remove diacritics) so that the
	// keyword lists match regardless of accents or casing in the input.
	normalized := removeDiacritics(strings.ToLower(query))
	hasWeather := false
	for _, word := range weatherWords {
		if strings.Contains(normalized, removeDiacritics(word)) {
			hasWeather = true
			break
		}
	}
	if !hasWeather {
		return city, forecast, false
	}

	// Extract the location from the normalized query; both keyword lists
	// are re-normalized inside extractWeatherLocation to match.
	loc := extractWeatherLocation(normalized, weatherWords, prepositions)
	if loc == "" {
		return city, forecast, false
	}

	// Geocode the location and fetch current weather + forecast.
	return geocodeAndGetWeather(loc)
}
+
+func extractWeatherLocation(query string, weatherWords, prepositions []string) string {
+ // Create normalized versions for matching
+ normWeatherWords := make([]string, len(weatherWords))
+ for i, w := range weatherWords {
+ normWeatherWords[i] = removeDiacritics(w)
+ }
+
+ normPrepositions := make([]string, len(prepositions))
+ for i, p := range prepositions {
+ normPrepositions[i] = removeDiacritics(p)
+ }
+
+ // Pattern 1: [weather_word] [preposition]? [location]
+ pattern1 := `(?:` + strings.Join(normWeatherWords, "|") + `)\s*(?:` + strings.Join(normPrepositions, "|") + `)?\s*(.+)`
+ re1 := regexp.MustCompile(pattern1)
+ if matches := re1.FindStringSubmatch(query); len(matches) > 1 {
+ loc := cleanLocation(matches[1], normPrepositions)
+ if loc != "" {
+ return loc
+ }
+ }
+
+ // Pattern 2: [location] [weather_word]
+ pattern2 := `(.+?)\s+(?:` + strings.Join(normWeatherWords, "|") + `)`
+ re2 := regexp.MustCompile(pattern2)
+ if matches := re2.FindStringSubmatch(query); len(matches) > 1 {
+ loc := cleanLocation(matches[1], normPrepositions)
+ if loc != "" {
+ return loc
+ }
+ }
+
+ // Pattern 3: Question format
+ questionPattern := `(?:how is|what is|what's|jak[ée]\s+je|wie ist|quel est|qu[eé]\s+tal|com'[èe])\s+(?:the )?(?:` +
+ strings.Join(normWeatherWords, "|") + `)\s*(?:` + strings.Join(normPrepositions, "|") + `)?\s*(.+)`
+ re3 := regexp.MustCompile(questionPattern)
+ if matches := re3.FindStringSubmatch(query); len(matches) > 1 {
+ loc := cleanLocation(matches[1], normPrepositions)
+ if loc != "" {
+ return loc
+ }
+ }
+
+ // Fallback with smarter exclusion
+ return extractByExclusion(query, normWeatherWords, normPrepositions)
+}
+
// cleanLocation strips leading and trailing preposition words, then trims
// surrounding punctuation, from a candidate location string.
func cleanLocation(loc string, prepositions []string) string {
	isPrep := make(map[string]bool, len(prepositions))
	for _, p := range prepositions {
		isPrep[p] = true
	}

	tokens := strings.Fields(loc)

	// Advance past prepositions at the front...
	start := 0
	for start < len(tokens) && isPrep[tokens[start]] {
		start++
	}
	// ...and retreat past prepositions at the back.
	end := len(tokens)
	for end > start && isPrep[tokens[end-1]] {
		end--
	}

	return strings.Trim(strings.Join(tokens[start:end], " "), ",.?!:;()[]{}'\"")
}
+
+// Remove diacritics implementation
+func removeDiacritics(s string) string {
+ var result []rune
+ for _, r := range norm.NFD.String(s) {
+ if unicode.Is(unicode.Mn, r) { // Mn: nonspacing marks
+ continue
+ }
+ result = append(result, r)
+ }
+ return string(result)
+}
+
+// Extract location by removing weather-related words
+func extractByExclusion(query string, weatherWords, prepositions []string) string {
+ // Create removal set
+ removeSet := make(map[string]bool)
+ for _, w := range weatherWords {
+ removeSet[w] = true
+ }
+ for _, p := range prepositions {
+ removeSet[p] = true
+ }
+
+ // Process query words
+ words := strings.Fields(query)
+ var locWords []string
+ for _, word := range words {
+ if !removeSet[word] {
+ locWords = append(locWords, word)
+ }
+ }
+
+ loc := strings.Join(locWords, " ")
+ return cleanLocation(loc, prepositions)
+}
+
+// // Improved location cleaning
+// func cleanLocation(loc string) string {
+// loc = strings.Trim(loc, ",.?!:;()[]{}'\"")
+
+// // Remove trailing verbs
+// verbs := []string{"is", "at", "for", "in", "v", "ve", "na", "do", "w", "en", "a"}
+// for _, v := range verbs {
+// loc = strings.TrimSuffix(loc, " "+v)
+// }
+
+// return loc
+// }
+
+// // Remove diacritics implementation
+// func removeDiacritics(s string) string {
+// var result []rune
+// for _, r := range norm.NFD.String(s) {
+// if unicode.Is(unicode.Mn, r) { // Mn: nonspacing marks
+// continue
+// }
+// result = append(result, r)
+// }
+// return string(result)
+// }
+
+func geocodeAndGetWeather(loc string) (WeatherCity, WeatherForecast, bool) {
+ var city WeatherCity
+ var forecast WeatherForecast
+ // 1. Geocode
+ geoURL := fmt.Sprintf("https://geocoding-api.open-meteo.com/v1/search?name=%s&count=1", urlQueryEscape(loc))
+ resp, err := http.Get(geoURL)
+ if err != nil {
+ return city, forecast, false
+ }
+ defer resp.Body.Close()
+ var geo struct {
+ Results []struct {
+ Name string `json:"name"`
+ Country string `json:"country"`
+ Lat float64 `json:"latitude"`
+ Lon float64 `json:"longitude"`
+ } `json:"results"`
+ }
+ if err := json.NewDecoder(resp.Body).Decode(&geo); err != nil || len(geo.Results) == 0 {
+ return city, forecast, false
+ }
+ g := geo.Results[0]
+ city = WeatherCity{
+ Name: g.Name,
+ Country: g.Country,
+ Lat: g.Lat,
+ Lon: g.Lon,
+ }
+ // 2. Weather (current + forecast)
+ weatherURL := fmt.Sprintf("https://api.open-meteo.com/v1/forecast?latitude=%f&longitude=%f¤t=temperature_2m,weather_code,wind_speed_10m,relative_humidity_2m&daily=temperature_2m_min,temperature_2m_max,weather_code&forecast_days=3&timezone=auto", g.Lat, g.Lon)
+ resp2, err := http.Get(weatherURL)
+ if err != nil {
+ return city, forecast, false
+ }
+ defer resp2.Body.Close()
+ var data struct {
+ Current struct {
+ Temp float64 `json:"temperature_2m"`
+ Wind float64 `json:"wind_speed_10m"`
+ Hum int `json:"relative_humidity_2m"`
+ Code int `json:"weather_code"`
+ } `json:"current"`
+ Daily struct {
+ Dates []string `json:"time"`
+ MinTemp []float64 `json:"temperature_2m_min"`
+ MaxTemp []float64 `json:"temperature_2m_max"`
+ Weather []int `json:"weather_code"`
+ } `json:"daily"`
+ }
+ body, _ := io.ReadAll(resp2.Body)
+ if err := json.Unmarshal(body, &data); err != nil {
+ return city, forecast, false
+ }
+ forecast.Current = WeatherCurrent{
+ Temperature: data.Current.Temp,
+ Wind: data.Current.Wind,
+ Humidity: data.Current.Hum,
+ Condition: weatherDescription(data.Current.Code),
+ }
+ for i := range data.Daily.Dates {
+ forecast.Forecast = append(forecast.Forecast, WeatherDay{
+ Date: data.Daily.Dates[i],
+ MinTemp: data.Daily.MinTemp[i],
+ MaxTemp: data.Daily.MaxTemp[i],
+ Condition: weatherDescription(data.Daily.Weather[i]),
+ })
+ }
+ return city, forecast, true
+}
+
// weatherDescription maps an Open-Meteo WMO weather code to a short
// human-readable condition string; unrecognized codes yield "Unknown".
func weatherDescription(code int) string {
	// Table-driven equivalent of the original switch; minimal mapping,
	// can be expanded.
	conditions := []struct {
		label string
		codes []int
	}{
		{"Clear", []int{0}},
		{"Partly cloudy", []int{1, 2, 3}},
		{"Fog", []int{45, 48}},
		{"Drizzle", []int{51, 53, 55, 56, 57}},
		{"Rain", []int{61, 63, 65, 66, 67, 80, 81, 82}},
		{"Snow", []int{71, 73, 75, 77, 85, 86}},
		{"Thunderstorm", []int{95, 96, 99}},
	}
	for _, entry := range conditions {
		for _, c := range entry.codes {
			if c == code {
				return entry.label
			}
		}
	}
	return "Unknown"
}
+
// urlQueryEscape percent-encodes s for use as a URL query value.
//
// Fix: the previous implementation only swapped spaces for '+' and
// *deleted* '%' characters outright, corrupting any input containing
// reserved or non-ASCII characters. url.QueryEscape handles all of them
// correctly (spaces still become '+').
func urlQueryEscape(s string) string {
	return url.QueryEscape(s)
}
diff --git a/ia-wiki.go b/ia-wiki.go
new file mode 100644
index 0000000..3e9fe3c
--- /dev/null
+++ b/ia-wiki.go
@@ -0,0 +1,74 @@
+package main
+
+import (
+ "encoding/json"
+ "fmt"
+ "io"
+ "net/http"
+ "net/url"
+ "strings"
+)
+
// WikipediaResponse mirrors the subset of the MediaWiki `action=query`
// JSON response consumed by getWikipediaSummary: the pages map is keyed by
// page ID string, with each entry carrying the plain-text intro extract.
type WikipediaResponse struct {
	Query struct {
		Pages map[string]struct {
			PageID  int    `json:"pageid"`  // 0 / missing for non-existent articles
			Title   string `json:"title"`   // resolved (post-redirect) title
			Extract string `json:"extract"` // plain-text intro extract
		} `json:"pages"`
	} `json:"query"`
}
+
+// Get Wikipedia summary
+func getWikipediaSummary(query string) (title, text, link string, ok bool) {
+ // Clean and prepare query
+ query = strings.TrimSpace(query)
+ if query == "" {
+ return "", "", "", false
+ }
+
+ // API request
+ apiURL := fmt.Sprintf(
+ "https://en.wikipedia.org/w/api.php?action=query&format=json&prop=extracts&exintro&explaintext&redirects=1&titles=%s",
+ url.QueryEscape(query),
+ )
+
+ resp, err := http.Get(apiURL)
+ if err != nil {
+ return "", "", "", false
+ }
+ defer resp.Body.Close()
+
+ body, err := io.ReadAll(resp.Body)
+ if err != nil {
+ return "", "", "", false
+ }
+
+ // Parse JSON response
+ var result WikipediaResponse
+ if err := json.Unmarshal(body, &result); err != nil {
+ return "", "", "", false
+ }
+
+ // Extract first valid page
+ for _, page := range result.Query.Pages {
+ if page.PageID == 0 || page.Extract == "" {
+ continue
+ }
+
+ // Format text
+ text = page.Extract
+ if len(text) > 500 {
+ text = text[:500] + "..."
+ }
+
+ // Create link
+ titleForURL := strings.ReplaceAll(page.Title, " ", "_")
+ link = fmt.Sprintf("https://en.wikipedia.org/wiki/%s", url.PathEscape(titleForURL))
+
+ return page.Title, text, link, true
+ }
+
+ return "", "", "", false
+}
diff --git a/images-bing.go b/images-bing.go
index b6a6aa6..f057ac5 100644
--- a/images-bing.go
+++ b/images-bing.go
@@ -18,8 +18,21 @@ func PerformBingImageSearch(query, safe, lang string, page int) ([]ImageSearchRe
// Build the search URL
searchURL := buildBingSearchURL(query, page)
- // Make the HTTP request
- resp, err := http.Get(searchURL)
+ // Create the HTTP request
+ req, err := http.NewRequest("GET", searchURL, nil)
+ if err != nil {
+ return nil, 0, fmt.Errorf("creating request: %v", err)
+ }
+
+ // Set User-Agent
+ ImageUserAgent, err := GetUserAgent("Image-Search-Bing")
+ if err != nil {
+ return nil, 0, fmt.Errorf("generating User-Agent: %v", err)
+ }
+ req.Header.Set("User-Agent", ImageUserAgent)
+
+ // Use MetaProxy if enabled
+ resp, err := DoMetaProxyRequest(req)
if err != nil {
return nil, 0, fmt.Errorf("making request: %v", err)
}
diff --git a/images-deviantart.go b/images-deviantart.go
index 3077640..171ac1a 100644
--- a/images-deviantart.go
+++ b/images-deviantart.go
@@ -87,15 +87,15 @@ func PerformDeviantArtImageSearch(query, safe, lang string, page int) ([]ImageSe
return nil, 0, err
}
- // Make the HTTP request with User-Agent header
- client := &http.Client{}
+ // Create the HTTP request
req, err := http.NewRequest("GET", searchURL, nil)
if err != nil {
return nil, 0, fmt.Errorf("creating request: %v", err)
}
req.Header.Set("User-Agent", DeviantArtImageUserAgent)
- resp, err := client.Do(req)
+ // Perform the request using MetaProxy if enabled
+ resp, err := DoMetaProxyRequest(req)
if err != nil {
return nil, 0, fmt.Errorf("making request: %v", err)
}
@@ -182,7 +182,7 @@ func PerformDeviantArtImageSearch(query, safe, lang string, page int) ([]ImageSe
duration := time.Since(startTime)
- // Check if the number of results is one or less
+ // Check if the number of results is zero
if len(results) == 0 {
return nil, duration, fmt.Errorf("no images found")
}
diff --git a/images-imgur.go b/images-imgur.go
index 641f645..e085371 100644
--- a/images-imgur.go
+++ b/images-imgur.go
@@ -18,7 +18,21 @@ func PerformImgurImageSearch(query, safe, lang string, page int) ([]ImageSearchR
var results []ImageSearchResult
searchURL := buildImgurSearchURL(query, page)
- resp, err := http.Get(searchURL)
+ // Create the HTTP request
+ req, err := http.NewRequest("GET", searchURL, nil)
+ if err != nil {
+ return nil, 0, fmt.Errorf("creating request: %v", err)
+ }
+
+ // Get the User-Agent string
+ imgurUserAgent, err := GetUserAgent("Image-Search-Imgur")
+ if err != nil {
+ return nil, 0, fmt.Errorf("getting user-agent: %v", err)
+ }
+ req.Header.Set("User-Agent", imgurUserAgent)
+
+ // Perform the HTTP request with MetaProxy if enabled
+ resp, err := DoMetaProxyRequest(req)
if err != nil {
return nil, 0, fmt.Errorf("making request: %v", err)
}
@@ -28,6 +42,7 @@ func PerformImgurImageSearch(query, safe, lang string, page int) ([]ImageSearchR
return nil, 0, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
}
+ // Parse the HTML document
doc, err := goquery.NewDocumentFromReader(resp.Body)
if err != nil {
return nil, 0, fmt.Errorf("loading HTML document: %v", err)
@@ -76,12 +91,35 @@ func PerformImgurImageSearch(query, safe, lang string, page int) ([]ImageSearchR
duration := time.Since(startTime) // Calculate the duration
+ if len(results) == 0 {
+ return nil, duration, fmt.Errorf("no images found")
+ }
+
return results, duration, nil
}
// scrapeImageFromImgurPage scrapes the image source from the Imgur page
func scrapeImageFromImgurPage(pageURL string) string {
- resp, err := http.Get(pageURL)
+ req, err := http.NewRequest("GET", pageURL, nil)
+ if err != nil {
+ fmt.Printf("Error creating request for page: %v\n", err)
+ return ""
+ }
+
+ // Get the User-Agent string
+ imgurUserAgent, err := GetUserAgent("Image-Search-Imgur")
+ if err == nil {
+ req.Header.Set("User-Agent", imgurUserAgent)
+ }
+
+ // Perform the request using MetaProxy if enabled
+ var resp *http.Response
+ if config.MetaProxyEnabled && metaProxyClient != nil {
+ resp, err = metaProxyClient.Do(req)
+ } else {
+ client := &http.Client{}
+ resp, err = client.Do(req)
+ }
if err != nil {
fmt.Printf("Error fetching page: %v\n", err)
return ""
diff --git a/images-quant.go b/images-quant.go
index d85d0f9..ab5d677 100644
--- a/images-quant.go
+++ b/images-quant.go
@@ -97,7 +97,7 @@ func PerformQwantImageSearch(query, safe, lang string, page int) ([]ImageSearchR
// Ensure count + offset is within acceptable limits
if offset+resultsPerPage > 250 {
- return nil, 0, fmt.Errorf("count + offset must be lower than 250 for quant")
+ return nil, 0, fmt.Errorf("count + offset must be lower than 250 for Qwant")
}
if safe == "" {
@@ -113,21 +113,21 @@ func PerformQwantImageSearch(query, safe, lang string, page int) ([]ImageSearchR
offset,
safe)
- client := &http.Client{Timeout: 10 * time.Second}
-
+ // Create the HTTP request
req, err := http.NewRequest("GET", apiURL, nil)
if err != nil {
return nil, 0, fmt.Errorf("creating request: %v", err)
}
+ // Get the User-Agent string
ImageUserAgent, err := GetUserAgent("Image-Search-Quant")
if err != nil {
- return nil, 0, err
+ return nil, 0, fmt.Errorf("getting user-agent: %v", err)
}
+ req.Header.Set("User-Agent", ImageUserAgent)
- req.Header.Set("User-Agent", ImageUserAgent) // Quant seems to not like some specific User-Agent strings
-
- resp, err := client.Do(req)
+ // Perform the request with MetaProxy if enabled
+ resp, err := DoMetaProxyRequest(req)
if err != nil {
return nil, 0, fmt.Errorf("making request: %v", err)
}
@@ -137,11 +137,13 @@ func PerformQwantImageSearch(query, safe, lang string, page int) ([]ImageSearchR
return nil, 0, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
}
+ // Parse the API response
var apiResp QwantAPIResponse
if err := json.NewDecoder(resp.Body).Decode(&apiResp); err != nil {
return nil, 0, fmt.Errorf("decoding response: %v", err)
}
+ // Process the results
var wg sync.WaitGroup
results := make([]ImageSearchResult, len(apiResp.Data.Result.Items))
@@ -174,5 +176,9 @@ func PerformQwantImageSearch(query, safe, lang string, page int) ([]ImageSearchR
duration := time.Since(startTime) // Calculate the duration
+ if len(results) == 0 {
+ return nil, duration, fmt.Errorf("no images found")
+ }
+
return results, duration, nil
}
diff --git a/images.go b/images.go
index a044013..87dceb5 100755
--- a/images.go
+++ b/images.go
@@ -10,12 +10,23 @@ import (
var imageSearchEngines []SearchEngine
-func init() {
- imageSearchEngines = []SearchEngine{
- {Name: "Qwant", Func: wrapImageSearchFunc(PerformQwantImageSearch)},
- {Name: "Bing", Func: wrapImageSearchFunc(PerformBingImageSearch)},
- {Name: "DeviantArt", Func: wrapImageSearchFunc(PerformDeviantArtImageSearch)},
- //{Name: "Imgur", Func: wrapImageSearchFunc(PerformImgurImageSearch), Weight: 4}, // Image proxy not working
+var allImageSearchEngines = []SearchEngine{
+ {Name: "Qwant", Func: wrapImageSearchFunc(PerformQwantImageSearch)},
+ {Name: "Bing", Func: wrapImageSearchFunc(PerformBingImageSearch)},
+ {Name: "DeviantArt", Func: wrapImageSearchFunc(PerformDeviantArtImageSearch)},
+ // {Name: "Imgur", Func: wrapImageSearchFunc(PerformImgurImageSearch), Weight: 4}, // example
+}
+
+func initImageEngines() {
+ imageSearchEngines = nil
+
+ for _, engineName := range config.MetaSearch.Image {
+ for _, candidate := range allImageSearchEngines {
+ if candidate.Name == engineName {
+ imageSearchEngines = append(imageSearchEngines, candidate)
+ break
+ }
+ }
}
}
@@ -44,7 +55,7 @@ func handleImageSearch(w http.ResponseWriter, r *http.Request, settings UserSett
data := map[string]interface{}{
"Results": combinedResults,
"Query": query,
- "Fetched": fmt.Sprintf("%.2f %s", elapsedTime.Seconds(), Translate("seconds")),
+ "Fetched": FormatElapsedTime(elapsedTime),
"Page": page,
"HasPrevPage": page > 1,
"HasNextPage": len(combinedResults) >= 50,
@@ -86,8 +97,8 @@ func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
select {
case results := <-cacheChan:
if results == nil {
- if config.CrawlerEnabled {
- combinedResults = fetchImageResults(query, safe, lang, page, synchronous)
+ if config.MetaSearchEnabled {
+ combinedResults = fetchImageResults(query, safe, lang, page, synchronous, true)
if len(combinedResults) > 0 {
combinedResults = filterValidImages(combinedResults)
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
@@ -96,13 +107,13 @@ func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
printDebug("Crawler disabled; skipping fetching from image search engines.")
}
} else {
- _, _, imageResults, _ := convertToSpecificResults(results)
+ _, _, imageResults, _, _ := convertToSpecificResults(results)
combinedResults = filterValidImages(imageResults)
}
case <-time.After(2 * time.Second):
printDebug("Cache check timeout")
- if config.CrawlerEnabled {
- combinedResults = fetchImageResults(query, safe, lang, page, synchronous)
+ if config.MetaSearchEnabled {
+ combinedResults = fetchImageResults(query, safe, lang, page, synchronous, true)
if len(combinedResults) > 0 {
combinedResults = filterValidImages(combinedResults)
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
@@ -115,15 +126,21 @@ func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
return combinedResults
}
-func fetchImageResults(query, safe, lang string, page int, synchronous bool) []ImageSearchResult {
+func fetchImageResults(query, safe, lang string, page int, synchronous bool, thumbsNeeded bool) []ImageSearchResult {
var results []ImageSearchResult
- // Check if CrawlerEnabled is false
- if !config.CrawlerEnabled {
+ // Check if MetaSearchEnabled is false
+ if !config.MetaSearchEnabled {
printDebug("Crawler is disabled; skipping image search engine fetching.")
return results
}
+ // This will not happen as during config load there is check to have at least something in search engine list
+ // if len(imageSearchEngines) == 0 {
+ // printWarn("No image search engines configured in imageSearchEngines")
+ // return nil
+ // }
+
engineCount := len(imageSearchEngines)
// Determine the engine to use based on the page number
@@ -163,7 +180,7 @@ func fetchImageResults(query, safe, lang string, page int, synchronous bool) []I
if config.DriveCacheEnabled {
// Cache the thumbnail image asynchronously
go func(imgResult ImageSearchResult) {
- _, success, err := cacheImage(imgResult.Thumb, imgResult.ID, true)
+ _, success, err := cacheImage(imgResult.Thumb, imgResult.ID, "thumb")
if err != nil || !success {
printWarn("Failed to cache thumbnail image %s: %v", imgResult.Thumb, err)
removeImageResultFromCache(query, page, safe == "active", lang, imgResult.ID)
@@ -220,23 +237,25 @@ func fetchImageResults(query, safe, lang string, page int, synchronous bool) []I
imageURLMapMu.Unlock()
if config.DriveCacheEnabled {
- // Cache the thumbnail image asynchronously
- go func(imgResult ImageSearchResult) {
- _, success, err := cacheImage(imgResult.Thumb, imgResult.ID, true)
- if err != nil || !success {
- printWarn("Failed to cache thumbnail image %s: %v", imgResult.Thumb, err)
- removeImageResultFromCache(query, page, safe == "active", lang, imgResult.ID)
- }
- }(imageResult)
-
- // Set ProxyThumb to the proxy URL (initially placeholder)
- imageResult.ProxyThumb = fmt.Sprintf("/image/%s_thumb.webp", hash)
-
- // Set ProxyFull to the proxy URL
+ if thumbsNeeded {
+ go func(imgResult ImageSearchResult) {
+ _, success, err := cacheImage(imgResult.Thumb, imgResult.ID, "thumb")
+ if err != nil || !success {
+ printWarn("Failed to cache thumbnail image %s: %v", imgResult.Thumb, err)
+ removeImageResultFromCache(query, page, safe == "active", lang, imgResult.ID)
+ }
+ }(imageResult)
+ imageResult.ProxyThumb = fmt.Sprintf("/image/%s_thumb.webp", hash)
+ } else {
+ imageResult.ProxyThumb = "" // fallback ?
+ }
imageResult.ProxyFull = fmt.Sprintf("/image/%s_full", hash)
} else {
- // Hard cache disabled, proxy both thumb and full images
- imageResult.ProxyThumb = fmt.Sprintf("/image/%s_thumb", hash)
+ if thumbsNeeded {
+ imageResult.ProxyThumb = fmt.Sprintf("/image/%s_thumb", hash)
+ } else {
+ imageResult.ProxyThumb = ""
+ }
imageResult.ProxyFull = fmt.Sprintf("/image/%s_full", hash)
}
diff --git a/indexer.go b/indexer.go
index c8cf6fe..0f5b5dd 100644
--- a/indexer.go
+++ b/indexer.go
@@ -1,3 +1,6 @@
+//go:build experimental
+// +build experimental
+
package main
import (
diff --git a/init-extra.go b/init-extra.go
new file mode 100644
index 0000000..c455d41
--- /dev/null
+++ b/init-extra.go
@@ -0,0 +1,148 @@
+//go:build experimental
+// +build experimental
+
+package main
+
+import (
+ "flag"
+ "os"
+ "path/filepath"
+)
+
+var config Config
+
+func main() {
+ // Command-line flags
+ portFlag := flag.Int("port", 0, "Port number to run the application (overrides config)")
+ domainFlag := flag.String("domain", "", "Domain address for the application (overrides config)")
+ skipConfigFlag := flag.Bool("skip-config-check", false, "Skip interactive prompts and load config.ini")
+ configFlag := flag.String("config", "", "Path to configuration file (overrides default)")
+
+ // Parse command-line flags
+ flag.Parse()
+
+ // Override global configFilePath if --config flag is provided
+ if *configFlag != "" {
+ configFilePath = *configFlag
+ }
+
+ if *skipConfigFlag {
+ // Skip interactive configuration
+ if _, err := os.Stat(configFilePath); err == nil {
+ // Load from config file if it exists
+ config = loadConfig()
+ } else {
+ // Use defaults if config file does not exist
+ config = defaultConfig
+ saveConfig(config) // Save the defaults to config.ini
+ printInfo("Configuration saved to %s", configFilePath)
+ }
+ } else {
+ // Initialize configuration interactively or from config file
+ err := initConfig()
+ if err != nil {
+ printErr("Error during initialization: %v", err)
+ return
+ }
+ }
+
+ // Override with command-line arguments if provided
+ if *portFlag != 0 {
+ config.Port = *portFlag
+ }
+ if *domainFlag != "" {
+ config.Domain = *domainFlag
+ }
+
+ loadNodeConfig()
+
+ if config.CrawlerProxyEnabled || config.MetaProxyEnabled {
+ InitProxies()
+ }
+
+ // Initiate Browser Agent updater
+ if config.MetaSearchEnabled || config.IndexerEnabled {
+ go periodicAgentUpdate()
+ }
+
+ // Load List of Meta Search Engines
+ if config.MetaSearchEnabled {
+ initTextEngines()
+ initImageEngines()
+ initFileEngines()
+ initPipedInstances()
+ initMusicEngines()
+ initExchangeRates()
+ }
+
+ InitializeLanguage("en") // Initialize language before generating OpenSearch
+ generateOpenSearchXML(config)
+
+ // Start the node client only if NodesEnabled is true
+ if config.NodesEnabled {
+ go startUnixSocketServer(config.NodeID)
+ printInfo("Node client started.")
+ } else {
+ printInfo("Node client is disabled.")
+ }
+
+ // Check if the cache directory exists when caching is enabled
+ if config.DriveCacheEnabled {
+ cacheDir := config.DriveCache.Path
+ imagesDir := filepath.Join(cacheDir, "images")
+
+ // Check if the directory already exists
+ if _, err := os.Stat(imagesDir); os.IsNotExist(err) {
+ // Try to create the directory since it doesn't exist
+ if err := os.MkdirAll(imagesDir, os.ModePerm); err != nil {
+ printErr("Error: Failed to create cache or images directory '%s': %v", imagesDir, err)
+ os.Exit(1) // Exit with a non-zero status to indicate an error
+ }
+ // Print a warning if the directory had to be created
+ printWarn("Warning: Created missing directory '%s'.", imagesDir)
+ }
+ }
+
+ // Start periodic cleanup of expired cache files
+ if config.DriveCacheEnabled {
+ go cleanExpiredCachedImages()
+ printInfo("Drive cache started.")
+ } else {
+ printInfo("Drive cache is disabled.")
+ }
+
+ // Start periodic cleanup of expired cache files
+ if config.RamCacheEnabled {
+ resultsCache = NewResultsCache()
+ geocodeCache = NewGeocodeCache()
+ printInfo("RAM cache started.")
+ } else {
+ printInfo("RAM cache is disabled.")
+ }
+
+ // Init indexer
+ if config.IndexerEnabled {
+ if err := downloadAndSetupDomainsCSV(); err != nil {
+ printErr("Failed to set up domains.csv: %v", err)
+ return
+ }
+
+ err := InitIndex()
+ if err != nil {
+ printErr("Failed to initialize index: %v", err)
+ }
+
+ webCrawlerInit()
+
+ printInfo("Indexer is enabled.")
+ } else {
+ printInfo("Indexer is disabled.")
+ }
+
+ // if len(config.MetaSearch.Text) == 0 {
+ // log.Fatal("No text search engines are enabled in config (MetaSearch.Text)")
+ // }
+ // fmt.Printf("Loaded config.MetaSearch.Text: %#v\n", config.MetaSearch.Text)
+
+ runServer()
+}
diff --git a/init.go b/init.go
index bf0d220..30f5345 100644
--- a/init.go
+++ b/init.go
@@ -1,3 +1,6 @@
+//go:build !experimental
+// +build !experimental
+
package main
import (
@@ -9,14 +12,21 @@ import (
var config Config
func main() {
+
// Command-line flags
portFlag := flag.Int("port", 0, "Port number to run the application (overrides config)")
domainFlag := flag.String("domain", "", "Domain address for the application (overrides config)")
skipConfigFlag := flag.Bool("skip-config-check", false, "Skip interactive prompts and load config.ini")
+ configFlag := flag.String("config", "", "Path to configuration file (overrides default)")
// Parse command-line flags
flag.Parse()
+ // Override global configFilePath if --config flag is provided
+ if *configFlag != "" {
+ configFilePath = *configFlag
+ }
+
if *skipConfigFlag {
// Skip interactive configuration
if _, err := os.Stat(configFilePath); err == nil {
@@ -45,37 +55,28 @@ func main() {
config.Domain = *domainFlag
}
- loadNodeConfig()
-
- if config.AuthCode == "" {
- config.AuthCode = generateStrongRandomString(64)
- printInfo("Generated connection code: %s\n", config.AuthCode)
- saveConfig(config)
+ if config.MetaProxyEnabled {
+ InitProxies()
}
- // Generate Host ID
- hostID, nodeErr := generateHostID()
- if nodeErr != nil {
- printErr("Failed to generate host ID: %v", nodeErr)
- }
- config.PeerID = hostID
-
// Initiate Browser Agent updater
- if config.CrawlerEnabled || config.IndexerEnabled {
+ if config.MetaSearchEnabled || config.IndexerEnabled {
go periodicAgentUpdate()
}
+ // Load List of Meta Search Engines
+ if config.MetaSearchEnabled {
+ initTextEngines()
+ initImageEngines()
+ initFileEngines()
+ initPipedInstances()
+ initMusicEngines()
+ initExchangeRates()
+ }
+
InitializeLanguage("en") // Initialize language before generating OpenSearch
generateOpenSearchXML(config)
- // Start the node client only if NodesEnabled is true
- if config.NodesEnabled {
- go startNodeClient()
- printInfo("Node client started.")
- } else {
- printInfo("Node client is disabled.")
- }
-
// Check if the cache directory exists when caching is enabled
if config.DriveCacheEnabled {
cacheDir := config.DriveCache.Path
@@ -110,29 +111,10 @@ func main() {
printInfo("RAM cache is disabled.")
}
- // Init indexer
- if config.IndexerEnabled {
- if err := downloadAndSetupDomainsCSV(); err != nil {
- printErr("Failed to set up domains.csv: %v", err)
- return
- }
-
- err := InitIndex()
- if err != nil {
- printErr("Failed to initialize index: %v", err)
- }
-
- webCrawlerInit()
-
- // No longer needed as crawled data are indexed imidietly
- // // Start periodic indexing (every 2 minutes)
- // dataFilePath := filepath.Join(config.DriveCache.Path, "data_to_index.txt")
- // startPeriodicIndexing(dataFilePath, 2*time.Minute)
-
- printInfo("Indexer is enabled.")
- } else {
- printInfo("Indexer is disabled.")
- }
+ // if len(config.MetaSearch.Text) == 0 {
+ // log.Fatal("No text search engines are enabled in config (MetaSearch.Text)")
+ // }
+ // fmt.Printf("Loaded config.MetaSearch.Text: %#v\n", config.MetaSearch.Text)
runServer()
}
diff --git a/lang/af/LC_MESSAGES/default.po b/lang/af/LC_MESSAGES/default.po
index 57fdf5d..183df07 100644
--- a/lang/af/LC_MESSAGES/default.po
+++ b/lang/af/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrents"
msgid "searching_for_new_results"
-msgstr "Soek vir nuwe resultate..."
+msgstr "Soek vir nuwe resultate"
msgid "previous"
msgstr "Vorige"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "Volgende"
msgid "fetched_in"
-msgstr "Verkry in %s sekondes"
+msgstr "Verkry in %s"
msgid "sort_seeders"
msgstr "Aantal saaiers"
@@ -184,8 +184,6 @@ msgstr "Strate"
msgid "satellite"
msgstr "Satelliet"
-msgid "esri_satellite"
-msgstr "Esri Satelliet"
msgid "topographic"
msgstr "Topografiese"
@@ -198,3 +196,9 @@ msgstr "Jy is binne "
msgid "meters_from_point"
msgstr "meter van hierdie punt af"
+
+msgid "seconds"
+msgstr "Sekondes"
+
+msgid "milliseconds"
+msgstr "Millisekondes"
diff --git a/lang/ar/LC_MESSAGES/default.po b/lang/ar/LC_MESSAGES/default.po
index 65ce544..90b5c5b 100644
--- a/lang/ar/LC_MESSAGES/default.po
+++ b/lang/ar/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "تورنتات"
msgid "searching_for_new_results"
-msgstr "جاري البحث عن نتائج جديدة..."
+msgstr "جاري البحث عن نتائج جديدة"
msgid "previous"
msgstr "السابق"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "التالي"
msgid "fetched_in"
-msgstr "تم التحميل في %s ثوانٍ"
+msgstr "تم التحميل في %s"
msgid "sort_seeders"
msgstr "عدد المزودين"
@@ -184,8 +184,6 @@ msgstr "شوارع"
msgid "satellite"
msgstr "قمر صناعي"
-msgid "esri_satellite"
-msgstr "قمر صناعي ESRI"
msgid "topographic"
msgstr "طوبوغرافي"
@@ -198,3 +196,9 @@ msgstr "أنت على بعد "
msgid "meters_from_point"
msgstr "أمتار من هذه النقطة"
+
+msgid "seconds"
+msgstr "ثواني"
+
+msgid "milliseconds"
+msgstr "ميلي ثانية"
diff --git a/lang/be/LC_MESSAGES/default.po b/lang/be/LC_MESSAGES/default.po
index 181b9a6..29025ae 100644
--- a/lang/be/LC_MESSAGES/default.po
+++ b/lang/be/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Торэнты"
msgid "searching_for_new_results"
-msgstr "Пошук новых вынікаў..."
+msgstr "Пошук новых вынікаў"
msgid "previous"
msgstr "Папярэдняе"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "Наступнае"
msgid "fetched_in"
-msgstr "Загружана за %s секунд"
+msgstr "Загружана за %s"
msgid "sort_seeders"
msgstr "Па колькасці сейдэраў"
@@ -184,8 +184,6 @@ msgstr "Вуліцы"
msgid "satellite"
msgstr "Спадарожнік"
-msgid "esri_satellite"
-msgstr "Спадарожнік ESRI"
msgid "topographic"
msgstr "Тапаграфічная"
@@ -198,4 +196,9 @@ msgstr "Вы знаходзіцеся на адлегласці"
msgid "meters_from_point"
msgstr "метраў ад гэтага пункта"
-
\ No newline at end of file
+
+msgid "seconds"
+msgstr "Секунды"
+
+msgid "milliseconds"
+msgstr "Мілісекунды"
diff --git a/lang/bg/LC_MESSAGES/default.po b/lang/bg/LC_MESSAGES/default.po
index d2d7806..92648fd 100644
--- a/lang/bg/LC_MESSAGES/default.po
+++ b/lang/bg/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Торенти"
msgid "searching_for_new_results"
-msgstr "Търсят се нови резултати..."
+msgstr "Търсят се нови резултати"
msgid "previous"
msgstr "Предишен"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "Следващ"
msgid "fetched_in"
-msgstr "Заредено за %s секунди"
+msgstr "Заредено за %s"
msgid "sort_seeders"
msgstr "Сийдъри (качване)"
@@ -184,8 +184,6 @@ msgstr "Улици"
msgid "satellite"
msgstr "Сателит"
-msgid "esri_satellite"
-msgstr "ESRI Сателит"
msgid "topographic"
msgstr "Топографска"
@@ -198,3 +196,9 @@ msgstr "Намирате се на "
msgid "meters_from_point"
msgstr "метра от тази точка"
+
+msgid "seconds"
+msgstr "Секунди"
+
+msgid "milliseconds"
+msgstr "Милисекунди"
diff --git a/lang/ca/LC_MESSAGES/default.po b/lang/ca/LC_MESSAGES/default.po
index c71e54e..cb886e8 100644
--- a/lang/ca/LC_MESSAGES/default.po
+++ b/lang/ca/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrents"
msgid "searching_for_new_results"
-msgstr "Cercant nous resultats..."
+msgstr "Cercant nous resultats"
msgid "previous"
msgstr "Anterior"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "Següent"
msgid "fetched_in"
-msgstr "Recuperat en %s segons"
+msgstr "Recuperat en %s"
msgid "sort_seeders"
msgstr "Ordena per fonts"
@@ -184,8 +184,6 @@ msgstr "Carrers"
msgid "satellite"
msgstr "Satèl·lit"
-msgid "esri_satellite"
-msgstr "Satèl·lit ESRI"
msgid "topographic"
msgstr "Topogràfic"
@@ -198,3 +196,9 @@ msgstr "Ets a "
msgid "meters_from_point"
msgstr "metres d'aquest punt"
+
+msgid "seconds"
+msgstr "Segons"
+
+msgid "milliseconds"
+msgstr "Mil·lisegons"
diff --git a/lang/cs/LC_MESSAGES/default.po b/lang/cs/LC_MESSAGES/default.po
index 52569b5..fdd1806 100644
--- a/lang/cs/LC_MESSAGES/default.po
+++ b/lang/cs/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrenty"
msgid "searching_for_new_results"
-msgstr "Hledám nové výsledky..."
+msgstr "Hledám nové výsledky"
msgid "previous"
msgstr "Předchozí"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "Další"
msgid "fetched_in"
-msgstr "Načteno za %s sekund"
+msgstr "Načteno za %s"
msgid "sort_seeders"
msgstr "Počet seedů"
@@ -184,8 +184,6 @@ msgstr "Ulice"
msgid "satellite"
msgstr "Satelitní"
-msgid "esri_satellite"
-msgstr "Esri Satelitní"
msgid "topographic"
msgstr "Topografická"
@@ -197,4 +195,9 @@ msgid "you_are_within"
msgstr "Jste v dosahu "
msgid "meters_from_point"
-msgstr "metrů od tohoto bodu"
\ No newline at end of file
+msgstr "metrů od tohoto bodu"
+msgid "seconds"
+msgstr "Sekundy"
+
+msgid "milliseconds"
+msgstr "Milisekundy"
diff --git a/lang/da/LC_MESSAGES/default.po b/lang/da/LC_MESSAGES/default.po
index 2a50071..bc163b2 100644
--- a/lang/da/LC_MESSAGES/default.po
+++ b/lang/da/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrenter"
msgid "searching_for_new_results"
-msgstr "Søger efter nye resultater..."
+msgstr "Søger efter nye resultater"
msgid "previous"
msgstr "Forrige"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "Næste"
msgid "fetched_in"
-msgstr "Hentet på %s sekunder"
+msgstr "Hentet på %s"
msgid "sort_seeders"
msgstr "Sorter efter seeders"
@@ -184,8 +184,6 @@ msgstr "Gader"
msgid "satellite"
msgstr "Satellit"
-msgid "esri_satellite"
-msgstr "ESRI Satellit"
msgid "topographic"
msgstr "Topografisk"
@@ -198,3 +196,9 @@ msgstr "Du er inden for "
msgid "meters_from_point"
msgstr "meter fra dette punkt"
+
+msgid "seconds"
+msgstr "Sekunder"
+
+msgid "milliseconds"
+msgstr "Millisekunder"
diff --git a/lang/de/LC_MESSAGES/default.po b/lang/de/LC_MESSAGES/default.po
index e5c3c88..8de8282 100644
--- a/lang/de/LC_MESSAGES/default.po
+++ b/lang/de/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrents"
msgid "searching_for_new_results"
-msgstr "Suche nach neuen Ergebnissen..."
+msgstr "Suche nach neuen Ergebnissen"
msgid "previous"
msgstr "Vorherige"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "Nächste"
msgid "fetched_in"
-msgstr "Abgerufen in %s Sekunden"
+msgstr "Abgerufen in %s"
msgid "sort_seeders"
msgstr "Sortieren nach Seeders"
@@ -184,8 +184,6 @@ msgstr "Straßen"
msgid "satellite"
msgstr "Satellit"
-msgid "esri_satellite"
-msgstr "ESRI-Satellit"
msgid "topographic"
msgstr "Topographisch"
@@ -198,3 +196,9 @@ msgstr "Sie befinden sich innerhalb von "
msgid "meters_from_point"
msgstr "Metern von diesem Punkt entfernt"
+
+msgid "seconds"
+msgstr "Sekunden"
+
+msgid "milliseconds"
+msgstr "Millisekunden"
diff --git a/lang/el/LC_MESSAGES/default.po b/lang/el/LC_MESSAGES/default.po
index eafb2fe..defbc98 100644
--- a/lang/el/LC_MESSAGES/default.po
+++ b/lang/el/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrents"
msgid "searching_for_new_results"
-msgstr "Αναζήτηση νέων αποτελεσμάτων..."
+msgstr "Αναζήτηση νέων αποτελεσμάτων"
msgid "previous"
msgstr "Προηγούμενο"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "Επόμενο"
msgid "fetched_in"
-msgstr "Ανακτήθηκε σε %s δευτερόλεπτα"
+msgstr "Ανακτήθηκε σε %s"
msgid "sort_seeders"
msgstr "Ταξινόμηση κατά seeders"
@@ -184,8 +184,6 @@ msgstr "Δρόμοι"
msgid "satellite"
msgstr "Δορυφόρος"
-msgid "esri_satellite"
-msgstr "ESRI Δορυφόρος"
msgid "topographic"
msgstr "Τοπογραφικός"
@@ -198,3 +196,9 @@ msgstr "Βρίσκεστε εντός "
msgid "meters_from_point"
msgstr "μέτρων από αυτό το σημείο"
+
+msgid "seconds"
+msgstr "Δευτερόλεπτα"
+
+msgid "milliseconds"
+msgstr "Χιλιοστά του δευτερολέπτου"
diff --git a/lang/en/LC_MESSAGES/default.po b/lang/en/LC_MESSAGES/default.po
index eb0843d..e00fd81 100644
--- a/lang/en/LC_MESSAGES/default.po
+++ b/lang/en/LC_MESSAGES/default.po
@@ -65,7 +65,7 @@ msgid "site_name"
msgstr "QGato"
msgid "site_description"
-msgstr "QGato - Private & Open"
+msgstr "An open-source private search engine."
msgid "site_tags"
msgstr "search, qgato, spitfire"
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrents"
msgid "searching_for_new_results"
-msgstr "Searching for new results..."
+msgstr "Searching for new results"
msgid "previous"
msgstr "Previous"
@@ -116,7 +116,13 @@ msgid "next"
msgstr "Next"
msgid "fetched_in"
-msgstr "Fetched in %s seconds"
+msgstr "Fetched in %s"
+
+msgid "seconds"
+msgstr "seconds"
+
+msgid "milliseconds"
+msgstr "milliseconds"
msgid "sort_seeders"
msgstr "Number of Seeders"
@@ -184,8 +190,6 @@ msgstr "Streets"
msgid "satellite"
msgstr "Satellite"
-msgid "esri_satellite"
-msgstr "Esri Satellite"
msgid "topographic"
msgstr "Topographic"
@@ -198,3 +202,3 @@ msgstr "You are within "
 
 msgid "meters_from_point"
 msgstr "meters from this point"
diff --git a/lang/eo/LC_MESSAGES/default.po b/lang/eo/LC_MESSAGES/default.po
index e0805a9..492ce04 100644
--- a/lang/eo/LC_MESSAGES/default.po
+++ b/lang/eo/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torentoj"
msgid "searching_for_new_results"
-msgstr "Serĉante novajn rezultojn..."
+msgstr "Serĉante novajn rezultojn"
msgid "previous"
msgstr "Antaŭa"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "Sekva"
msgid "fetched_in"
-msgstr "Prenita en %s sekundoj"
+msgstr "Prenita en %s"
msgid "sort_seeders"
msgstr "Ordigi laŭ semantoj"
@@ -184,8 +184,6 @@ msgstr "Stratoj"
msgid "satellite"
msgstr "Satelito"
-msgid "esri_satellite"
-msgstr "ESRI Satelito"
msgid "topographic"
msgstr "Topografia"
@@ -198,3 +196,9 @@ msgstr "Vi estas ene de "
msgid "meters_from_point"
msgstr "metroj de ĉi tiu punkto"
+
+msgid "seconds"
+msgstr "Sekundoj"
+
+msgid "milliseconds"
+msgstr "Milisekundoj"
diff --git a/lang/es/LC_MESSAGES/default.po b/lang/es/LC_MESSAGES/default.po
index db03c59..6a7aaef 100644
--- a/lang/es/LC_MESSAGES/default.po
+++ b/lang/es/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrents"
msgid "searching_for_new_results"
-msgstr "Buscando nuevos resultados..."
+msgstr "Buscando nuevos resultados"
msgid "previous"
msgstr "Anterior"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "Siguiente"
msgid "fetched_in"
-msgstr "Obtenido en %s segundos"
+msgstr "Obtenido en %s"
msgid "sort_seeders"
msgstr "Ordenar por seeders"
@@ -184,8 +184,6 @@ msgstr "Calles"
msgid "satellite"
msgstr "Satélite"
-msgid "esri_satellite"
-msgstr "Satélite ESRI"
msgid "topographic"
msgstr "Topográfico"
@@ -198,3 +196,9 @@ msgstr "Estás dentro de "
msgid "meters_from_point"
msgstr "metros de este punto"
+
+msgid "seconds"
+msgstr "Segundos"
+
+msgid "milliseconds"
+msgstr "Milisegundos"
diff --git a/lang/et/LC_MESSAGES/default.po b/lang/et/LC_MESSAGES/default.po
index f578b1f..eaf212c 100644
--- a/lang/et/LC_MESSAGES/default.po
+++ b/lang/et/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrendid"
msgid "searching_for_new_results"
-msgstr "Otsitakse uusi tulemusi..."
+msgstr "Otsitakse uusi tulemusi"
msgid "previous"
msgstr "Eelmine"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "Järgmine"
msgid "fetched_in"
-msgstr "Laaditud %s sekundiga"
+msgstr "Laaditud %s"
msgid "sort_seeders"
msgstr "Sorteeri külvajate järgi"
@@ -184,8 +184,6 @@ msgstr "Tänavad"
msgid "satellite"
msgstr "Satelliit"
-msgid "esri_satellite"
-msgstr "ESRI Satelliit"
msgid "topographic"
msgstr "Topograafiline"
@@ -198,3 +196,9 @@ msgstr "Olete "
msgid "meters_from_point"
msgstr "meetri kaugusel sellest punktist"
+
+msgid "seconds"
+msgstr "Sekundit"
+
+msgid "milliseconds"
+msgstr "Millisekundit"
diff --git a/lang/fa/LC_MESSAGES/default.po b/lang/fa/LC_MESSAGES/default.po
index d4e4e5d..f3f2f7f 100644
--- a/lang/fa/LC_MESSAGES/default.po
+++ b/lang/fa/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "تورنتها"
msgid "searching_for_new_results"
-msgstr "در حال جستجوی نتایج جدید..."
+msgstr "در حال جستجوی نتایج جدید"
msgid "previous"
msgstr "قبلی"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "بعدی"
msgid "fetched_in"
-msgstr "بازیابی شده در %s ثانیه"
+msgstr "بازیابی شده در %s"
msgid "sort_seeders"
msgstr "مرتبسازی بر اساس سیدرها"
@@ -184,8 +184,6 @@ msgstr "خیابانها"
msgid "satellite"
msgstr "ماهواره"
-msgid "esri_satellite"
-msgstr "ماهواره ESRI"
msgid "topographic"
msgstr "توپوگرافی"
@@ -198,3 +196,9 @@ msgstr "شما در فاصله "
msgid "meters_from_point"
msgstr "متری از این نقطه قرار دارید"
+
+msgid "seconds"
+msgstr "ثانیه"
+
+msgid "milliseconds"
+msgstr "میلیثانیه"
diff --git a/lang/fi/LC_MESSAGES/default.po b/lang/fi/LC_MESSAGES/default.po
index 42daf70..1539958 100644
--- a/lang/fi/LC_MESSAGES/default.po
+++ b/lang/fi/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrentit"
msgid "searching_for_new_results"
-msgstr "Haetaan uusia tuloksia..."
+msgstr "Haetaan uusia tuloksia"
msgid "previous"
msgstr "Edellinen"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "Seuraava"
msgid "fetched_in"
-msgstr "Haettu %s sekunnissa"
+msgstr "Haettu %s"
msgid "sort_seeders"
msgstr "Lajittele lähettäjien mukaan"
@@ -184,8 +184,6 @@ msgstr "Kadut"
msgid "satellite"
msgstr "Satelliitti"
-msgid "esri_satellite"
-msgstr "ESRI Satelliitti"
msgid "topographic"
msgstr "Topografinen"
@@ -198,3 +196,9 @@ msgstr "Olet "
msgid "meters_from_point"
msgstr "metrin päässä tästä pisteestä"
+
+msgid "seconds"
+msgstr "Sekuntia"
+
+msgid "milliseconds"
+msgstr "Millisekuntia"
diff --git a/lang/fr/LC_MESSAGES/default.po b/lang/fr/LC_MESSAGES/default.po
index d1a9c87..437639d 100644
--- a/lang/fr/LC_MESSAGES/default.po
+++ b/lang/fr/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrents"
msgid "searching_for_new_results"
-msgstr "Recherche de nouveaux résultats..."
+msgstr "Recherche de nouveaux résultats"
msgid "previous"
msgstr "Précédent"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "Suivant"
msgid "fetched_in"
-msgstr "Récupéré en %s secondes"
+msgstr "Récupéré en %s"
msgid "sort_seeders"
msgstr "Trier par seeders"
@@ -184,8 +184,6 @@ msgstr "Rues"
msgid "satellite"
msgstr "Satellite"
-msgid "esri_satellite"
-msgstr "Satellite ESRI"
msgid "topographic"
msgstr "Topographique"
@@ -198,3 +196,9 @@ msgstr "Vous êtes à "
msgid "meters_from_point"
msgstr "mètres de ce point"
+
+msgid "seconds"
+msgstr "Secondes"
+
+msgid "milliseconds"
+msgstr "Millisecondes"
diff --git a/lang/hi/LC_MESSAGES/default.po b/lang/hi/LC_MESSAGES/default.po
index 7fd1319..e769c0a 100644
--- a/lang/hi/LC_MESSAGES/default.po
+++ b/lang/hi/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "टोरेंट्स"
msgid "searching_for_new_results"
-msgstr "नए परिणामों की खोज कर रहे हैं..."
+msgstr "नए परिणामों की खोज कर रहे हैं"
msgid "previous"
msgstr "पिछला"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "अगला"
msgid "fetched_in"
-msgstr "%s सेकंड में प्राप्त किया गया"
+msgstr "%s"
msgid "sort_seeders"
msgstr "सीडर्स के अनुसार छांटें"
@@ -184,8 +184,6 @@ msgstr "सड़कें"
msgid "satellite"
msgstr "सैटेलाइट"
-msgid "esri_satellite"
-msgstr "ESRI सैटेलाइट"
msgid "topographic"
msgstr "टोपोग्राफिक"
@@ -198,3 +196,9 @@ msgstr "आप यहाँ हैं: "
msgid "meters_from_point"
msgstr "मीटर इस बिंदु से दूर"
+
+msgid "seconds"
+msgstr "सेकंड"
+
+msgid "milliseconds"
+msgstr "मिलीसेकंड"
diff --git a/lang/hr/LC_MESSAGES/default.po b/lang/hr/LC_MESSAGES/default.po
index 0e881ab..b01dea6 100644
--- a/lang/hr/LC_MESSAGES/default.po
+++ b/lang/hr/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrenti"
msgid "searching_for_new_results"
-msgstr "Traže se novi rezultati..."
+msgstr "Traže se novi rezultati"
msgid "previous"
msgstr "Prethodno"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "Sljedeće"
msgid "fetched_in"
-msgstr "Dohvaćeno za %s sekundi"
+msgstr "Dohvaćeno za %s"
msgid "sort_seeders"
msgstr "Sjeme (najviše)"
@@ -184,8 +184,6 @@ msgstr "Ulice"
msgid "satellite"
msgstr "Satelit"
-msgid "esri_satellite"
-msgstr "ESRI Satelit"
msgid "topographic"
msgstr "Topografski"
@@ -198,3 +196,9 @@ msgstr "Nalazite se unutar "
msgid "meters_from_point"
msgstr "metara od ove točke"
+
+msgid "seconds"
+msgstr "Sekunde"
+
+msgid "milliseconds"
+msgstr "Milisekunde"
diff --git a/lang/hu/LC_MESSAGES/default.po b/lang/hu/LC_MESSAGES/default.po
index f40d775..7376e96 100644
--- a/lang/hu/LC_MESSAGES/default.po
+++ b/lang/hu/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrents"
msgid "searching_for_new_results"
-msgstr "Új találatok keresése..."
+msgstr "Új találatok keresése"
msgid "previous"
msgstr "Előző"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "Következő"
msgid "fetched_in"
-msgstr "Lekérve %s másodperc alatt"
+msgstr "Lekérve %s"
msgid "sort_seeders"
msgstr "Rendezés seederek szerint"
@@ -184,8 +184,6 @@ msgstr "Utcák"
msgid "satellite"
msgstr "Műhold"
-msgid "esri_satellite"
-msgstr "ESRI Műhold"
msgid "topographic"
msgstr "Topográfiai"
@@ -198,3 +196,9 @@ msgstr "Ön itt van: "
msgid "meters_from_point"
msgstr "méterre ettől a ponttól"
+
+msgid "seconds"
+msgstr "Másodperc"
+
+msgid "milliseconds"
+msgstr "Milliszekundum"
diff --git a/lang/hy/LC_MESSAGES/default.po b/lang/hy/LC_MESSAGES/default.po
index 0e1492d..6e0ab2d 100644
--- a/lang/hy/LC_MESSAGES/default.po
+++ b/lang/hy/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Թորրենտներ"
msgid "searching_for_new_results"
-msgstr "Նոր արդյունքներ որոնվում են..."
+msgstr "Նոր արդյունքներ որոնվում են"
msgid "previous"
msgstr "Նախորդը"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "Հաջորդը"
msgid "fetched_in"
-msgstr "Բեռնված է %s վայրկյանում"
+msgstr "Բեռնված է %s"
msgid "sort_seeders"
msgstr "Ներբեռնում (արտահանող)"
@@ -184,8 +184,6 @@ msgstr "Փողոցներ"
msgid "satellite"
msgstr "Արհեստական արբանյակ"
-msgid "esri_satellite"
-msgstr "ESRI Արհեստական արբանյակ"
msgid "topographic"
msgstr "Տոպոգրաֆիկ"
@@ -198,3 +196,9 @@ msgstr "Դուք գտնվում եք "
msgid "meters_from_point"
msgstr "մետր հեռավորության վրա այս կետից"
+
+msgid "seconds"
+msgstr "Վայրկյաններ"
+
+msgid "milliseconds"
+msgstr "Միլիվայրկյաններ"
diff --git a/lang/id/LC_MESSAGES/default.po b/lang/id/LC_MESSAGES/default.po
index 54e2473..8717ed9 100644
--- a/lang/id/LC_MESSAGES/default.po
+++ b/lang/id/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrent"
msgid "searching_for_new_results"
-msgstr "Mencari hasil baru..."
+msgstr "Mencari hasil baru"
msgid "previous"
msgstr "Sebelumnya"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "Berikutnya"
msgid "fetched_in"
-msgstr "Ditemukan dalam %s detik"
+msgstr "Ditemukan dalam %s"
msgid "sort_seeders"
msgstr "Urutkan berdasarkan seeder"
@@ -184,8 +184,6 @@ msgstr "Jalan"
msgid "satellite"
msgstr "Satelit"
-msgid "esri_satellite"
-msgstr "Satelit ESRI"
msgid "topographic"
msgstr "Topografi"
@@ -198,3 +196,9 @@ msgstr "Anda berada dalam jarak "
msgid "meters_from_point"
msgstr "meter dari titik ini"
+
+msgid "seconds"
+msgstr "Detik"
+
+msgid "milliseconds"
+msgstr "Milidetik"
diff --git a/lang/it/LC_MESSAGES/default.po b/lang/it/LC_MESSAGES/default.po
index 0964cb8..b8aeccb 100644
--- a/lang/it/LC_MESSAGES/default.po
+++ b/lang/it/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrent"
msgid "searching_for_new_results"
-msgstr "Ricerca di nuovi risultati..."
+msgstr "Ricerca di nuovi risultati"
msgid "previous"
msgstr "Precedente"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "Successivo"
msgid "fetched_in"
-msgstr "Ottenuto in %s secondi"
+msgstr "Ottenuto in %s"
msgid "sort_seeders"
msgstr "Ordina per seeders"
@@ -184,8 +184,6 @@ msgstr "Strade"
msgid "satellite"
msgstr "Satellitare"
-msgid "esri_satellite"
-msgstr "Satellitare ESRI"
msgid "topographic"
msgstr "Topografico"
@@ -198,3 +196,9 @@ msgstr "Sei entro "
msgid "meters_from_point"
msgstr "metri da questo punto"
+
+msgid "seconds"
+msgstr "Secondi"
+
+msgid "milliseconds"
+msgstr "Millisecondi"
diff --git a/lang/iw/LC_MESSAGES/default.po b/lang/iw/LC_MESSAGES/default.po
index eb7c786..94e40dd 100644
--- a/lang/iw/LC_MESSAGES/default.po
+++ b/lang/iw/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "טורנטים"
msgid "searching_for_new_results"
-msgstr "מחפש תוצאות חדשות..."
+msgstr "מחפש תוצאות חדשות"
msgid "previous"
msgstr "הקודם"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "הבא"
msgid "fetched_in"
-msgstr "הובא ב-%s שניות"
+msgstr "הובא ב-%s"
msgid "sort_seeders"
msgstr "מיון לפי משתפים"
@@ -184,8 +184,6 @@ msgstr "רחובות"
msgid "satellite"
msgstr "לוויין"
-msgid "esri_satellite"
-msgstr "לוויין ESRI"
msgid "topographic"
msgstr "טופוגרפי"
@@ -198,3 +196,9 @@ msgstr "אתם נמצאים במרחק של "
msgid "meters_from_point"
msgstr "מטרים מהנקודה הזו"
+
+msgid "seconds"
+msgstr "שניות"
+
+msgid "milliseconds"
+msgstr "אלפיות שניה"
diff --git a/lang/ja/LC_MESSAGES/default.po b/lang/ja/LC_MESSAGES/default.po
index 47bf52a..e7408ab 100644
--- a/lang/ja/LC_MESSAGES/default.po
+++ b/lang/ja/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "トレント"
msgid "searching_for_new_results"
-msgstr "新しい結果を検索中..."
+msgstr "新しい結果を検索中"
msgid "previous"
msgstr "前"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "次"
msgid "fetched_in"
-msgstr "%s 秒で取得"
+msgstr "%s"
msgid "sort_seeders"
msgstr "シーダーで並べ替え"
@@ -184,8 +184,6 @@ msgstr "ストリート"
msgid "satellite"
msgstr "衛星"
-msgid "esri_satellite"
-msgstr "ESRI 衛星"
msgid "topographic"
msgstr "地形図"
@@ -198,3 +196,9 @@ msgstr "あなたは "
msgid "meters_from_point"
msgstr "メートル以内の位置にいます"
+
+msgid "seconds"
+msgstr "秒"
+
+msgid "milliseconds"
+msgstr "ミリ秒"
diff --git a/lang/ko/LC_MESSAGES/default.po b/lang/ko/LC_MESSAGES/default.po
index 1ee8d0e..92569ec 100644
--- a/lang/ko/LC_MESSAGES/default.po
+++ b/lang/ko/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "토렌트"
msgid "searching_for_new_results"
-msgstr "새로운 결과를 검색 중..."
+msgstr "새로운 결과를 검색 중"
msgid "previous"
msgstr "이전"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "다음"
msgid "fetched_in"
-msgstr "%s초 만에 가져옴"
+msgstr "%s"
msgid "sort_seeders"
msgstr "시더 기준 정렬"
@@ -184,8 +184,6 @@ msgstr "거리"
msgid "satellite"
msgstr "위성"
-msgid "esri_satellite"
-msgstr "ESRI 위성"
msgid "topographic"
msgstr "지형도"
@@ -198,3 +196,9 @@ msgstr "당신은 이 안에 있습니다: "
msgid "meters_from_point"
msgstr "미터 떨어진 지점"
+
+msgid "seconds"
+msgstr "초"
+
+msgid "milliseconds"
+msgstr "밀리초"
diff --git a/lang/lt/LC_MESSAGES/default.po b/lang/lt/LC_MESSAGES/default.po
index 9f21533..ec2181d 100644
--- a/lang/lt/LC_MESSAGES/default.po
+++ b/lang/lt/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrentai"
msgid "searching_for_new_results"
-msgstr "Ieškoma naujų rezultatų..."
+msgstr "Ieškoma naujų rezultatų"
msgid "previous"
msgstr "Ankstesnis"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "Kitas"
msgid "fetched_in"
-msgstr "Gauta per %s sekundes"
+msgstr "Gauta per %s"
msgid "sort_seeders"
msgstr "Rikiuoti pagal siuntėjus"
@@ -184,8 +184,6 @@ msgstr "Gatvės"
msgid "satellite"
msgstr "Palydovas"
-msgid "esri_satellite"
-msgstr "ESRI palydovas"
msgid "topographic"
msgstr "Topografinis"
@@ -198,3 +196,9 @@ msgstr "Jūs esate "
msgid "meters_from_point"
msgstr "metrų nuo šio taško"
+
+msgid "seconds"
+msgstr "Sekundės"
+
+msgid "milliseconds"
+msgstr "Milisekundės"
diff --git a/lang/lv/LC_MESSAGES/default.po b/lang/lv/LC_MESSAGES/default.po
index a2ef8c3..b0a57da 100644
--- a/lang/lv/LC_MESSAGES/default.po
+++ b/lang/lv/LC_MESSAGES/default.po
@@ -1,4 +1,4 @@
- msgid "settings_title"
+msgid "settings_title"
msgstr "Iestatījumi"
msgid "settings"
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torenti"
msgid "searching_for_new_results"
-msgstr "Meklē jaunus rezultātus..."
+msgstr "Meklē jaunus rezultātus"
msgid "previous"
msgstr "Iepriekšējais"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "Nākamais"
msgid "fetched_in"
-msgstr "Iegūts %s sekundēs"
+msgstr "Iegūts %s"
msgid "sort_seeders"
msgstr "Kārtot pēc sējējiem"
@@ -184,8 +184,6 @@ msgstr "Ielas"
msgid "satellite"
msgstr "Satelīts"
-msgid "esri_satellite"
-msgstr "ESRI satelīts"
msgid "topographic"
msgstr "Topogrāfiskais"
@@ -198,3 +196,9 @@ msgstr "Jūs atrodaties "
msgid "meters_from_point"
msgstr "metru attālumā no šī punkta"
+
+msgid "seconds"
+msgstr "Sekundes"
+
+msgid "milliseconds"
+msgstr "Milisekundes"
\ No newline at end of file
diff --git a/lang/nl/LC_MESSAGES/default.po b/lang/nl/LC_MESSAGES/default.po
index 14b244b..2a0c44b 100644
--- a/lang/nl/LC_MESSAGES/default.po
+++ b/lang/nl/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrents"
msgid "searching_for_new_results"
-msgstr "Nieuwe resultaten zoeken..."
+msgstr "Nieuwe resultaten zoeken"
msgid "previous"
msgstr "Vorige"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "Volgende"
msgid "fetched_in"
-msgstr "Opgehaald in %s seconden"
+msgstr "Opgehaald in %s"
msgid "sort_seeders"
msgstr "Sorteer op seeders"
@@ -184,8 +184,6 @@ msgstr "Straten"
msgid "satellite"
msgstr "Satelliet"
-msgid "esri_satellite"
-msgstr "ESRI Satelliet"
msgid "topographic"
msgstr "Topografisch"
@@ -198,3 +196,9 @@ msgstr "Je bevindt je binnen "
msgid "meters_from_point"
msgstr "meter van dit punt"
+
+msgid "seconds"
+msgstr "Seconden"
+
+msgid "milliseconds"
+msgstr "Milliseconden"
\ No newline at end of file
diff --git a/lang/no/LC_MESSAGES/default.po b/lang/no/LC_MESSAGES/default.po
index 369f472..77201db 100644
--- a/lang/no/LC_MESSAGES/default.po
+++ b/lang/no/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrenter"
msgid "searching_for_new_results"
-msgstr "Søker etter nye resultater..."
+msgstr "Søker etter nye resultater"
msgid "previous"
msgstr "Forrige"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "Neste"
msgid "fetched_in"
-msgstr "Hentet på %s sekunder"
+msgstr "Hentet på %s"
msgid "sort_seeders"
msgstr "Sorter etter seeders"
@@ -184,8 +184,6 @@ msgstr "Gater"
msgid "satellite"
msgstr "Satellitt"
-msgid "esri_satellite"
-msgstr "ESRI Satellitt"
msgid "topographic"
msgstr "Topografisk"
@@ -198,3 +196,9 @@ msgstr "Du er innenfor "
msgid "meters_from_point"
msgstr "meter fra dette punktet"
+
+msgid "seconds"
+msgstr "Sekunder"
+
+msgid "milliseconds"
+msgstr "Millisekunder"
\ No newline at end of file
diff --git a/lang/pl/LC_MESSAGES/default.po b/lang/pl/LC_MESSAGES/default.po
index 2c48817..31c5e6f 100644
--- a/lang/pl/LC_MESSAGES/default.po
+++ b/lang/pl/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrenty"
msgid "searching_for_new_results"
-msgstr "Wyszukiwanie nowych wyników..."
+msgstr "Wyszukiwanie nowych wyników"
msgid "previous"
msgstr "Poprzednie"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "Następne"
msgid "fetched_in"
-msgstr "Pobrano w %s sekund"
+msgstr "Pobrano w %s"
msgid "sort_seeders"
msgstr "Liczba seedów"
@@ -184,8 +184,6 @@ msgstr "Ulice"
msgid "satellite"
msgstr "Satelita"
-msgid "esri_satellite"
-msgstr "Esri Satelita"
msgid "topographic"
msgstr "Topograficzna"
@@ -197,4 +195,10 @@ msgid "you_are_within"
msgstr "Znajdujesz się w odległości "
msgid "meters_from_point"
-msgstr "metrów od tego punktu"
\ No newline at end of file
+msgstr "metrów od tego punktu"
+
+msgid "seconds"
+msgstr "Sekundy"
+
+msgid "milliseconds"
+msgstr "Milisekundy"
diff --git a/lang/pt/LC_MESSAGES/default.po b/lang/pt/LC_MESSAGES/default.po
index 440abea..a6f1874 100644
--- a/lang/pt/LC_MESSAGES/default.po
+++ b/lang/pt/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrents"
msgid "searching_for_new_results"
-msgstr "Procurando por novos resultados..."
+msgstr "Procurando por novos resultados"
msgid "previous"
msgstr "Anterior"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "Próximo"
msgid "fetched_in"
-msgstr "Obtido em %s segundos"
+msgstr "Obtido em %s"
msgid "sort_seeders"
msgstr "Ordenar por seeders"
@@ -184,8 +184,6 @@ msgstr "Ruas"
msgid "satellite"
msgstr "Satélite"
-msgid "esri_satellite"
-msgstr "Satélite ESRI"
msgid "topographic"
msgstr "Topográfico"
@@ -198,3 +196,9 @@ msgstr "Você está dentro de "
msgid "meters_from_point"
msgstr "metros deste ponto"
+
+msgid "seconds"
+msgstr "Segundos"
+
+msgid "milliseconds"
+msgstr "Milissegundos"
\ No newline at end of file
diff --git a/lang/ro/LC_MESSAGES/default.po b/lang/ro/LC_MESSAGES/default.po
index a3d3338..20a7134 100644
--- a/lang/ro/LC_MESSAGES/default.po
+++ b/lang/ro/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrenturi"
msgid "searching_for_new_results"
-msgstr "Caut rezultate noi..."
+msgstr "Caut rezultate noi"
msgid "previous"
msgstr "Anterior"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "Următorul"
msgid "fetched_in"
-msgstr "Obținut în %s secunde"
+msgstr "Obținut în %s"
msgid "sort_seeders"
msgstr "Sortează după seeders"
@@ -184,8 +184,6 @@ msgstr "Străzi"
msgid "satellite"
msgstr "Satelit"
-msgid "esri_satellite"
-msgstr "Satelit ESRI"
msgid "topographic"
msgstr "Topografic"
@@ -198,3 +196,9 @@ msgstr "Te afli la "
msgid "meters_from_point"
msgstr "metri de acest punct"
+
+msgid "seconds"
+msgstr "Secunde"
+
+msgid "milliseconds"
+msgstr "Milisecunde"
\ No newline at end of file
diff --git a/lang/ru/LC_MESSAGES/default.po b/lang/ru/LC_MESSAGES/default.po
index b90d86d..8007059 100644
--- a/lang/ru/LC_MESSAGES/default.po
+++ b/lang/ru/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Торренты"
msgid "searching_for_new_results"
-msgstr "Идёт поиск новых результатов..."
+msgstr "Идёт поиск новых результатов"
msgid "previous"
msgstr "Предыдущий"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "Следующий"
msgid "fetched_in"
-msgstr "Получено за %s секунд"
+msgstr "Получено за %s"
msgid "sort_seeders"
msgstr "Сортировать по сидерам"
@@ -184,8 +184,6 @@ msgstr "Улицы"
msgid "satellite"
msgstr "Спутник"
-msgid "esri_satellite"
-msgstr "Спутник ESRI"
msgid "topographic"
msgstr "Топографическая"
@@ -198,3 +196,9 @@ msgstr "Вы находитесь в "
msgid "meters_from_point"
msgstr "метрах от этой точки"
+
+msgid "seconds"
+msgstr "Секунды"
+
+msgid "milliseconds"
+msgstr "Миллисекунды"
diff --git a/lang/sk/LC_MESSAGES/default.po b/lang/sk/LC_MESSAGES/default.po
index 611db5c..e2ba122 100644
--- a/lang/sk/LC_MESSAGES/default.po
+++ b/lang/sk/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrenty"
msgid "searching_for_new_results"
-msgstr "Hľadám nové výsledky..."
+msgstr "Hľadám nové výsledky"
msgid "previous"
msgstr "Predchádzajúce"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "Ďalšie"
msgid "fetched_in"
-msgstr "Načítané za %s sekúnd"
+msgstr "Načítané za %s"
msgid "sort_seeders"
msgstr "Zoradiť podľa seedrov"
@@ -184,8 +184,6 @@ msgstr "Ulice"
msgid "satellite"
msgstr "Satelit"
-msgid "esri_satellite"
-msgstr "ESRI Satelit"
msgid "topographic"
msgstr "Topografické"
@@ -198,3 +196,9 @@ msgstr "Nachádzate sa vo vzdialenosti "
msgid "meters_from_point"
msgstr "metrov od tohto bodu"
+
+msgid "seconds"
+msgstr "Sekundy"
+
+msgid "milliseconds"
+msgstr "Milisekundy"
diff --git a/lang/sl/LC_MESSAGES/default.po b/lang/sl/LC_MESSAGES/default.po
index 1acc1f0..26d1bc7 100644
--- a/lang/sl/LC_MESSAGES/default.po
+++ b/lang/sl/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrenti"
msgid "searching_for_new_results"
-msgstr "Iskanje novih rezultatov..."
+msgstr "Iskanje novih rezultatov"
msgid "previous"
msgstr "Prejšnje"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "Naslednje"
msgid "fetched_in"
-msgstr "Pridobljeno v %s sekundah"
+msgstr "Pridobljeno v %s"
msgid "sort_seeders"
msgstr "Razvrsti po seederjih"
@@ -184,8 +184,6 @@ msgstr "Ulice"
msgid "satellite"
msgstr "Satelit"
-msgid "esri_satellite"
-msgstr "ESRI satelit"
msgid "topographic"
msgstr "Topografsko"
@@ -198,3 +196,9 @@ msgstr "Nahajate se znotraj "
msgid "meters_from_point"
msgstr "metrov od te točke"
+
+msgid "seconds"
+msgstr "Sekunde"
+
+msgid "milliseconds"
+msgstr "Milisekunde"
\ No newline at end of file
diff --git a/lang/sr/LC_MESSAGES/default.po b/lang/sr/LC_MESSAGES/default.po
index 19e953d..b05fdc0 100644
--- a/lang/sr/LC_MESSAGES/default.po
+++ b/lang/sr/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Торенти"
msgid "searching_for_new_results"
-msgstr "Тражење нових резултата..."
+msgstr "Тражење нових резултата"
msgid "previous"
msgstr "Претходно"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "Следеће"
msgid "fetched_in"
-msgstr "Преузето за %s секунди"
+msgstr "Преузето за %s"
msgid "sort_seeders"
msgstr "Сортирај по сеедерима"
@@ -184,8 +184,6 @@ msgstr "Улице"
msgid "satellite"
msgstr "Сателит"
-msgid "esri_satellite"
-msgstr "ESRI сателит"
msgid "topographic"
msgstr "Топографска"
@@ -198,3 +196,9 @@ msgstr "Налазите се на удаљености од "
msgid "meters_from_point"
msgstr "метара од ове тачке"
+
+msgid "seconds"
+msgstr "Секунди"
+
+msgid "milliseconds"
+msgstr "Милисекунде"
diff --git a/lang/sv/LC_MESSAGES/default.po b/lang/sv/LC_MESSAGES/default.po
index cbf0306..99d06b1 100644
--- a/lang/sv/LC_MESSAGES/default.po
+++ b/lang/sv/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrents"
msgid "searching_for_new_results"
-msgstr "Söker efter nya resultat..."
+msgstr "Söker efter nya resultat"
msgid "previous"
msgstr "Föregående"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "Nästa"
msgid "fetched_in"
-msgstr "Hämtad på %s sekunder"
+msgstr "Hämtad på %s"
msgid "sort_seeders"
msgstr "Sortera efter seeders"
@@ -184,8 +184,6 @@ msgstr "Gator"
msgid "satellite"
msgstr "Satellit"
-msgid "esri_satellite"
-msgstr "ESRI Satellit"
msgid "topographic"
msgstr "Topografisk"
@@ -198,3 +196,9 @@ msgstr "Du är inom "
msgid "meters_from_point"
msgstr "meter från denna punkt"
+
+msgid "seconds"
+msgstr "Sekunder"
+
+msgid "milliseconds"
+msgstr "Millisekunder"
diff --git a/lang/sw/LC_MESSAGES/default.po b/lang/sw/LC_MESSAGES/default.po
index 6834448..326efd1 100644
--- a/lang/sw/LC_MESSAGES/default.po
+++ b/lang/sw/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torenti"
msgid "searching_for_new_results"
-msgstr "Inatafuta matokeo mapya..."
+msgstr "Inatafuta matokeo mapya"
msgid "previous"
msgstr "Ya awali"
@@ -184,8 +184,6 @@ msgstr "Mitaa"
msgid "satellite"
msgstr "Setilaiti"
-msgid "esri_satellite"
-msgstr "Setilaiti ya ESRI"
msgid "topographic"
msgstr "Topografia"
@@ -198,3 +196,9 @@ msgstr "Uko ndani ya "
msgid "meters_from_point"
msgstr "mita kutoka eneo hili"
+
+msgid "seconds"
+msgstr "Sekunde"
+
+msgid "milliseconds"
+msgstr "Milisekunde"
diff --git a/lang/th/LC_MESSAGES/default.po b/lang/th/LC_MESSAGES/default.po
index 4749c83..cda462b 100644
--- a/lang/th/LC_MESSAGES/default.po
+++ b/lang/th/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "ทอร์เรนต์"
msgid "searching_for_new_results"
-msgstr "กำลังค้นหาผลลัพธ์ใหม่..."
+msgstr "กำลังค้นหาผลลัพธ์ใหม่"
msgid "previous"
msgstr "ก่อนหน้า"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "ถัดไป"
msgid "fetched_in"
-msgstr "ดึงข้อมูลใน %s วินาที"
+msgstr "ดึงข้อมูลใน %s"
msgid "sort_seeders"
msgstr "จัดเรียงตามซีดเดอร์"
@@ -184,8 +184,6 @@ msgstr "ถนน"
msgid "satellite"
msgstr "ดาวเทียม"
-msgid "esri_satellite"
-msgstr "ดาวเทียม ESRI"
msgid "topographic"
msgstr "ภูมิประเทศ"
@@ -198,3 +196,9 @@ msgstr "คุณอยู่ภายในระยะ "
msgid "meters_from_point"
msgstr "เมตรจากจุดนี้"
+
+msgid "seconds"
+msgstr "วินาที"
+
+msgid "milliseconds"
+msgstr "มิลลิวินาที"
diff --git a/lang/tl/LC_MESSAGES/default.po b/lang/tl/LC_MESSAGES/default.po
index eab7127..41b467a 100644
--- a/lang/tl/LC_MESSAGES/default.po
+++ b/lang/tl/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Mga Torrents"
msgid "searching_for_new_results"
-msgstr "Naghahanap ng mga bagong resulta..."
+msgstr "Naghahanap ng mga bagong resulta"
msgid "previous"
msgstr "Nakaraan"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "Susunod"
msgid "fetched_in"
-msgstr "Nakuha sa %s segundo"
+msgstr "Nakuha sa %s"
msgid "sort_seeders"
msgstr "Ayusin ayon sa seeders"
@@ -184,8 +184,6 @@ msgstr "Mga Kalye"
msgid "satellite"
msgstr "Satelite"
-msgid "esri_satellite"
-msgstr "ESRI Satelite"
msgid "topographic"
msgstr "Topograpiko"
@@ -198,3 +196,9 @@ msgstr "Ikaw ay nasa loob ng "
msgid "meters_from_point"
msgstr "metro mula sa puntong ito"
+
+msgid "seconds"
+msgstr "Segundo"
+
+msgid "milliseconds"
+msgstr "Milisegundo"
\ No newline at end of file
diff --git a/lang/tr/LC_MESSAGES/default.po b/lang/tr/LC_MESSAGES/default.po
index aafad53..edfdeb8 100644
--- a/lang/tr/LC_MESSAGES/default.po
+++ b/lang/tr/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrentler"
msgid "searching_for_new_results"
-msgstr "Yeni sonuçlar aranıyor..."
+msgstr "Yeni sonuçlar aranıyor"
msgid "previous"
msgstr "Önceki"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "Sonraki"
msgid "fetched_in"
-msgstr "%s saniyede alındı"
+msgstr "%s"
msgid "sort_seeders"
msgstr "Seeders'a göre sırala"
@@ -184,8 +184,6 @@ msgstr "Sokaklar"
msgid "satellite"
msgstr "Uydu"
-msgid "esri_satellite"
-msgstr "ESRI Uydu"
msgid "topographic"
msgstr "Topografik"
@@ -198,3 +196,9 @@ msgstr "Şuradasınız: "
msgid "meters_from_point"
msgstr "metre bu noktadan"
+
+msgid "seconds"
+msgstr "Saniye"
+
+msgid "milliseconds"
+msgstr "Milisaniye"
\ No newline at end of file
diff --git a/lang/uk/LC_MESSAGES/default.po b/lang/uk/LC_MESSAGES/default.po
index e2fc3ab..d5b8b38 100644
--- a/lang/uk/LC_MESSAGES/default.po
+++ b/lang/uk/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Торренти"
msgid "searching_for_new_results"
-msgstr "Шукаю нові результати..."
+msgstr "Шукаю нові результати"
msgid "previous"
msgstr "Попередній"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "Наступний"
msgid "fetched_in"
-msgstr "Отримано за %s секунд"
+msgstr "Отримано за %s"
msgid "sort_seeders"
msgstr "Сортувати за сідерами"
@@ -184,8 +184,6 @@ msgstr "Вулиці"
msgid "satellite"
msgstr "Супутник"
-msgid "esri_satellite"
-msgstr "Супутник ESRI"
msgid "topographic"
msgstr "Топографічна"
@@ -198,3 +196,9 @@ msgstr "Ви перебуваєте в межах "
msgid "meters_from_point"
msgstr "метрів від цієї точки"
+
+msgid "seconds"
+msgstr "Секунди"
+
+msgid "milliseconds"
+msgstr "Мілісекунди"
diff --git a/lang/vi/LC_MESSAGES/default.po b/lang/vi/LC_MESSAGES/default.po
index a5303ce..3006edd 100644
--- a/lang/vi/LC_MESSAGES/default.po
+++ b/lang/vi/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "Torrents"
msgid "searching_for_new_results"
-msgstr "Đang tìm kiếm kết quả mới..."
+msgstr "Đang tìm kiếm kết quả mới"
msgid "previous"
msgstr "Trước"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "Tiếp theo"
msgid "fetched_in"
-msgstr "Đã tìm trong %s giây"
+msgstr "Đã tìm trong %s"
msgid "sort_seeders"
msgstr "Sắp xếp theo seeders"
@@ -184,8 +184,6 @@ msgstr "Đường phố"
msgid "satellite"
msgstr "Vệ tinh"
-msgid "esri_satellite"
-msgstr "Vệ tinh ESRI"
msgid "topographic"
msgstr "Địa hình"
@@ -198,3 +196,9 @@ msgstr "Bạn đang ở trong phạm vi "
msgid "meters_from_point"
msgstr "mét từ điểm này"
+
+msgid "seconds"
+msgstr "Giây"
+
+msgid "milliseconds"
+msgstr "Mili giây"
diff --git a/lang/zh-CN/LC_MESSAGES/default.po b/lang/zh-CN/LC_MESSAGES/default.po
index d173139..f013786 100644
--- a/lang/zh-CN/LC_MESSAGES/default.po
+++ b/lang/zh-CN/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "种子"
msgid "searching_for_new_results"
-msgstr "正在搜索新结果..."
+msgstr "正在搜索新结果"
msgid "previous"
msgstr "上一页"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "下一页"
msgid "fetched_in"
-msgstr "%s 秒内获取"
+msgstr "%s"
msgid "sort_seeders"
msgstr "排序:上传者"
@@ -184,8 +184,6 @@ msgstr "街道"
msgid "satellite"
msgstr "卫星"
-msgid "esri_satellite"
-msgstr "ESRI 卫星"
msgid "topographic"
msgstr "地形图"
@@ -198,3 +196,9 @@ msgstr "您距离此点 "
msgid "meters_from_point"
msgstr "米"
+
+msgid "seconds"
+msgstr "秒"
+
+msgid "milliseconds"
+msgstr "毫秒"
diff --git a/lang/zh-TW/LC_MESSAGES/default.po b/lang/zh-TW/LC_MESSAGES/default.po
index 117897e..365d9fc 100644
--- a/lang/zh-TW/LC_MESSAGES/default.po
+++ b/lang/zh-TW/LC_MESSAGES/default.po
@@ -107,7 +107,7 @@ msgid "torrents"
msgstr "種子"
msgid "searching_for_new_results"
-msgstr "正在搜尋新結果..."
+msgstr "正在搜尋新結果"
msgid "previous"
msgstr "上一頁"
@@ -116,7 +116,7 @@ msgid "next"
msgstr "下一頁"
msgid "fetched_in"
-msgstr "已於 %s 秒內加載"
+msgstr "已於 %s"
msgid "sort_seeders"
msgstr "排序(種子數量)"
@@ -184,8 +184,6 @@ msgstr "街道"
msgid "satellite"
msgstr "衛星"
-msgid "esri_satellite"
-msgstr "ESRI 衛星"
msgid "topographic"
msgstr "地形"
@@ -198,3 +196,9 @@ msgstr "您在 "
msgid "meters_from_point"
msgstr "公尺範圍內"
+
+msgid "seconds"
+msgstr "秒"
+
+msgid "milliseconds"
+msgstr "毫秒"
diff --git a/main.go b/main.go
index 12c2381..7b72235 100755
--- a/main.go
+++ b/main.go
@@ -164,6 +164,8 @@ func handleSearch(w http.ResponseWriter, r *http.Request) {
handleImageSearch(w, r, settings, query, page)
case "video":
handleVideoSearch(w, settings, query, page)
+ case "music":
+ handleMusicSearch(w, settings, query, page)
case "map":
handleMapSearch(w, settings, query)
case "forum":
@@ -173,7 +175,7 @@ func handleSearch(w http.ResponseWriter, r *http.Request) {
case "text":
fallthrough
default:
- HandleTextSearch(w, settings, query, page)
+ HandleTextSearchWithInstantAnswer(w, settings, query, page)
}
}
@@ -226,7 +228,7 @@ func runServer() {
w.Header().Set("Content-Type", "application/opensearchdescription+xml")
http.ServeFile(w, r, "static/opensearch.xml")
})
- printInfo("Website functionality enabled.")
+ printInfo("Website is enabled.")
} else {
// Redirect all website routes to a "service disabled" handler
http.HandleFunc("/static/", handleWebsiteDisabled)
@@ -238,11 +240,7 @@ func runServer() {
http.HandleFunc("/image_status", handleWebsiteDisabled)
http.HandleFunc("/privacy", handleWebsiteDisabled)
http.HandleFunc("/opensearch.xml", handleWebsiteDisabled)
- printInfo("Website functionality disabled.")
- }
-
- if config.NodesEnabled {
- http.HandleFunc("/node", handleNodeRequest)
+ printInfo("Website is disabled.")
}
printMessage("Server is listening on http://localhost:%d", config.Port)
@@ -252,7 +250,7 @@ func runServer() {
func handleWebsiteDisabled(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "text/plain")
w.WriteHeader(http.StatusServiceUnavailable)
- _, _ = w.Write([]byte("The website functionality is currently disabled."))
+ _, _ = w.Write([]byte("The website is currently disabled."))
}
func handlePrivacyPage(w http.ResponseWriter, r *http.Request) {
@@ -280,20 +278,5 @@ func handlePrivacyPage(w http.ResponseWriter, r *http.Request) {
LanguageOptions: languageOptions,
}
- // Parse the template
- tmpl, err := template.New("privacy.html").ParseFiles("templates/privacy.html")
- if err != nil {
- log.Printf("Error parsing template: %v", err)
- http.Error(w, "Internal Server Error", http.StatusInternalServerError)
- return
- }
-
- // Set the response content type
- w.Header().Set("Content-Type", "text/html; charset=utf-8")
-
- // Execute the template
- if err := tmpl.Execute(w, data); err != nil {
- log.Printf("Error executing template: %v", err)
- http.Error(w, "Internal Server Error", http.StatusInternalServerError)
- }
+ renderTemplate(w, "privacy.html", toMap(data))
}
diff --git a/map.go b/map.go
index 4927fc0..ab3c5a5 100755
--- a/map.go
+++ b/map.go
@@ -5,7 +5,6 @@ import (
"fmt"
"net/http"
"net/url"
- "time"
)
type NominatimResponse struct {
@@ -59,7 +58,7 @@ func geocodeQuery(query string) (latitude, longitude string, found bool, err err
func handleMapSearch(w http.ResponseWriter, settings UserSettings, query string) {
// Start measuring the time for geocoding the query
- startTime := time.Now()
+ //startTime := time.Now()
// Geocode the query to get coordinates
latitude, longitude, found, err := geocodeQuery(query)
@@ -70,15 +69,15 @@ func handleMapSearch(w http.ResponseWriter, settings UserSettings, query string)
}
// Measure the elapsed time for geocoding
- elapsedTime := time.Since(startTime)
+ //elapsed := time.Since(startTime)
// Prepare the data to pass to the template
data := map[string]interface{}{
- "Query": query,
- "Latitude": latitude,
- "Longitude": longitude,
- "Found": found,
- "Fetched": fmt.Sprintf("%.2f %s", elapsedTime.Seconds(), Translate("seconds")),
+ "Query": query,
+ "Latitude": latitude,
+ "Longitude": longitude,
+ "Found": found,
+ //"Fetched": FormatElapsedTime(elapsed), // not used in map tab
"Theme": settings.Theme,
"Safe": settings.SafeSearch,
"IsThemeDark": settings.IsThemeDark,
diff --git a/music-bandcamp.go b/music-bandcamp.go
new file mode 100644
index 0000000..95922ae
--- /dev/null
+++ b/music-bandcamp.go
@@ -0,0 +1,80 @@
+// music-bandcamp.go - Bandcamp specific implementation
+package main
+
+import (
+ "fmt"
+ "net/http"
+ "net/url"
+ "strings"
+
+ "github.com/PuerkitoBio/goquery"
+)
+
+func SearchBandcamp(query string, page int) ([]MusicResult, error) {
+ baseURL := "https://bandcamp.com/search?"
+ params := url.Values{
+ "q": []string{query},
+ "page": []string{fmt.Sprintf("%d", page)},
+ }
+
+ resp, err := http.Get(baseURL + params.Encode())
+ if err != nil {
+ return nil, fmt.Errorf("request failed: %v", err)
+ }
+	defer resp.Body.Close()
+	if resp.StatusCode != http.StatusOK { return nil, fmt.Errorf("bandcamp search returned status %d", resp.StatusCode) }
+	doc, err := goquery.NewDocumentFromReader(resp.Body)
+ if err != nil {
+ return nil, fmt.Errorf("failed to parse HTML: %v", err)
+ }
+
+ var results []MusicResult
+
+ doc.Find("li.searchresult").Each(func(i int, s *goquery.Selection) {
+ // Extract the item type
+ itemType := strings.ToLower(strings.TrimSpace(s.Find("div.itemtype").Text()))
+
+ // Skip if the item is not an album or track
+ if itemType != "album" && itemType != "track" {
+ return
+ }
+
+ result := MusicResult{Source: "Bandcamp"}
+
+ // URL extraction
+ if urlSel := s.Find("div.itemurl a"); urlSel.Length() > 0 {
+ result.URL = strings.TrimSpace(urlSel.Text())
+ }
+
+ // Title extraction
+ if titleSel := s.Find("div.heading a"); titleSel.Length() > 0 {
+ result.Title = strings.TrimSpace(titleSel.Text())
+ }
+
+ // Artist extraction
+ if artistSel := s.Find("div.subhead"); artistSel.Length() > 0 {
+ result.Artist = strings.TrimSpace(artistSel.Text())
+ }
+
+ // Thumbnail extraction
+ if thumbSel := s.Find("div.art img"); thumbSel.Length() > 0 {
+ result.Thumbnail, _ = thumbSel.Attr("src")
+ }
+
+ // // Iframe URL construction
+ // if linkHref, exists := s.Find("div.itemurl a").Attr("href"); exists {
+ // if itemID := extractSearchItemID(linkHref); itemID != "" {
+ // itemType := strings.ToLower(strings.TrimSpace(s.Find("div.itemtype").Text()))
+ // result.IframeSrc = fmt.Sprintf(
+ // "https://bandcamp.com/EmbeddedPlayer/%s=%s/size=large/bgcol=000/linkcol=fff/artwork=small",
+ // itemType,
+ // itemID,
+ // )
+ // }
+ // }
+
+ results = append(results, result)
+ })
+
+ return results, nil
+}
diff --git a/music-soundcloud.go b/music-soundcloud.go
new file mode 100644
index 0000000..2aa9f46
--- /dev/null
+++ b/music-soundcloud.go
@@ -0,0 +1,211 @@
+package main
+
+import (
+ "encoding/json"
+ "fmt"
+ "io"
+ "net/http"
+ "net/url"
+ "regexp"
+ "strings"
+
+ "github.com/PuerkitoBio/goquery"
+)
+
+type SoundCloudTrack struct {
+ ID int `json:"id"`
+ Title string `json:"title"`
+ Permalink string `json:"permalink"`
+ ArtworkURL string `json:"artwork_url"`
+ Duration int `json:"duration"`
+ User struct {
+ Username string `json:"username"`
+ Permalink string `json:"permalink"`
+ } `json:"user"`
+ Streams struct {
+ HTTPMP3128URL string `json:"http_mp3_128_url"`
+ } `json:"streams"`
+}
+
+func SearchSoundCloud(query string, page int) ([]MusicResult, error) {
+ clientID, err := extractClientID()
+ if err != nil {
+ return searchSoundCloudViaScraping(query, page)
+ }
+
+ apiResults, err := searchSoundCloudViaAPI(query, clientID, page)
+ if err == nil && len(apiResults) > 0 {
+ return convertSoundCloudResults(apiResults), nil
+ }
+
+ return searchSoundCloudViaScraping(query, page)
+}
+
+func searchSoundCloudViaAPI(query, clientID string, page int) ([]SoundCloudTrack, error) {
+ const limit = 10
+ offset := (page - 1) * limit
+
+ apiUrl := fmt.Sprintf(
+ "https://api-v2.soundcloud.com/search/tracks?q=%s&client_id=%s&limit=%d&offset=%d",
+ url.QueryEscape(query),
+ clientID,
+ limit,
+ offset,
+ )
+
+ resp, err := http.Get(apiUrl)
+ if err != nil {
+ return nil, err
+ }
+ defer resp.Body.Close()
+
+ if resp.StatusCode != http.StatusOK {
+ return nil, fmt.Errorf("API request failed with status: %d", resp.StatusCode)
+ }
+
+ var response struct {
+ Collection []SoundCloudTrack `json:"collection"`
+ }
+
+ if err := json.NewDecoder(resp.Body).Decode(&response); err != nil {
+ return nil, err
+ }
+
+ return response.Collection, nil
+}
+
+func convertSoundCloudResults(tracks []SoundCloudTrack) []MusicResult {
+ var results []MusicResult
+
+ for _, track := range tracks {
+ thumbnail := strings.Replace(track.ArtworkURL, "large", "t500x500", 1)
+ trackURL := fmt.Sprintf("https://soundcloud.com/%s/%s",
+ track.User.Permalink,
+ track.Permalink,
+ )
+
+ // Convert ms to hh:mm:ss
+ totalSeconds := track.Duration / 1000
+ hours := totalSeconds / 3600
+ minutes := (totalSeconds % 3600) / 60
+ seconds := totalSeconds % 60
+
+ var durationStr string
+ if hours > 0 {
+ durationStr = fmt.Sprintf("%d:%02d:%02d", hours, minutes, seconds)
+ } else {
+ durationStr = fmt.Sprintf("%d:%02d", minutes, seconds)
+ }
+
+ results = append(results, MusicResult{
+ Title: track.Title,
+ Artist: track.User.Username,
+ URL: trackURL,
+ Thumbnail: thumbnail,
+ //AudioURL: track.Streams.HTTPMP3128URL,
+ Source: "SoundCloud",
+ Duration: durationStr,
+ })
+ }
+ return results
+}
+
+func searchSoundCloudViaScraping(query string, page int) ([]MusicResult, error) {
+ searchUrl := fmt.Sprintf("https://soundcloud.com/search/sounds?q=%s", url.QueryEscape(query))
+ resp, err := http.Get(searchUrl)
+ if err != nil {
+ return nil, err
+ }
+ defer resp.Body.Close()
+
+ doc, err := goquery.NewDocumentFromReader(resp.Body)
+ if err != nil {
+ return nil, err
+ }
+
+ var results []MusicResult
+ doc.Find("li.searchList__item").Each(func(i int, s *goquery.Selection) {
+ titleElem := s.Find("a.soundTitle__title")
+ artistElem := s.Find("a.soundTitle__username")
+ artworkElem := s.Find(".sound__coverArt")
+
+ title := strings.TrimSpace(titleElem.Text())
+ artist := strings.TrimSpace(artistElem.Text())
+ href, _ := titleElem.Attr("href")
+ thumbnail, _ := artworkElem.Find("span.sc-artwork").Attr("style")
+
+ if thumbnail != "" {
+ if matches := regexp.MustCompile(`url\((.*?)\)`).FindStringSubmatch(thumbnail); len(matches) > 1 {
+ thumbnail = strings.Trim(matches[1], `"`)
+ }
+ }
+
+ if title == "" || href == "" {
+ return
+ }
+
+ trackURL, err := url.Parse(href)
+ if err != nil {
+ return
+ }
+
+ if trackURL.Host == "" {
+ trackURL.Scheme = "https"
+ trackURL.Host = "soundcloud.com"
+ }
+
+ trackURL.Path = strings.ReplaceAll(trackURL.Path, "//", "/")
+ fullURL := trackURL.String()
+
+ results = append(results, MusicResult{
+ Title: title,
+ Artist: artist,
+ URL: fullURL,
+ Thumbnail: thumbnail,
+ Source: "SoundCloud",
+ })
+ })
+
+ return results, nil
+}
+
+func extractClientID() (string, error) {
+ resp, err := http.Get("https://soundcloud.com/")
+ if err != nil {
+ return "", err
+ }
+ defer resp.Body.Close()
+
+ doc, err := goquery.NewDocumentFromReader(resp.Body)
+ if err != nil {
+ return "", err
+ }
+
+ var clientID string
+ doc.Find("script[src]").Each(func(i int, s *goquery.Selection) {
+ if clientID != "" {
+ return
+ }
+
+ src, _ := s.Attr("src")
+ if strings.Contains(src, "sndcdn.com/assets/") {
+ resp, err := http.Get(src)
+ if err != nil {
+ return
+ }
+ defer resp.Body.Close()
+
+ body, _ := io.ReadAll(resp.Body)
+ re := regexp.MustCompile(`client_id:"([^"]+)"`)
+ matches := re.FindSubmatch(body)
+ if len(matches) > 1 {
+ clientID = string(matches[1])
+ }
+ }
+ })
+
+ if clientID == "" {
+ return "", fmt.Errorf("client_id not found")
+ }
+ return clientID, nil
+}
diff --git a/music-spotify.go b/music-spotify.go
new file mode 100644
index 0000000..d33e6a3
--- /dev/null
+++ b/music-spotify.go
@@ -0,0 +1,81 @@
+package main
+
+import (
+ "fmt"
+ "net/http"
+ "net/url"
+ "strings"
+ "time"
+
+ "github.com/PuerkitoBio/goquery"
+)
+
+func SearchSpotify(query string, page int) ([]MusicResult, error) {
+ searchUrl := fmt.Sprintf("https://open.spotify.com/search/%s", url.PathEscape(query))
+
+ client := &http.Client{
+ Timeout: 10 * time.Second,
+ CheckRedirect: func(req *http.Request, via []*http.Request) error {
+ return http.ErrUseLastResponse
+ },
+ }
+
+ req, err := http.NewRequest("GET", searchUrl, nil)
+ if err != nil {
+ return nil, fmt.Errorf("failed to create request: %v", err)
+ }
+
+ // Set user agent ?
+
+ resp, err := client.Do(req)
+ if err != nil {
+ return nil, fmt.Errorf("request failed: %v", err)
+ }
+ defer resp.Body.Close()
+
+ if resp.StatusCode != http.StatusOK {
+ return nil, fmt.Errorf("received non-200 status code: %d", resp.StatusCode)
+ }
+
+ doc, err := goquery.NewDocumentFromReader(resp.Body)
+ if err != nil {
+ return nil, fmt.Errorf("failed to parse document: %v", err)
+ }
+
+ var results []MusicResult
+
+ // Find track elements
+ doc.Find(`div[data-testid="tracklist-row"]`).Each(func(i int, s *goquery.Selection) {
+ // Extract title
+ title := s.Find(`div[data-testid="tracklist-row__title"] a`).Text()
+ title = strings.TrimSpace(title)
+
+ // Extract artist
+ artist := s.Find(`div[data-testid="tracklist-row__artist"] a`).First().Text()
+ artist = strings.TrimSpace(artist)
+
+ // Extract duration
+ duration := s.Find(`div[data-testid="tracklist-row__duration"]`).First().Text()
+ duration = strings.TrimSpace(duration)
+
+ // Extract URL
+ path, _ := s.Find(`div[data-testid="tracklist-row__title"] a`).Attr("href")
+ fullUrl := fmt.Sprintf("https://open.spotify.com%s", path)
+
+ // Extract thumbnail
+ thumbnail, _ := s.Find(`img[aria-hidden="false"]`).Attr("src")
+
+ if title != "" && artist != "" {
+ results = append(results, MusicResult{
+ Title: title,
+ Artist: artist,
+ URL: fullUrl,
+ Duration: duration,
+ Thumbnail: thumbnail,
+ Source: "Spotify",
+ })
+ }
+ })
+
+ return results, nil
+}
diff --git a/music-youtube.go b/music-youtube.go
new file mode 100644
index 0000000..d262428
--- /dev/null
+++ b/music-youtube.go
@@ -0,0 +1,113 @@
+package main
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+ "net/url"
+)
+
+type MusicAPIResponse struct {
+ Items []struct {
+ Title string `json:"title"`
+ UploaderName string `json:"uploaderName"`
+ Duration int `json:"duration"`
+ Thumbnail string `json:"thumbnail"`
+ URL string `json:"url"`
+ } `json:"items"` // Removed VideoID since we'll parse from URL
+}
+
+func SearchMusicViaPiped(query string, page int) ([]MusicResult, error) {
+ var lastError error
+
+ // We will try to use preferred instance
+	mu.Lock()
+	instance := preferredInstance
+	instanceDisabled := disabledInstances[instance]
+	mu.Unlock()
+	if instance != "" && !instanceDisabled {
+ url := fmt.Sprintf(
+ "https://%s/search?q=%s&filter=music_songs&page=%d",
+ instance,
+ url.QueryEscape(query),
+ page,
+ )
+
+		resp, err := http.Get(url)
+		if err == nil {
+			defer resp.Body.Close()
+			var apiResp MusicAPIResponse
+			if resp.StatusCode == http.StatusOK && json.NewDecoder(resp.Body).Decode(&apiResp) == nil {
+				return convertPipedToMusicResults(instance, apiResp), nil
+			}
+		}
+
+ printWarn("Preferred instance %s failed for music, falling back", instance)
+ disableInstance(instance)
+ }
+
+ // 2. Fallback using others
+ mu.Lock()
+ defer mu.Unlock()
+ for _, inst := range pipedInstances {
+ if disabledInstances[inst] {
+ continue
+ }
+
+ url := fmt.Sprintf(
+ "https://%s/search?q=%s&filter=music_songs&page=%d",
+ inst,
+ url.QueryEscape(query),
+ page,
+ )
+
+ resp, err := http.Get(url)
+ if err != nil || resp.StatusCode != http.StatusOK {
+ printInfo("Disabling instance %s due to error: %v", inst, err)
+ disabledInstances[inst] = true
+ lastError = fmt.Errorf("request to %s failed: %w", inst, err)
+			if resp != nil { resp.Body.Close() }; continue
+ }
+
+ defer resp.Body.Close()
+ var apiResp MusicAPIResponse
+ if err := json.NewDecoder(resp.Body).Decode(&apiResp); err != nil {
+ lastError = fmt.Errorf("failed to decode response from %s: %w", inst, err)
+ continue
+ }
+
+ preferredInstance = inst
+ return convertPipedToMusicResults(inst, apiResp), nil
+ }
+
+ return nil, fmt.Errorf("all Piped instances failed, last error: %v", lastError)
+}
+
+func convertPipedToMusicResults(instance string, resp MusicAPIResponse) []MusicResult {
+ seen := make(map[string]bool)
+ var results []MusicResult
+
+ for _, item := range resp.Items {
+ // Extract video ID from URL
+ u, err := url.Parse(item.URL)
+ if err != nil {
+ continue
+ }
+ videoID := u.Query().Get("v")
+ if videoID == "" || seen[videoID] {
+ continue
+ }
+ seen[videoID] = true
+
+ results = append(results, MusicResult{
+ Title: item.Title,
+ Artist: item.UploaderName,
+ URL: fmt.Sprintf("https://music.youtube.com%s", item.URL),
+ Duration: formatDuration(item.Duration),
+ Thumbnail: item.Thumbnail,
+ Source: "YouTube Music",
+ //AudioURL: fmt.Sprintf("https://%s/stream/%s", instance, videoID),
+ })
+ }
+ return results
+}
diff --git a/music.go b/music.go
new file mode 100644
index 0000000..76518e2
--- /dev/null
+++ b/music.go
@@ -0,0 +1,177 @@
+// music.go - Central music search handler
+package main
+
+import (
+ "net/http"
+ "sync"
+ "time"
+)
+
+type MusicSearchEngine struct {
+ Name string
+ Func func(query string, page int) ([]MusicResult, error)
+}
+
+var (
+ musicSearchEngines []MusicSearchEngine
+ cacheMutex = &sync.Mutex{}
+)
+
+var allMusicSearchEngines = []MusicSearchEngine{
+ {Name: "SoundCloud", Func: SearchSoundCloud},
+ {Name: "YouTube", Func: SearchMusicViaPiped},
+ {Name: "Bandcamp", Func: SearchBandcamp},
+ //{Name: "Spotify", Func: SearchSpotify},
+}
+
+func initMusicEngines() {
+ // Initialize with all engines if no specific config
+ musicSearchEngines = allMusicSearchEngines
+}
+
+func handleMusicSearch(w http.ResponseWriter, settings UserSettings, query string, page int) {
+ start := time.Now()
+
+ cacheKey := CacheKey{
+ Query: query,
+ Page: page,
+ Type: "music",
+ Lang: settings.SearchLanguage,
+ Safe: settings.SafeSearch == "active",
+ }
+
+ var results []MusicResult
+
+ if cached, found := resultsCache.Get(cacheKey); found {
+ if musicResults, ok := convertCacheToMusicResults(cached); ok {
+ results = musicResults
+ }
+ }
+
+ if len(results) == 0 {
+ results = fetchMusicResults(query, page)
+ if len(results) > 0 {
+ resultsCache.Set(cacheKey, convertMusicResultsToCache(results))
+ }
+ }
+
+ go prefetchMusicPages(query, page)
+
+ elapsed := time.Since(start) // Calculate duration
+
+ data := map[string]interface{}{
+ "Results": results,
+ "Query": query,
+ "Page": page,
+ "HasPrevPage": page > 1,
+ "HasNextPage": len(results) >= 10, // Default page size
+ "NoResults": len(results) == 0,
+ "MusicServices": getMusicServiceNames(),
+ "CurrentService": "all", // Default service
+ "Theme": settings.Theme,
+ "IsThemeDark": settings.IsThemeDark,
+ "Trans": Translate,
+ "Fetched": FormatElapsedTime(elapsed),
+ }
+
+ renderTemplate(w, "music.html", data)
+}
+
+// Helper to get music service names
+func getMusicServiceNames() []string {
+ names := make([]string, len(allMusicSearchEngines))
+ for i, engine := range allMusicSearchEngines {
+ names[i] = engine.Name
+ }
+ return names
+}
+
+func convertMusicResultsToCache(results []MusicResult) []SearchResult {
+ cacheResults := make([]SearchResult, len(results))
+ for i, r := range results {
+ cacheResults[i] = r
+ }
+ return cacheResults
+}
+
+func convertCacheToMusicResults(cached []SearchResult) ([]MusicResult, bool) {
+ results := make([]MusicResult, 0, len(cached))
+ for _, item := range cached {
+ if musicResult, ok := item.(MusicResult); ok {
+ results = append(results, musicResult)
+ } else {
+ return nil, false
+ }
+ }
+ return results, true
+}
+
+func fetchMusicResults(query string, page int) []MusicResult {
+ var results []MusicResult
+ resultsChan := make(chan []MusicResult, len(musicSearchEngines))
+ var wg sync.WaitGroup
+
+ for _, engine := range musicSearchEngines {
+ wg.Add(1)
+ go func(e MusicSearchEngine) {
+ defer wg.Done()
+ res, err := e.Func(query, page)
+ if err == nil && len(res) > 0 {
+ resultsChan <- res
+ }
+ }(engine)
+ }
+
+ go func() {
+ wg.Wait()
+ close(resultsChan)
+ }()
+
+ for res := range resultsChan {
+ results = append(results, res...)
+ if len(results) >= 50 { // Default max results
+ break
+ }
+ }
+
+ return deduplicateResults(results)
+}
+
+func prefetchMusicPages(query string, currentPage int) {
+ for _, page := range []int{currentPage - 1, currentPage + 1} {
+ if page < 1 {
+ continue
+ }
+ cacheKey := CacheKey{
+ Query: query,
+ Page: page,
+ Type: "music",
+ }
+ if _, found := resultsCache.Get(cacheKey); !found {
+ go fetchMusicResults(query, page)
+ }
+ }
+}
+
+func deduplicateResults(results []MusicResult) []MusicResult {
+ seen := make(map[string]bool)
+ var unique []MusicResult
+
+ for _, res := range results {
+ if !seen[res.URL] {
+ seen[res.URL] = true
+ unique = append(unique, res)
+ }
+ }
+ return unique
+}
+
+// func generatePlayerHTML(result MusicResult) template.HTML {
+// if result.IframeSrc != "" {
+// return template.HTML(fmt.Sprintf(
+// ``,
+// result.IframeSrc,
+// ))
+// }
+// return template.HTML("")
+// }
diff --git a/node-handle-search.go b/node-handle-search.go
index 7323710..bdb7c8f 100755
--- a/node-handle-search.go
+++ b/node-handle-search.go
@@ -1,218 +1,203 @@
+//go:build experimental
+// +build experimental
+
package main
import (
"encoding/json"
- "log"
)
-func handleSearchTextMessage(msg Message) {
- var searchParams struct {
- Query string `json:"query"`
- Safe string `json:"safe"`
- Lang string `json:"lang"`
- Page int `json:"page"`
- ResponseAddr string `json:"responseAddr"`
+type searchParams struct {
+ Query string `json:"query"`
+ Safe string `json:"safe"`
+ Lang string `json:"lang"`
+ Page int `json:"page"`
+ ResponseAddr string `json:"responseAddr"`
+}
+
+func extractTargetFromAddress(addr string) string {
+ if len(addr) > 5 && addr[len(addr)-5:] == ".sock" {
+ return addr[:len(addr)-5]
}
- err := json.Unmarshal([]byte(msg.Content), &searchParams)
- if err != nil {
- printWarn("Error parsing search parameters: %v", err)
+ return addr
+}
+
+// Utility to respond to any search
+func respondToSearch(req searchParams, msgType uint8, results any) {
+ if req.ResponseAddr == "" {
+ printErr("ResponseAddr is empty")
return
}
- printDebug("Received search-text request. ResponseAddr: %s", searchParams.ResponseAddr)
-
- results := fetchTextResults(searchParams.Query, searchParams.Safe, searchParams.Lang, searchParams.Page)
- resultsJSON, err := json.Marshal(results)
+ respBytes, err := json.Marshal(results)
if err != nil {
- printWarn("Error marshalling search results: %v", err)
+ printWarn("Failed to marshal results for msg type %d: %v", msgType, err)
return
}
- responseMsg := Message{
- ID: hostID,
- Type: "text-results",
- Content: string(resultsJSON),
+ resp := Message{
+ ID: generateMessageID(),
+ Type: msgType,
+ Content: respBytes,
+ Target: req.ResponseAddr,
}
- // Log the address to be used for sending the response
- printDebug("Sending text search results to %s", searchParams.ResponseAddr)
-
- if searchParams.ResponseAddr == "" {
- printErr("Error: Response address is empty")
- return
- }
-
- err = sendMessage(searchParams.ResponseAddr, responseMsg)
+ err = sendMessage(resp)
if err != nil {
- printWarn("Error sending text search results to %s: %v", searchParams.ResponseAddr, err)
+ printWarn("Failed to send search results to %s: %v", req.ResponseAddr, err)
}
}
-func handleSearchImageMessage(msg Message) {
- var searchParams struct {
- Query string `json:"query"`
- Safe string `json:"safe"`
- Lang string `json:"lang"`
- Page int `json:"page"`
- ResponseAddr string `json:"responseAddr"`
- }
- err := json.Unmarshal([]byte(msg.Content), &searchParams)
- if err != nil {
- log.Printf("Error parsing search parameters: %v", err)
+func sendBinaryResponse(req searchParams, msgType uint8, payload []byte, msgID uint32) {
+ if req.ResponseAddr == "" {
+ printErr("ResponseAddr is empty")
return
}
- log.Printf("Received search-image request. ResponseAddr: %s", searchParams.ResponseAddr)
- results := fetchImageResults(searchParams.Query, searchParams.Safe, searchParams.Lang, searchParams.Page, true)
- resultsJSON, err := json.Marshal(results)
+ resp := Message{
+ ID: msgID,
+ Type: msgType,
+ Content: payload,
+ Target: req.ResponseAddr,
+ }
+
+ if err := sendMessage(resp); err != nil {
+ printWarn("Failed to send binary search results: %v", err)
+ }
+}
+
+func handleSearchTextMessage(msg Message) {
+ var req searchParams
+ if err := json.Unmarshal([]byte(msg.Content), &req); err != nil {
+ printWarn("Invalid JSON: %v", err)
+ return
+ }
+ printDebug("Received search-text from %s", req.ResponseAddr)
+
+ results := fetchTextResults(req.Query, req.Safe, req.Lang, req.Page)
+ data, err := encodeTextResults(results)
if err != nil {
- log.Printf("Error marshalling search results: %v", err)
+ printWarn("Failed to encode text results: %v", err)
return
}
- responseMsg := Message{
- ID: hostID,
- Type: "image-results",
- Content: string(resultsJSON),
- }
-
- // Log the address to be used for sending the response
- log.Printf("Sending image search results to %s", searchParams.ResponseAddr)
-
- if searchParams.ResponseAddr == "" {
- log.Printf("Error: Response address is empty")
- return
- }
-
- err = sendMessage(searchParams.ResponseAddr, responseMsg)
- if err != nil {
- log.Printf("Error sending image search results to %s: %v", searchParams.ResponseAddr, err)
- }
+ sendBinaryResponse(req, MsgTypeSearchTextResponse, data, msg.ID)
}
func handleSearchVideoMessage(msg Message) {
- var searchParams struct {
- Query string `json:"query"`
- Safe string `json:"safe"`
- Lang string `json:"lang"`
- Page int `json:"page"`
- ResponseAddr string `json:"responseAddr"`
+ var req searchParams
+ if err := json.Unmarshal([]byte(msg.Content), &req); err != nil {
+ printWarn("Invalid JSON: %v", err)
+ return
}
- err := json.Unmarshal([]byte(msg.Content), &searchParams)
+ printDebug("Received search-video from %s", req.ResponseAddr)
+
+ results := fetchVideoResults(req.Query, req.Safe, req.Lang, req.Page)
+ data, err := encodeVideoResults(results)
if err != nil {
- log.Printf("Error parsing search parameters: %v", err)
+ printWarn("Failed to encode video results: %v", err)
return
}
- log.Printf("Received search-video request. ResponseAddr: %s", searchParams.ResponseAddr)
+ sendBinaryResponse(req, MsgTypeSearchVideoResponse, data, msg.ID)
+}
- results := fetchVideoResults(searchParams.Query, searchParams.Safe, searchParams.Lang, searchParams.Page)
- resultsJSON, err := json.Marshal(results)
+func handleSearchMusicMessage(msg Message) {
+ var req searchParams
+ if err := json.Unmarshal([]byte(msg.Content), &req); err != nil {
+ printWarn("Invalid JSON: %v", err)
+ return
+ }
+ printDebug("Received search-music from %s", req.ResponseAddr)
+
+ results := fetchMusicResults(req.Query, req.Page)
+ data, err := encodeMusicResults(results)
if err != nil {
- log.Printf("Error marshalling search results: %v", err)
+ printWarn("Failed to encode music results: %v", err)
return
}
- responseMsg := Message{
- ID: hostID,
- Type: "video-results",
- Content: string(resultsJSON),
- }
-
- log.Printf("Sending video search results to %s", searchParams.ResponseAddr)
-
- if searchParams.ResponseAddr == "" {
- log.Printf("Error: Response address is empty")
- return
- }
-
- err = sendMessage(searchParams.ResponseAddr, responseMsg)
- if err != nil {
- log.Printf("Error sending video search results to %s: %v", searchParams.ResponseAddr, err)
- }
+ sendBinaryResponse(req, MsgTypeSearchMusicResponse, data, msg.ID)
}
func handleSearchFileMessage(msg Message) {
- var searchParams struct {
- Query string `json:"query"`
- Safe string `json:"safe"`
- Lang string `json:"lang"`
- Page int `json:"page"`
- ResponseAddr string `json:"responseAddr"`
+ var req searchParams
+ if err := json.Unmarshal([]byte(msg.Content), &req); err != nil {
+ printWarn("Invalid JSON: %v", err)
+ return
}
- err := json.Unmarshal([]byte(msg.Content), &searchParams)
+ printDebug("Received search-file from %s", req.ResponseAddr)
+
+ results := fetchFileResults(req.Query, req.Safe, req.Lang, req.Page)
+ data, err := encodeFileResults(results)
if err != nil {
- log.Printf("Error parsing search parameters: %v", err)
+ printWarn("Failed to encode file results: %v", err)
return
}
- log.Printf("Received search-file request. ResponseAddr: %s", searchParams.ResponseAddr)
-
- results := fetchFileResults(searchParams.Query, searchParams.Safe, searchParams.Lang, searchParams.Page)
- resultsJSON, err := json.Marshal(results)
- if err != nil {
- log.Printf("Error marshalling search results: %v", err)
- return
- }
-
- responseMsg := Message{
- ID: hostID,
- Type: "file-results",
- Content: string(resultsJSON),
- }
-
- log.Printf("Sending file search results to %s", searchParams.ResponseAddr)
-
- if searchParams.ResponseAddr == "" {
- log.Printf("Error: Response address is empty")
- return
- }
-
- err = sendMessage(searchParams.ResponseAddr, responseMsg)
- if err != nil {
- log.Printf("Error sending file search results to %s: %v", searchParams.ResponseAddr, err)
- }
+ sendBinaryResponse(req, MsgTypeSearchFileResponse, data, msg.ID)
}
func handleSearchForumMessage(msg Message) {
- var searchParams struct {
- Query string `json:"query"`
- Safe string `json:"safe"`
- Lang string `json:"lang"`
- Page int `json:"page"`
- ResponseAddr string `json:"responseAddr"`
+ var req searchParams
+ if err := json.Unmarshal([]byte(msg.Content), &req); err != nil {
+ printWarn("Invalid JSON: %v", err)
+ return
}
- err := json.Unmarshal([]byte(msg.Content), &searchParams)
+ printDebug("Received search-forum from %s", req.ResponseAddr)
+
+ results := fetchForumResults(req.Query, req.Safe, req.Lang, req.Page)
+ data, err := encodeForumResults(results)
if err != nil {
- log.Printf("Error parsing search parameters: %v", err)
+ printWarn("Failed to encode forum results: %v", err)
return
}
- log.Printf("Received search-forum request. ResponseAddr: %s", searchParams.ResponseAddr)
-
- results := fetchForumResults(searchParams.Query, searchParams.Safe, searchParams.Lang, searchParams.Page)
- resultsJSON, err := json.Marshal(results)
- if err != nil {
- log.Printf("Error marshalling search results: %v", err)
- return
- }
-
- responseMsg := Message{
- ID: hostID,
- Type: "forum-results",
- Content: string(resultsJSON),
- }
-
- // Log the address to be used for sending the response
- log.Printf("Sending forum search results to %s", searchParams.ResponseAddr)
-
- if searchParams.ResponseAddr == "" {
- log.Printf("Error: Response address is empty")
- return
- }
-
- err = sendMessage(searchParams.ResponseAddr, responseMsg)
- if err != nil {
- log.Printf("Error sending forum search results to %s: %v", searchParams.ResponseAddr, err)
- }
+ sendBinaryResponse(req, MsgTypeSearchForumResponse, data, msg.ID)
+}
+
+func handleSearchImageMessage(msg Message) {
+ var req searchParams
+ if err := json.Unmarshal([]byte(msg.Content), &req); err != nil {
+ printWarn("Invalid JSON: %v", err)
+ return
+ }
+ printDebug("Received image search type %d from %s", msg.Type, req.ResponseAddr)
+
+ var (
+ thumbsNeeded bool
+ fullNeeded bool
+ )
+
+ switch msg.Type {
+ case MsgTypeSearchImageRawRequest:
+ thumbsNeeded = false
+ fullNeeded = false
+ case MsgTypeSearchImageThumbRequest:
+ thumbsNeeded = true
+ fullNeeded = false
+ case MsgTypeSearchImageFullRequest:
+ thumbsNeeded = false
+ fullNeeded = true
+ case MsgTypeSearchImageAllRequest:
+ thumbsNeeded = true
+ fullNeeded = true
+ default:
+ printWarn("Unknown image search type: %d", msg.Type)
+ return
+ }
+
+ results := fetchImageResults(req.Query, req.Safe, req.Lang, req.Page, true, thumbsNeeded)
+
+ if fullNeeded || thumbsNeeded {
+ results = prepareProxiedImages(results, msg.Type)
+ }
+
+ data, err := encodeImageResults(results)
+ if err != nil {
+ printWarn("Failed to encode image results: %v", err)
+ return
+ }
+
+ sendBinaryResponse(req, MsgTypeSearchImageResponse, data, msg.ID)
}
diff --git a/node-master.go b/node-master.go
deleted file mode 100644
index 133f72e..0000000
--- a/node-master.go
+++ /dev/null
@@ -1,91 +0,0 @@
-package main
-
-import (
- "log"
- "sync"
- "time"
-)
-
-var (
- isMaster bool
- masterNode string
- masterNodeMux sync.RWMutex
-)
-
-const (
- heartbeatInterval = 5 * time.Second
- heartbeatTimeout = 15 * time.Second
- electionTimeout = 10 * time.Second
-)
-
-func sendHeartbeats() {
- for {
- if !isMaster {
- return
- }
- for _, node := range peers {
- msg := Message{
- ID: hostID,
- Type: "heartbeat",
- Content: authCode,
- }
- err := sendMessage(node, msg)
- if err != nil {
- log.Printf("Error sending heartbeat to %s: %v", node, err)
- }
- }
- time.Sleep(heartbeatInterval)
- }
-}
-
-func checkMasterHeartbeat() {
- for {
- time.Sleep(heartbeatTimeout)
- masterNodeMux.RLock()
- if masterNode == authCode || masterNode == "" {
- masterNodeMux.RUnlock()
- continue
- }
- masterNodeMux.RUnlock()
-
- masterNodeMux.Lock()
- masterNode = ""
- masterNodeMux.Unlock()
- startElection()
- }
-}
-
-func startElection() {
- masterNodeMux.Lock()
- defer masterNodeMux.Unlock()
-
- for _, node := range peers {
- msg := Message{
- ID: hostID,
- Type: "election",
- Content: authCode,
- }
- err := sendMessage(node, msg)
- if err != nil {
- log.Printf("Error sending election message to %s: %v", node, err)
- }
- }
-
- isMaster = true
- go sendHeartbeats()
-}
-
-func handleHeartbeat(content string) {
- masterNodeMux.Lock()
- defer masterNodeMux.Unlock()
- masterNode = content
-}
-
-func handleElection(content string) {
- masterNodeMux.Lock()
- defer masterNodeMux.Unlock()
-
- if content < authCode {
- masterNode = content
- }
-}
diff --git a/node-request-files.go b/node-request-files.go
index 0cabf32..2153850 100755
--- a/node-request-files.go
+++ b/node-request-files.go
@@ -1,19 +1,22 @@
+//go:build experimental
+// +build experimental
+
package main
import (
- "encoding/json"
+ "bytes"
+ "encoding/binary"
"fmt"
"time"
)
-func tryOtherNodesForFileSearch(query, safe, lang string, page int, visitedNodes []string) []TorrentResult {
- for _, nodeAddr := range peers {
- if contains(visitedNodes, nodeAddr) {
- continue // Skip nodes already visited
- }
- results, err := sendFileSearchRequestToNode(nodeAddr, query, safe, lang, page, visitedNodes)
+var fileResultsChan = make(chan []TorrentResult)
+
+func tryOtherNodesForFileSearch(query, safe, lang string, page int) []TorrentResult {
+ for _, nodeTarget := range sockets {
+ results, err := sendFileSearchRequestToNode(nodeTarget, query, safe, lang, page)
if err != nil {
- printWarn("Error contacting node %s: %v", nodeAddr, err)
+ printWarn("Error contacting node %s: %v", nodeTarget, err)
continue
}
if len(results) > 0 {
@@ -23,60 +26,123 @@ func tryOtherNodesForFileSearch(query, safe, lang string, page int, visitedNodes
return nil
}
-func sendFileSearchRequestToNode(nodeAddr, query, safe, lang string, page int, visitedNodes []string) ([]TorrentResult, error) {
- visitedNodes = append(visitedNodes, nodeAddr)
- searchParams := struct {
- Query string `json:"query"`
- Safe string `json:"safe"`
- Lang string `json:"lang"`
- Page int `json:"page"`
- ResponseAddr string `json:"responseAddr"`
- VisitedNodes []string `json:"visitedNodes"`
- }{
- Query: query,
- Safe: safe,
- Lang: lang,
- Page: page,
- ResponseAddr: fmt.Sprintf("http://localhost:%d/node", config.Port),
- VisitedNodes: visitedNodes,
- }
-
- msgBytes, err := json.Marshal(searchParams)
+func sendFileSearchRequestToNode(target, query, safe, lang string, page int) ([]TorrentResult, error) {
+ payload, err := encodeSearchTextParams(query, safe, lang, page)
if err != nil {
- return nil, fmt.Errorf("failed to marshal search parameters: %v", err)
+ return nil, fmt.Errorf("encode error: %v", err)
}
msg := Message{
- ID: hostID,
- Type: "search-file",
- Content: string(msgBytes),
+ ID: generateMessageID(),
+ Type: MsgTypeSearchFileRequest,
+ Content: payload,
+ Target: target,
}
- err = sendMessage(nodeAddr, msg)
- if err != nil {
- return nil, fmt.Errorf("failed to send search request to node %s: %v", nodeAddr, err)
+ if err := sendMessage(msg); err != nil {
+ return nil, fmt.Errorf("send error: %v", err)
}
- // Wait for results
select {
case res := <-fileResultsChan:
return res, nil
case <-time.After(20 * time.Second):
- return nil, fmt.Errorf("timeout waiting for results from node %s", nodeAddr)
+ return nil, fmt.Errorf("timeout waiting for results from node %s", target)
}
}
func handleFileResultsMessage(msg Message) {
- var results []TorrentResult
- err := json.Unmarshal([]byte(msg.Content), &results)
+ results, err := decodeFileResults([]byte(msg.Content))
if err != nil {
- printWarn("Error unmarshalling file results: %v", err)
+ printWarn("Error decoding file results: %v", err)
return
}
-
printDebug("Received file results: %+v", results)
- // Send results to fileResultsChan
+
go func() {
fileResultsChan <- results
}()
}
+
+func encodeFileResults(results []TorrentResult) ([]byte, error) {
+ buf := new(bytes.Buffer)
+
+ if err := binary.Write(buf, binary.BigEndian, uint16(len(results))); err != nil {
+ return nil, err
+ }
+
+ for _, r := range results {
+ if err := writeString(buf, r.URL); err != nil {
+ return nil, err
+ }
+ if err := binary.Write(buf, binary.BigEndian, uint32(r.Seeders)); err != nil {
+ return nil, err
+ }
+ if err := binary.Write(buf, binary.BigEndian, uint32(r.Leechers)); err != nil {
+ return nil, err
+ }
+ if err := writeString(buf, r.Magnet); err != nil {
+ return nil, err
+ }
+ if err := binary.Write(buf, binary.BigEndian, uint32(r.Views)); err != nil {
+ return nil, err
+ }
+ if err := writeString(buf, r.Size); err != nil {
+ return nil, err
+ }
+ if err := writeString(buf, r.Title); err != nil {
+ return nil, err
+ }
+ }
+ return buf.Bytes(), nil
+}
+
+func decodeFileResults(data []byte) ([]TorrentResult, error) {
+ buf := bytes.NewReader(data)
+
+ var count uint16
+ if err := binary.Read(buf, binary.BigEndian, &count); err != nil {
+ return nil, err
+ }
+
+ results := make([]TorrentResult, 0, count)
+ for i := 0; i < int(count); i++ {
+ url, err := readString(buf)
+ if err != nil {
+ return nil, err
+ }
+ var seeders, leechers, views uint32
+ if err := binary.Read(buf, binary.BigEndian, &seeders); err != nil {
+ return nil, err
+ }
+ if err := binary.Read(buf, binary.BigEndian, &leechers); err != nil {
+ return nil, err
+ }
+ magnet, err := readString(buf)
+ if err != nil {
+ return nil, err
+ }
+ if err := binary.Read(buf, binary.BigEndian, &views); err != nil {
+ return nil, err
+ }
+ size, err := readString(buf)
+ if err != nil {
+ return nil, err
+ }
+ title, err := readString(buf)
+ if err != nil {
+ return nil, err
+ }
+
+ results = append(results, TorrentResult{
+ URL: url,
+ Seeders: int(seeders),
+ Leechers: int(leechers),
+ Magnet: magnet,
+ Views: int(views),
+ Size: size,
+ Title: title,
+ })
+ }
+ return results, nil
+}
diff --git a/node-request-forums.go b/node-request-forums.go
index ff6ed2e..4f248ce 100755
--- a/node-request-forums.go
+++ b/node-request-forums.go
@@ -1,100 +1,129 @@
+//go:build experimental
+// +build experimental
+
package main
import (
- "encoding/json"
+ "bytes"
+ "encoding/binary"
"fmt"
"time"
)
var forumResultsChan = make(chan []ForumSearchResult)
-func tryOtherNodesForForumSearch(query, safe, lang string, page int) []ForumSearchResult {
- for _, nodeAddr := range peers {
- results, err := sendForumSearchRequestToNode(nodeAddr, query, safe, lang, page, []string{})
- if err != nil {
- printWarn("Error contacting node %s: %v", nodeAddr, err)
- continue
- }
- if len(results) > 0 {
- return results
- }
- }
- return nil
-}
-
-func sendForumSearchRequestToNode(nodeAddr, query, safe, lang string, page int, visitedNodes []string) ([]ForumSearchResult, error) {
- // Check if the current node has already been visited
- for _, node := range visitedNodes {
- if node == hostID {
- return nil, fmt.Errorf("loop detected: this node (%s) has already been visited", hostID)
- }
- }
-
- // Add current node to the list of visited nodes
- visitedNodes = append(visitedNodes, hostID)
-
- searchParams := struct {
- Query string `json:"query"`
- Safe string `json:"safe"`
- Lang string `json:"lang"`
- Page int `json:"page"`
- ResponseAddr string `json:"responseAddr"`
- VisitedNodes []string `json:"visitedNodes"`
- }{
- Query: query,
- Safe: safe,
- Lang: lang,
- Page: page,
- ResponseAddr: fmt.Sprintf("http://localhost:%d/node", config.Port),
- VisitedNodes: visitedNodes,
- }
-
- msgBytes, err := json.Marshal(searchParams)
+func sendForumSearchRequestToNode(target, query, safe, lang string, page int, visitedNodes []string) ([]ForumSearchResult, error) { // visitedNodes kept for signature compatibility; unused since the binary-protocol rewrite
+ payload, err := encodeSearchTextParams(query, safe, lang, page) // reuse text param encoding
if err != nil {
- return nil, fmt.Errorf("failed to marshal search parameters: %v", err)
+ return nil, fmt.Errorf("encode error: %v", err)
}
msg := Message{
- ID: hostID,
- Type: "search-forum",
- Content: string(msgBytes),
+ ID: generateMessageID(),
+ Type: MsgTypeSearchForumRequest,
+ Content: payload,
+ Target: target,
}
- err = sendMessage(nodeAddr, msg)
- if err != nil {
- return nil, fmt.Errorf("failed to send search request to node %s: %v", nodeAddr, err)
+ if err := sendMessage(msg); err != nil {
+ return nil, fmt.Errorf("send error: %v", err)
}
- // Wait for results
select {
case res := <-forumResultsChan:
return res, nil
case <-time.After(20 * time.Second):
- return nil, fmt.Errorf("timeout waiting for results from node %s", nodeAddr)
+ return nil, fmt.Errorf("timeout waiting for results from node %s", target)
}
}
func handleForumResultsMessage(msg Message) {
- var results []ForumSearchResult
- err := json.Unmarshal([]byte(msg.Content), &results)
+ results, err := decodeForumResults([]byte(msg.Content))
if err != nil {
- printWarn("Error unmarshalling forum results: %v", err)
+ printWarn("Error decoding forum results: %v", err)
return
}
-
printDebug("Received forum results: %+v", results)
- // Send results to forumResultsChan
+
go func() {
forumResultsChan <- results
}()
}
-// Used only to answer requests
-func fetchForumResults(query, safe, lang string, page int) []ForumSearchResult {
- results, err := PerformRedditSearch(query, safe, page)
- if err != nil {
- printWarn("Error fetching forum results: %v", err)
- return nil
+func encodeForumResults(results []ForumSearchResult) ([]byte, error) {
+ buf := new(bytes.Buffer)
+
+ if err := binary.Write(buf, binary.BigEndian, uint16(len(results))); err != nil {
+ return nil, err
}
- return results
+
+ for _, r := range results {
+ if err := writeString(buf, r.URL); err != nil {
+ return nil, err
+ }
+ if err := writeString(buf, r.Header); err != nil {
+ return nil, err
+ }
+ if err := writeString(buf, r.Description); err != nil {
+ return nil, err
+ }
+ if err := binary.Write(buf, binary.BigEndian, r.PublishedDate.Unix()); err != nil {
+ return nil, err
+ }
+ if err := writeString(buf, r.ImgSrc); err != nil {
+ return nil, err
+ }
+ if err := writeString(buf, r.ThumbnailSrc); err != nil {
+ return nil, err
+ }
+ }
+
+ return buf.Bytes(), nil
+}
+
+func decodeForumResults(data []byte) ([]ForumSearchResult, error) {
+ buf := bytes.NewReader(data)
+
+ var count uint16
+ if err := binary.Read(buf, binary.BigEndian, &count); err != nil {
+ return nil, err
+ }
+
+ results := make([]ForumSearchResult, 0, count)
+ for i := 0; i < int(count); i++ {
+ url, err := readString(buf)
+ if err != nil {
+ return nil, err
+ }
+ header, err := readString(buf)
+ if err != nil {
+ return nil, err
+ }
+ desc, err := readString(buf)
+ if err != nil {
+ return nil, err
+ }
+ var unixTime int64
+ if err := binary.Read(buf, binary.BigEndian, &unixTime); err != nil {
+ return nil, err
+ }
+ imgSrc, err := readString(buf)
+ if err != nil {
+ return nil, err
+ }
+ thumbSrc, err := readString(buf)
+ if err != nil {
+ return nil, err
+ }
+
+ results = append(results, ForumSearchResult{
+ URL: url,
+ Header: header,
+ Description: desc,
+ PublishedDate: time.Unix(unixTime, 0),
+ ImgSrc: imgSrc,
+ ThumbnailSrc: thumbSrc,
+ })
+ }
+ return results, nil
}
diff --git a/node-request-images.go b/node-request-images.go
index 4e3e9e3..db980c1 100755
--- a/node-request-images.go
+++ b/node-request-images.go
@@ -1,84 +1,198 @@
+//go:build experimental
+// +build experimental
+
package main
import (
- "encoding/json"
+ "bytes"
+ "encoding/binary"
"fmt"
"time"
)
+// const (
+// MessageTypeSearchImage uint8 = 11
+// MessageTypeSearchImageThumb uint8 = 111
+// MessageTypeSearchImageFull uint8 = 112
+// MessageTypeSearchImageAllProxy uint8 = 113
+// MessageTypeImageResults uint8 = 22
+// )
+
var imageResultsChan = make(chan []ImageSearchResult)
-func handleImageResultsMessage(msg Message) {
- var results []ImageSearchResult
- err := json.Unmarshal([]byte(msg.Content), &results)
+func sendImageSearchRequestToNode(target, query, safe, lang string, page int, mode uint8) ([]ImageSearchResult, error) {
+ payload, err := encodeSearchTextParams(query, safe, lang, page)
if err != nil {
- printWarn("Error unmarshalling image results: %v", err)
- return
+ return nil, fmt.Errorf("encode error: %v", err)
}
+ msg := Message{
+ ID: generateMessageID(),
+ Type: mode, // one of the image search types
+ Content: payload,
+ Target: target,
+ }
+
+ if err := sendMessage(msg); err != nil {
+ return nil, fmt.Errorf("send error: %v", err)
+ }
+
+ select {
+ case res := <-imageResultsChan:
+ return res, nil
+ case <-time.After(30 * time.Second):
+ return nil, fmt.Errorf("timeout from %s", target)
+ }
+}
+
+func handleImageResultsMessage(msg Message) {
+ results, err := decodeImageResults([]byte(msg.Content))
+ if err != nil {
+ printWarn("Error decoding image results: %v", err)
+ return
+ }
printDebug("Received image results: %+v", results)
- // Send results to imageResultsChan
+
go func() {
imageResultsChan <- results
}()
}
-func sendImageSearchRequestToNode(nodeAddr, query, safe, lang string, page int, visitedNodes []string) ([]ImageSearchResult, error) {
- visitedNodes = append(visitedNodes, nodeAddr)
- searchParams := struct {
- Query string `json:"query"`
- Safe string `json:"safe"`
- Lang string `json:"lang"`
- Page int `json:"page"`
- ResponseAddr string `json:"responseAddr"`
- VisitedNodes []string `json:"visitedNodes"`
- }{
- Query: query,
- Safe: safe,
- Lang: lang,
- Page: page,
- ResponseAddr: fmt.Sprintf("http://localhost:%d/node", config.Port),
- VisitedNodes: visitedNodes,
+func encodeImageResults(results []ImageSearchResult) ([]byte, error) {
+ buf := new(bytes.Buffer)
+
+ if err := binary.Write(buf, binary.BigEndian, uint16(len(results))); err != nil {
+ return nil, err
}
- msgBytes, err := json.Marshal(searchParams)
- if err != nil {
- return nil, fmt.Errorf("failed to marshal search parameters: %v", err)
+ for _, r := range results {
+ if err := writeString(buf, r.ID); err != nil {
+ return nil, err
+ }
+ if err := writeString(buf, r.Title); err != nil {
+ return nil, err
+ }
+ if err := writeString(buf, r.Full); err != nil {
+ return nil, err
+ }
+ if err := writeString(buf, r.Thumb); err != nil {
+ return nil, err
+ }
+ if err := writeString(buf, r.ProxyFull); err != nil {
+ return nil, err
+ }
+ if err := writeString(buf, r.ProxyThumb); err != nil {
+ return nil, err
+ }
+ if err := writeString(buf, r.Source); err != nil {
+ return nil, err
+ }
+ if err := binary.Write(buf, binary.BigEndian, int32(r.Width)); err != nil {
+ return nil, err
+ }
+ if err := binary.Write(buf, binary.BigEndian, int32(r.Height)); err != nil {
+ return nil, err
+ }
}
- msg := Message{
- ID: hostID,
- Type: "search-image",
- Content: string(msgBytes),
- }
-
- err = sendMessage(nodeAddr, msg)
- if err != nil {
- return nil, fmt.Errorf("failed to send search request to node %s: %v", nodeAddr, err)
- }
-
- // Wait for results
- select {
- case res := <-imageResultsChan:
- return res, nil
- case <-time.After(30 * time.Second):
- return nil, fmt.Errorf("timeout waiting for results from node %s", nodeAddr)
- }
+ return buf.Bytes(), nil
}
-func tryOtherNodesForImageSearch(query, safe, lang string, page int, visitedNodes []string) []ImageSearchResult {
- for _, nodeAddr := range peers {
- if contains(visitedNodes, nodeAddr) {
- continue // Skip nodes already visited
+func decodeImageResults(data []byte) ([]ImageSearchResult, error) {
+ buf := bytes.NewReader(data)
+
+ var count uint16
+ if err := binary.Read(buf, binary.BigEndian, &count); err != nil {
+ return nil, err
+ }
+
+ results := make([]ImageSearchResult, 0, count)
+ for i := 0; i < int(count); i++ {
+		var id, title, full, thumb, proxyFull, proxyThumb, source string
+		for _, dst := range []*string{&id, &title, &full, &thumb, &proxyFull, &proxyThumb, &source} {
+			s, err := readString(buf)
+			if err != nil {
+				return nil, err
+			}
+			*dst = s
+		}
+ var width, height int32
+ if err := binary.Read(buf, binary.BigEndian, &width); err != nil {
+ return nil, err
}
- results, err := sendImageSearchRequestToNode(nodeAddr, query, safe, lang, page, visitedNodes)
- if err != nil {
- printWarn("Error contacting node %s: %v", nodeAddr, err)
- continue
+ if err := binary.Read(buf, binary.BigEndian, &height); err != nil {
+ return nil, err
}
- if len(results) > 0 {
- return results
+
+ results = append(results, ImageSearchResult{
+ ID: id,
+ Title: title,
+ Full: full,
+ Thumb: thumb,
+ ProxyFull: proxyFull,
+ ProxyThumb: proxyThumb,
+ Source: source,
+ Width: int(width),
+ Height: int(height),
+ })
+ }
+
+ return results, nil
+}
+
+func prepareProxiedImages(results []ImageSearchResult, mode uint8) []ImageSearchResult {
+ for i := range results {
+ switch mode {
+ case MsgTypeSearchImageThumbRequest:
+ results[i].ProxyThumb = "/image/" + results[i].ID + "_thumb.webp"
+ case MsgTypeSearchImageFullRequest:
+ results[i].ProxyFull = "/image/" + results[i].ID + "_full.webp"
+ case MsgTypeSearchImageAllRequest:
+ results[i].ProxyThumb = "/image/" + results[i].ID + "_thumb.webp"
+ results[i].ProxyFull = "/image/" + results[i].ID + "_full.webp"
}
}
- return nil
+ return results
}
+
+// func handleSearchImageMessage(msg Message) {
+// query, safe, lang, page, err := decodeSearchTextParams([]byte(msg.Content))
+// if err != nil {
+// printWarn("Error decoding image search parameters: %v", err)
+// return
+// }
+
+// results := fetchImageResults(query, safe, lang, page, true, true)
+
+// switch msg.Type {
+// case MsgTypeSearchImageRawRequest:
+// // No proxy URLs needed
+
+// case MsgTypeSearchImageThumbRequest:
+// results = prepareProxiedImages(results, MsgTypeSearchImageThumbRequest)
+// case MsgTypeSearchImageFullRequest:
+// results = prepareProxiedImages(results, MsgTypeSearchImageFullRequest)
+// case MsgTypeSearchImageAllRequest:
+// results = prepareProxiedImages(results, MsgTypeSearchImageAllRequest)
+// default:
+// printWarn("Unknown image request mode: %d", msg.Type)
+// return
+// }
+
+// payload, err := encodeImageResults(results)
+// if err != nil {
+// printWarn("Error encoding image search results: %v", err)
+// return
+// }
+
+// response := Message{
+// ID: msg.ID,
+// Type: MsgTypeSearchImageResponse,
+// Content: string(payload),
+// Target: msg.Source, // Reply to sender
+// }
+
+// if err := sendMessage(response); err != nil {
+// printWarn("Error sending image search response: %v", err)
+// }
+// }
diff --git a/node-request-music.go b/node-request-music.go
new file mode 100644
index 0000000..e7d4961
--- /dev/null
+++ b/node-request-music.go
@@ -0,0 +1,187 @@
+//go:build experimental
+// +build experimental
+
+package main
+
+import (
+ "bytes"
+ "encoding/binary"
+ "fmt"
+ "time"
+)
+
+var musicResultsChan = make(chan []MusicResult)
+
+func tryOtherNodesForMusicSearch(query, lang string, safe bool, page int) []MusicResult {
+ safeStr := "inactive"
+ if safe {
+ safeStr = "active"
+ }
+ for _, nodeTarget := range sockets {
+ results, err := sendMusicSearchRequestToNode(nodeTarget, query, safeStr, lang, page)
+ if err != nil {
+ printWarn("Error contacting node %s: %v", nodeTarget, err)
+ continue
+ }
+ if len(results) > 0 {
+ return results
+ }
+ }
+ return nil
+}
+
+func sendMusicSearchRequestToNode(target, query, safe, lang string, page int) ([]MusicResult, error) {
+ payload, err := encodeSearchTextParams(query, safe, lang, page)
+ if err != nil {
+ return nil, fmt.Errorf("encode error: %v", err)
+ }
+
+ msg := Message{
+ ID: generateMessageID(),
+ Type: MsgTypeSearchMusicRequest,
+ Content: payload,
+ Target: target,
+ }
+
+ err = sendMessage(msg)
+ if err != nil {
+ return nil, fmt.Errorf("failed to send music request to node %s: %v", target, err)
+ }
+
+ select {
+ case res := <-musicResultsChan:
+ return res, nil
+ case <-time.After(20 * time.Second):
+ return nil, fmt.Errorf("timeout waiting for music results from node %s", target)
+ }
+}
+
+func handleMusicResultsMessage(msg Message) {
+ results, err := decodeMusicResults([]byte(msg.Content))
+ if err != nil {
+ printWarn("Error decoding music results: %v", err)
+ return
+ }
+ printDebug("Received music results: %+v", results)
+
+ go func() {
+ musicResultsChan <- results
+ }()
+}
+
+func encodeMusicResults(results []MusicResult) ([]byte, error) {
+ buf := new(bytes.Buffer)
+
+ if err := binary.Write(buf, binary.BigEndian, uint16(len(results))); err != nil {
+ return nil, err
+ }
+
+ for _, r := range results {
+ if err := writeString(buf, r.URL); err != nil {
+ return nil, err
+ }
+ if err := writeString(buf, r.Title); err != nil {
+ return nil, err
+ }
+ if err := writeString(buf, r.Artist); err != nil {
+ return nil, err
+ }
+ if err := writeString(buf, r.Description); err != nil {
+ return nil, err
+ }
+ if err := writeString(buf, r.PublishedDate); err != nil {
+ return nil, err
+ }
+ if err := writeString(buf, r.Thumbnail); err != nil {
+ return nil, err
+ }
+ if err := writeString(buf, r.Source); err != nil {
+ return nil, err
+ }
+ if err := writeString(buf, r.Duration); err != nil {
+ return nil, err
+ }
+ }
+
+ return buf.Bytes(), nil
+}
+
+func decodeMusicResults(data []byte) ([]MusicResult, error) {
+ buf := bytes.NewReader(data)
+
+ var count uint16
+ if err := binary.Read(buf, binary.BigEndian, &count); err != nil {
+ return nil, err
+ }
+
+ results := make([]MusicResult, 0, count)
+ for i := 0; i < int(count); i++ {
+ url, err := readString(buf)
+ if err != nil {
+ return nil, err
+ }
+ title, err := readString(buf)
+ if err != nil {
+ return nil, err
+ }
+ artist, err := readString(buf)
+ if err != nil {
+ return nil, err
+ }
+ description, err := readString(buf)
+ if err != nil {
+ return nil, err
+ }
+ date, err := readString(buf)
+ if err != nil {
+ return nil, err
+ }
+ thumb, err := readString(buf)
+ if err != nil {
+ return nil, err
+ }
+ source, err := readString(buf)
+ if err != nil {
+ return nil, err
+ }
+ duration, err := readString(buf)
+ if err != nil {
+ return nil, err
+ }
+
+ results = append(results, MusicResult{
+ URL: url,
+ Title: title,
+ Artist: artist,
+ Description: description,
+ PublishedDate: date,
+ Thumbnail: thumb,
+ Source: source,
+ Duration: duration,
+ })
+ }
+ return results, nil
+}
+
+// func handleMusicSearchRequest(msg Message) {
+// buf := bytes.NewReader([]byte(msg.Content))
+// query, _ := readString(buf)
+
+// var page uint16
+// binary.Read(buf, binary.BigEndian, &page)
+
+// results := fetchMusicResults(query, int(page))
+// encoded, err := encodeMusicResults(results)
+// if err != nil {
+// printWarn("Encoding music results failed: %v", err)
+// return
+// }
+
+// reply := Message{
+// ID: msg.ID,
+// Type: MsgTypeSearchMusicResponse,
+// Content: string(encoded),
+// 		Target: msg.Source, // reply to the requesting node; msg.Target is this node
+// }
+// sendMessage(reply)
+// }
diff --git a/node-request-text.go b/node-request-text.go
index ebe6041..ad9bacf 100755
--- a/node-request-text.go
+++ b/node-request-text.go
@@ -1,21 +1,23 @@
+//go:build experimental
+// +build experimental
+
package main
import (
- "encoding/json"
+ "bytes"
+ "encoding/binary"
"fmt"
"time"
)
var textResultsChan = make(chan []TextSearchResult)
-func tryOtherNodesForTextSearch(query, safe, lang string, page int, visitedNodes []string) []TextSearchResult {
- for _, nodeAddr := range peers {
- if contains(visitedNodes, nodeAddr) {
- continue // Skip nodes already visited
- }
- results, err := sendTextSearchRequestToNode(nodeAddr, query, safe, lang, page, visitedNodes)
+// Not every search type defines a tryOtherNodes* helper; text search does.
+func tryOtherNodesForTextSearch(query, safe, lang string, page int) []TextSearchResult {
+ for _, nodeTarget := range sockets {
+ results, err := sendTextSearchRequestToNode(nodeTarget, query, safe, lang, page)
if err != nil {
- printWarn("Error contacting node %s: %v", nodeAddr, err)
+ printWarn("Error contacting node %s: %v", nodeTarget, err)
continue
}
if len(results) > 0 {
@@ -25,60 +27,102 @@ func tryOtherNodesForTextSearch(query, safe, lang string, page int, visitedNodes
return nil
}
-func sendTextSearchRequestToNode(nodeAddr, query, safe, lang string, page int, visitedNodes []string) ([]TextSearchResult, error) {
- visitedNodes = append(visitedNodes, nodeAddr)
- searchParams := struct {
- Query string `json:"query"`
- Safe string `json:"safe"`
- Lang string `json:"lang"`
- Page int `json:"page"`
- ResponseAddr string `json:"responseAddr"`
- VisitedNodes []string `json:"visitedNodes"`
- }{
- Query: query,
- Safe: safe,
- Lang: lang,
- Page: page,
- ResponseAddr: fmt.Sprintf("http://localhost:%d/node", config.Port),
- VisitedNodes: visitedNodes,
- }
-
- msgBytes, err := json.Marshal(searchParams)
+func sendTextSearchRequestToNode(target, query, safe, lang string, page int) ([]TextSearchResult, error) {
+ payload, err := encodeSearchTextParams(query, safe, lang, page)
if err != nil {
- return nil, fmt.Errorf("failed to marshal search parameters: %v", err)
+ return nil, fmt.Errorf("encode error: %v", err)
}
msg := Message{
- ID: hostID,
- Type: "search-text",
- Content: string(msgBytes),
+		ID:      generateMessageID(), // monotonically increasing uint32 (defined in node.go)
+ Type: MsgTypeSearchTextRequest,
+ Content: payload,
+ Target: target,
}
- err = sendMessage(nodeAddr, msg)
+ err = sendMessage(msg)
if err != nil {
- return nil, fmt.Errorf("failed to send search request to node %s: %v", nodeAddr, err)
+ return nil, fmt.Errorf("failed to send search request to node %s: %v", target, err)
}
- // Wait for results
select {
case res := <-textResultsChan:
return res, nil
case <-time.After(20 * time.Second):
- return nil, fmt.Errorf("timeout waiting for results from node %s", nodeAddr)
+ return nil, fmt.Errorf("timeout waiting for results from node %s", target)
}
}
func handleTextResultsMessage(msg Message) {
- var results []TextSearchResult
- err := json.Unmarshal([]byte(msg.Content), &results)
+ results, err := decodeTextResults([]byte(msg.Content))
if err != nil {
- printWarn("Error unmarshalling text results: %v", err)
+ printWarn("Error decoding text results: %v", err)
return
}
-
printDebug("Received text results: %+v", results)
- // Send results to textResultsChan
+
go func() {
textResultsChan <- results
}()
}
+
+func encodeTextResults(results []TextSearchResult) ([]byte, error) {
+ buf := new(bytes.Buffer)
+
+ if err := binary.Write(buf, binary.BigEndian, uint16(len(results))); err != nil {
+ return nil, err
+ }
+
+ for _, r := range results {
+ if err := writeString(buf, r.URL); err != nil {
+ return nil, err
+ }
+ if err := writeString(buf, r.Header); err != nil {
+ return nil, err
+ }
+ if err := writeString(buf, r.Description); err != nil {
+ return nil, err
+ }
+ if err := writeString(buf, r.Source); err != nil {
+ return nil, err
+ }
+ }
+
+ return buf.Bytes(), nil
+}
+
+func decodeTextResults(data []byte) ([]TextSearchResult, error) {
+ buf := bytes.NewReader(data)
+
+ var count uint16
+ if err := binary.Read(buf, binary.BigEndian, &count); err != nil {
+ return nil, err
+ }
+
+ results := make([]TextSearchResult, 0, count)
+ for i := 0; i < int(count); i++ {
+ url, err := readString(buf)
+ if err != nil {
+ return nil, err
+ }
+ header, err := readString(buf)
+ if err != nil {
+ return nil, err
+ }
+ description, err := readString(buf)
+ if err != nil {
+ return nil, err
+ }
+ source, err := readString(buf)
+ if err != nil {
+ return nil, err
+ }
+ results = append(results, TextSearchResult{
+ URL: url,
+ Header: header,
+ Description: description,
+ Source: source,
+ })
+ }
+ return results, nil
+}
diff --git a/node-request-video.go b/node-request-video.go
index d965a7d..e213e2c 100755
--- a/node-request-video.go
+++ b/node-request-video.go
@@ -1,19 +1,22 @@
+//go:build experimental
+// +build experimental
+
package main
import (
- "encoding/json"
+ "bytes"
+ "encoding/binary"
"fmt"
"time"
)
-func tryOtherNodesForVideoSearch(query, safe, lang string, page int, visitedNodes []string) []VideoResult {
- for _, nodeAddr := range peers {
- if contains(visitedNodes, nodeAddr) {
- continue // Skip nodes already visited
- }
- results, err := sendVideoSearchRequestToNode(nodeAddr, query, safe, lang, page, visitedNodes)
+var videoResultsChan = make(chan []VideoResult)
+
+func tryOtherNodesForVideoSearch(query, safe, lang string, page int) []VideoResult {
+ for _, node := range sockets {
+ results, err := sendVideoSearchRequestToNode(node, query, safe, lang, page)
if err != nil {
- printWarn("Error contacting node %s: %v", nodeAddr, err)
+ printWarn("Error contacting node %s: %v", node, err)
continue
}
if len(results) > 0 {
@@ -23,60 +26,134 @@ func tryOtherNodesForVideoSearch(query, safe, lang string, page int, visitedNode
return nil
}
-func sendVideoSearchRequestToNode(nodeAddr, query, safe, lang string, page int, visitedNodes []string) ([]VideoResult, error) {
- visitedNodes = append(visitedNodes, nodeAddr)
- searchParams := struct {
- Query string `json:"query"`
- Safe string `json:"safe"`
- Lang string `json:"lang"`
- Page int `json:"page"`
- ResponseAddr string `json:"responseAddr"`
- VisitedNodes []string `json:"visitedNodes"`
- }{
- Query: query,
- Safe: safe,
- Lang: lang,
- Page: page,
- ResponseAddr: fmt.Sprintf("http://localhost:%d/node", config.Port),
- VisitedNodes: visitedNodes,
- }
-
- msgBytes, err := json.Marshal(searchParams)
+func sendVideoSearchRequestToNode(target, query, safe, lang string, page int) ([]VideoResult, error) {
+ payload, err := encodeSearchTextParams(query, safe, lang, page)
if err != nil {
- return nil, fmt.Errorf("failed to marshal search parameters: %v", err)
+ return nil, fmt.Errorf("encode error: %v", err)
}
msg := Message{
- ID: hostID,
- Type: "search-video",
- Content: string(msgBytes),
+ ID: generateMessageID(),
+ Type: MsgTypeSearchVideoRequest,
+ Content: payload,
+ Target: target,
}
- err = sendMessage(nodeAddr, msg)
- if err != nil {
- return nil, fmt.Errorf("failed to send search request to node %s: %v", nodeAddr, err)
+ if err := sendMessage(msg); err != nil {
+ return nil, fmt.Errorf("send error: %v", err)
}
- // Wait for results
select {
case res := <-videoResultsChan:
return res, nil
case <-time.After(20 * time.Second):
- return nil, fmt.Errorf("timeout waiting for results from node %s", nodeAddr)
+ return nil, fmt.Errorf("timeout waiting for results from node %s", target)
}
}
func handleVideoResultsMessage(msg Message) {
- var results []VideoResult
- err := json.Unmarshal([]byte(msg.Content), &results)
+ results, err := decodeVideoResults([]byte(msg.Content))
if err != nil {
- printWarn("Error unmarshalling video results: %v", err)
+ printWarn("Error decoding video results: %v", err)
return
}
-
printDebug("Received video results: %+v", results)
- // Send results to videoResultsChan
+
go func() {
videoResultsChan <- results
}()
}
+
+func encodeVideoResults(results []VideoResult) ([]byte, error) {
+ buf := new(bytes.Buffer)
+
+ if err := binary.Write(buf, binary.BigEndian, uint16(len(results))); err != nil {
+ return nil, err
+ }
+
+ for _, r := range results {
+ if err := writeString(buf, r.Href); err != nil {
+ return nil, err
+ }
+ if err := writeString(buf, r.Title); err != nil {
+ return nil, err
+ }
+ if err := writeString(buf, r.Date); err != nil {
+ return nil, err
+ }
+ if err := writeString(buf, r.Views); err != nil {
+ return nil, err
+ }
+ if err := writeString(buf, r.Creator); err != nil {
+ return nil, err
+ }
+ if err := writeString(buf, r.Publisher); err != nil {
+ return nil, err
+ }
+ if err := writeString(buf, r.Image); err != nil {
+ return nil, err
+ }
+ if err := writeString(buf, r.Duration); err != nil {
+ return nil, err
+ }
+ }
+
+ return buf.Bytes(), nil
+}
+
+func decodeVideoResults(data []byte) ([]VideoResult, error) {
+ buf := bytes.NewReader(data)
+
+ var count uint16
+ if err := binary.Read(buf, binary.BigEndian, &count); err != nil {
+ return nil, err
+ }
+
+ results := make([]VideoResult, 0, count)
+ for i := 0; i < int(count); i++ {
+ href, err := readString(buf)
+ if err != nil {
+ return nil, err
+ }
+ title, err := readString(buf)
+ if err != nil {
+ return nil, err
+ }
+ date, err := readString(buf)
+ if err != nil {
+ return nil, err
+ }
+ views, err := readString(buf)
+ if err != nil {
+ return nil, err
+ }
+ creator, err := readString(buf)
+ if err != nil {
+ return nil, err
+ }
+ publisher, err := readString(buf)
+ if err != nil {
+ return nil, err
+ }
+ image, err := readString(buf)
+ if err != nil {
+ return nil, err
+ }
+ duration, err := readString(buf)
+ if err != nil {
+ return nil, err
+ }
+
+ results = append(results, VideoResult{
+ Href: href,
+ Title: title,
+ Date: date,
+ Views: views,
+ Creator: creator,
+ Publisher: publisher,
+ Image: image,
+ Duration: duration,
+ })
+ }
+ return results, nil
+}
diff --git a/node-update.go b/node-update.go
deleted file mode 100644
index f433eb4..0000000
--- a/node-update.go
+++ /dev/null
@@ -1,28 +0,0 @@
-package main
-
-import (
- "fmt"
- "log"
- "time"
-)
-
-// Function to sync updates across all nodes
-func nodeUpdateSync() {
- fmt.Println("Syncing updates across all nodes...")
- for _, peerAddr := range peers {
- fmt.Printf("Notifying node %s about update...\n", peerAddr)
- msg := Message{
- ID: hostID,
- Type: "update",
- Content: "Start update process",
- }
- err := sendMessage(peerAddr, msg)
- if err != nil {
- log.Printf("Failed to notify node %s: %v\n", peerAddr, err)
- continue
- }
- fmt.Printf("Node %s notified. Waiting for it to update...\n", peerAddr)
- time.Sleep(30 * time.Second) // Adjust sleep time as needed to allow for updates
- }
- fmt.Println("All nodes have been updated.")
-}
diff --git a/node.go b/node.go
index 5fd247a..5b70063 100644
--- a/node.go
+++ b/node.go
@@ -1,75 +1,152 @@
+//go:build experimental
+// +build experimental
+
package main
import (
"bytes"
- "crypto/rand"
+ "encoding/binary"
"encoding/json"
+ "errors"
"fmt"
- "io/ioutil"
+ "io"
+ "net"
"net/http"
+ "os"
"time"
)
var (
- authCode string
- peers []string
- hostID string
+ sockets []string
+ hostID string
+ socketDir string
)
type Message struct {
- ID string `json:"id"`
- Type string `json:"type"`
- Content string `json:"content"`
- VisitedNodes []string `json:"visitedNodes"`
+ ID uint32
+ Type uint8
+ Content []byte
+ Target string
}
+const (
+ MsgTypeNone uint8 = 0
+ MsgTypeTest uint8 = 1
+
+ // Request types (10–99)
+ MsgTypeSearchTextRequest uint8 = 10
+ MsgTypeSearchImageRawRequest uint8 = 11
+ MsgTypeSearchImageThumbRequest uint8 = 12
+ MsgTypeSearchImageFullRequest uint8 = 13
+ MsgTypeSearchImageAllRequest uint8 = 14
+ MsgTypeSearchVideoRequest uint8 = 15
+ MsgTypeSearchFileRequest uint8 = 16
+ MsgTypeSearchForumRequest uint8 = 17
+ MsgTypeSearchMusicRequest uint8 = 18
+
+ // Response types (110–199)
+ MsgTypeSearchTextResponse uint8 = 110
+ MsgTypeSearchImageResponse uint8 = 111
+ MsgTypeSearchVideoResponse uint8 = 112
+ MsgTypeSearchFileResponse uint8 = 113
+ MsgTypeSearchForumResponse uint8 = 114
+ MsgTypeSearchMusicResponse uint8 = 115
+)
+
func loadNodeConfig() {
- authCode = config.AuthCode
- peers = config.Peers
+ sockets = config.Nodes
+ socketDir = "/tmp/" // Directory where sockets are stored, for now fixed tmp dir, can be changed later
}
-func generateHostID() (string, error) {
- bytes := make([]byte, 16)
- _, err := rand.Read(bytes)
- if err != nil {
- return "", fmt.Errorf("failed to generate host ID: %v", err)
+var messageIDCounter uint32 = 0
+
+func generateMessageID() uint32 {
+ if messageIDCounter == ^uint32(0) { // 0xFFFFFFFF
+ messageIDCounter = 1
+ } else {
+ messageIDCounter++
}
- return fmt.Sprintf("%x", bytes), nil
+ return messageIDCounter
}
-func sendMessage(serverAddr string, msg Message) error {
- if serverAddr == "" {
- return fmt.Errorf("server address is empty")
+func encodeSearchTextParams(query, safe, lang string, page int) ([]byte, error) {
+ buf := new(bytes.Buffer)
+
+ if err := writeString(buf, query); err != nil {
+ return nil, err
+ }
+ if err := writeString(buf, safe); err != nil {
+ return nil, err
+ }
+ if err := writeString(buf, lang); err != nil {
+ return nil, err
+ }
+ if err := binary.Write(buf, binary.BigEndian, uint16(page)); err != nil {
+ return nil, err
}
- msgBytes, err := json.Marshal(msg)
+ return buf.Bytes(), nil
+}
+
+func sendMessage(msg Message) error {
+ socketPath := socketDir + msg.Target + ".sock"
+
+ conn, err := net.Dial("unix", socketPath)
if err != nil {
- return fmt.Errorf("failed to marshal message: %v", err)
+ return fmt.Errorf("failed to connect to socket %s: %v", socketPath, err)
}
+ defer conn.Close()
- req, err := http.NewRequest("POST", serverAddr, bytes.NewBuffer(msgBytes))
+ msgBytes, err := serializeMessage(msg)
if err != nil {
- return fmt.Errorf("failed to create request: %v", err)
- }
- req.Header.Set("Content-Type", "application/json")
- req.Header.Set("Authorization", authCode)
-
- client := &http.Client{
- Timeout: time.Second * 10,
+ return fmt.Errorf("serialization error: %v", err)
}
- resp, err := client.Do(req)
+ _, err = conn.Write(msgBytes)
+ return err
+}
+
+func startUnixSocketServer(socketName string) {
+ socketPath := socketDir + socketName + ".sock"
+
+ if _, err := os.Stat(socketPath); err == nil {
+ os.Remove(socketPath)
+ }
+
+ listener, err := net.Listen("unix", socketPath)
if err != nil {
- return fmt.Errorf("failed to send request: %v", err)
+ panic(fmt.Sprintf("Failed to listen on %s: %v", socketPath, err))
}
- defer resp.Body.Close()
+ defer listener.Close()
+ os.Chmod(socketPath, 0666)
- if resp.StatusCode != http.StatusOK {
- body, _ := ioutil.ReadAll(resp.Body)
- return fmt.Errorf("server error: %s", body)
+ printInfo("Listening on UNIX socket: %s", socketPath)
+
+ for {
+ conn, err := listener.Accept()
+ if err != nil {
+ printWarn("Accept error: %v", err)
+ continue
+ }
+
+ go func(c net.Conn) {
+ defer c.Close()
+ buf, err := io.ReadAll(c)
+ if err != nil {
+ printWarn("Read error: %v", err)
+ return
+ }
+
+ msg, err := deserializeMessage(buf)
+ if err != nil {
+ printWarn("Deserialization error: %v", err)
+ return
+ }
+
+ printDebug("Received binary message: %+v", msg)
+ interpretMessage(msg)
+ }(conn)
}
-
- return nil
}
func handleNodeRequest(w http.ResponseWriter, r *http.Request) {
@@ -78,12 +155,6 @@ func handleNodeRequest(w http.ResponseWriter, r *http.Request) {
return
}
- auth := r.Header.Get("Authorization")
- if auth != authCode {
- http.Error(w, "Unauthorized", http.StatusUnauthorized)
- return
- }
-
var msg Message
err := json.NewDecoder(r.Body).Decode(&msg)
if err != nil {
@@ -92,64 +163,177 @@ func handleNodeRequest(w http.ResponseWriter, r *http.Request) {
}
defer r.Body.Close()
- printDebug("Received message: %+v\n", msg)
- w.Write([]byte("Message received"))
-
+ printDebug("Received HTTP message: %+v", msg)
interpretMessage(msg)
}
-func startNodeClient() {
- for {
- for _, peerAddr := range peers {
- msg := Message{
- ID: hostID,
- Type: "test",
- Content: "This is a test message from the client node",
- }
+func startNodeClientUnix() {
+ var idCounter uint32 = 0
- err := sendMessage(peerAddr, msg)
- if err != nil {
- printWarn("Error sending message to %s: %v", peerAddr, err)
- } else {
- printInfo("Message sent successfully to: %s", peerAddr)
- }
+ for {
+ msg := Message{
+ ID: idCounter,
+ Type: MsgTypeTest,
+ Content: []byte("This is a test message via UNIX socket"),
+ Target: "node2", ///!!!
+ }
+ idCounter++
+
+ if err := sendMessage(msg); err != nil {
+ printWarn("Send error: %v", err)
}
time.Sleep(10 * time.Second)
}
}
func interpretMessage(msg Message) {
+ printDebug("Received message: %s", msg.Content)
+
switch msg.Type {
- case "test":
- printDebug("Received test message: %v", msg.Content)
- case "update":
- printDebug("Received update message: %v", msg.Content)
- go update()
- case "heartbeat":
- handleHeartbeat(msg.Content)
- case "election":
- handleElection(msg.Content)
- case "search-text":
+ case MsgTypeTest:
+ handleTestMessage(msg)
+ case MsgTypeSearchTextRequest:
handleSearchTextMessage(msg)
- case "search-image":
+ case MsgTypeSearchImageRawRequest, MsgTypeSearchImageThumbRequest, MsgTypeSearchImageFullRequest, MsgTypeSearchImageAllRequest:
handleSearchImageMessage(msg)
- case "search-video":
+ case MsgTypeSearchVideoRequest:
handleSearchVideoMessage(msg)
- case "search-file":
+ case MsgTypeSearchFileRequest:
handleSearchFileMessage(msg)
- case "search-forum":
+ case MsgTypeSearchForumRequest:
handleSearchForumMessage(msg)
- case "forum-results":
- handleForumResultsMessage(msg)
- case "text-results":
+ case MsgTypeSearchMusicRequest:
+ handleSearchMusicMessage(msg)
+
+ case MsgTypeSearchTextResponse:
handleTextResultsMessage(msg)
- case "image-results":
+ case MsgTypeSearchImageResponse:
handleImageResultsMessage(msg)
- case "video-results":
+ case MsgTypeSearchVideoResponse:
handleVideoResultsMessage(msg)
- case "file-results":
+ case MsgTypeSearchFileResponse:
handleFileResultsMessage(msg)
+ case MsgTypeSearchForumResponse:
+ handleForumResultsMessage(msg)
+ case MsgTypeSearchMusicResponse:
+ handleMusicResultsMessage(msg)
+
default:
- printWarn("Received unknown message type: %v", msg.Type)
+ printWarn("Unknown message type: %d", msg.Type)
+ }
+}
+
+// Serialize Message to binary
+func serializeMessage(msg Message) ([]byte, error) {
+ buf := new(bytes.Buffer)
+
+ if err := binary.Write(buf, binary.BigEndian, msg.ID); err != nil {
+ return nil, err
+ }
+ if err := binary.Write(buf, binary.BigEndian, msg.Type); err != nil {
+ return nil, err
+ }
+
+ // Content
+ contentBytes := []byte(msg.Content)
+ if len(contentBytes) > 65535 {
+ return nil, errors.New("content too long")
+ }
+ if err := binary.Write(buf, binary.BigEndian, uint16(len(contentBytes))); err != nil {
+ return nil, err
+ }
+ buf.Write(contentBytes)
+
+ // Target
+ targetBytes := []byte(msg.Target)
+ if len(targetBytes) > 255 {
+ return nil, errors.New("target name too long")
+ }
+ if err := buf.WriteByte(uint8(len(targetBytes))); err != nil {
+ return nil, err
+ }
+ buf.Write(targetBytes)
+
+ return buf.Bytes(), nil
+}
+
+// Deserialize binary to Message
+func deserializeMessage(data []byte) (Message, error) {
+ buf := bytes.NewReader(data)
+ var msg Message
+
+ if err := binary.Read(buf, binary.BigEndian, &msg.ID); err != nil {
+ return msg, err
+ }
+ if err := binary.Read(buf, binary.BigEndian, &msg.Type); err != nil {
+ return msg, err
+ }
+
+ var contentLen uint16
+ if err := binary.Read(buf, binary.BigEndian, &contentLen); err != nil {
+ return msg, err
+ }
+ content := make([]byte, contentLen)
+ if _, err := io.ReadFull(buf, content); err != nil {
+ return msg, err
+ }
+ msg.Content = content
+
+ var targetLen uint8
+ if err := binary.Read(buf, binary.BigEndian, &targetLen); err != nil {
+ return msg, err
+ }
+ target := make([]byte, targetLen)
+ if _, err := io.ReadFull(buf, target); err != nil {
+ return msg, err
+ }
+ msg.Target = string(target)
+
+ return msg, nil
+}
+
+func writeString(buf *bytes.Buffer, s string) error {
+ if err := binary.Write(buf, binary.BigEndian, uint16(len(s))); err != nil {
+ return err
+ }
+ _, err := buf.Write([]byte(s))
+ return err
+}
+
+func readString(buf *bytes.Reader) (string, error) {
+ var length uint16
+ if err := binary.Read(buf, binary.BigEndian, &length); err != nil {
+ return "", err
+ }
+ strBytes := make([]byte, length)
+ if _, err := io.ReadFull(buf, strBytes); err != nil {
+ return "", err
+ }
+ return string(strBytes), nil
+}
+
+type testPayload struct {
+ Message string `json:"message"`
+ ResponseAddr string `json:"ResponseAddr"`
+}
+
+func handleTestMessage(msg Message) {
+ var payload testPayload
+ if err := json.Unmarshal([]byte(msg.Content), &payload); err != nil {
+ printWarn("Failed to parse test payload: %v", err)
+ return
+ }
+ printDebug("Received message: %s", payload.Message)
+ printInfo("Received TEST message: %s", payload.Message)
+
+ reply := Message{
+ ID: msg.ID,
+ Type: MsgTypeTest,
+ Content: []byte("hello test"),
+ Target: payload.ResponseAddr,
+ }
+
+ if err := sendMessage(reply); err != nil {
+ printWarn("Failed to send test response: %v", err)
}
}
diff --git a/open-search.go b/open-search.go
index fefce8f..40caceb 100644
--- a/open-search.go
+++ b/open-search.go
@@ -7,31 +7,60 @@ import (
)
type OpenSearchDescription struct {
- XMLName xml.Name `xml:"OpenSearchDescription"`
- Xmlns string `xml:"xmlns,attr"`
- ShortName string `xml:"ShortName"`
- Description string `xml:"Description"`
- Tags string `xml:"Tags"`
- URLs []URL `xml:"Url"`
+ XMLName xml.Name `xml:"OpenSearchDescription"`
+ Xmlns string `xml:"xmlns,attr"`
+ ShortName string `xml:"ShortName"`
+ LongName string `xml:"LongName"`
+ Description string `xml:"Description"`
+ Tags string `xml:"Tags,omitempty"`
+ InputEncoding string `xml:"InputEncoding"`
+ OutputEncoding string `xml:"OutputEncoding"`
+ Images []Image `xml:"Image"`
+ URLs []URL `xml:"Url"`
}
type URL struct {
Type string `xml:"type,attr"`
+ Method string `xml:"method,attr,omitempty"`
Template string `xml:"template,attr"`
}
-func generateOpenSearchXML(config Config) {
- // Ensure that language is initialized in `main` before calling this function
+type Image struct {
+ Height int `xml:"height,attr"`
+ Width int `xml:"width,attr"`
+ Type string `xml:"type,attr"`
+ URL string `xml:",chardata"`
+}
+func generateOpenSearchXML(config Config) {
baseURL := addProtocol(config.Domain)
+
opensearch := OpenSearchDescription{
- Xmlns: "http://a9.com/-/spec/opensearch/1.1/",
- ShortName: Translate("site_name"),
- Description: Translate("site_description"),
- Tags: Translate("site_tags"),
+ Xmlns: "http://a9.com/-/spec/opensearch/1.1/",
+ ShortName: Translate("site_name"),
+ LongName: Translate("site_name") + " Search",
+ Description: Translate("site_description"),
+ Tags: Translate("site_tags"),
+ InputEncoding: "UTF-8",
+ OutputEncoding: "UTF-8",
+ Images: []Image{
+ {
+ Height: 512,
+ Width: 512,
+ Type: "image/svg+xml",
+ URL: fmt.Sprintf("%s/static/images/icon-512.svg", baseURL),
+ },
+ {
+ Height: 16,
+ Width: 16,
+ Type: "image/png",
+ URL: "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAACXBIWXMAAABYAAAAWAF42ktiAAAAGXRFWHRTb2Z0d2FyZQB3d3cuaW5rc2NhcGUub3Jnm+48GgAAA2pJREFUOI1lk11oW3UYxp//ycnhpHqapmlssrZLkzRbu9qPrVYWxhTpsLPD4fRiDBmj6ubwZhfChCHSgjBB59B5McEboepWVkSLdQ432rVZ2s3B0jQtga7LcnKa9CNtlrT56EnO64XbqPrCe/c8P3hfnocRETZPMpm0hyPK52VGaa8gCCUMTMtkMwsg9CWTK1+2tbVlNuvZJgAfnA5d2eZ2HvQHguza9WHcvx+GXs+jtaUJBw90UonBsL6ykjjhdDp/ekogIhCR7sGDh1MLi0v01pFuKjHX0rPPOana3UqiqYYM5VvJaHPT+a8vkqqqxVAodPyx7x9AIDjTt5xYoVZPB4mmGhLLqqn5hZfo+o0R+uhMLz1TYadDh4/R4aMn6OPes5TP5wt+v383EYFlMpkqnuflD06dZn2XBmAyGZFYTuDke8cQmAqiproKjTsacPG77yEaRIQjCoZ+/oHcLoe/asuWndxcOPxhVJlnlwd+Ba/jsLt9F3gdh6bGevjGb0OZn4f31jgURcHcXBikaTj7xQVms1pbxsbGtnGCIHZ5fbehFgpQVRW/Df2BN15/DaNeH9SNHNwuB4auXntyLwDCqHcC2VyOLBZLB8frOCkiK2CPn9q1fx9cDjsWFuIg0rDdXQcdx2CzVYJAYGAACPH4kiYIQhUHsKLRWAqDKAIA9r/agc/OfQWtSPi09xOAAUVNQywWBwDs2tmMlqZGGI0SCoVCikulUtONDduRzeUAxqCqKgCGib/uQpJKsaEWwBiHfa+8DMY4+APTiMUXYS436dLp9AxXYbZ8s8fzIpwOO2zWSmSzeeyor0dLczNGb40DYGCcDge6OsEYQ7FYxPvvHkUqlUqPjIz8yYgI0WgsNhcOW/suDWDM68Px7rfBcTqo6gaICJJUCq9vAld+GUKdy4E7N69qsiyfc7vdp0FEmJ2dfX5tfb3w4+UBkipdJJpqSLLUkslWR2WVLjKUb6U3j3RT+95OisjRoizLgf7+fuFpEokIwVCoYzWZzAVnQvTOyVNUaq0jg9lOBrOdahva6fyFbym9tqY9jMiBwcHBin9F+clOTk6a7vknf08+epTP5zdImY/R0nJC0zSNolFl0R8InOnp6eE3e9h/6wwAw8PDvNVq3aPX6x2qquZXV1enPB7PFID/if8GRa7Q/nLxcNoAAAAASUVORK5CYII=",
+ },
+ },
URLs: []URL{
{
Type: "text/html",
+ Method: "get",
Template: fmt.Sprintf("%s/search?q={searchTerms}", baseURL),
},
{
@@ -48,6 +77,8 @@ func generateOpenSearchXML(config Config) {
}
defer file.Close()
+ file.WriteString(xml.Header)
+
enc := xml.NewEncoder(file)
enc.Indent(" ", " ")
if err := enc.Encode(opensearch); err != nil {
diff --git a/proxy.go b/proxy.go
new file mode 100644
index 0000000..0f2a26a
--- /dev/null
+++ b/proxy.go
@@ -0,0 +1,270 @@
+package main
+
+import (
+ "fmt"
+ "net/http"
+ "strings"
+ "sync"
+ "time"
+
+ "golang.org/x/net/proxy"
+)
+
+// ProxyConfig holds configuration for a single proxy.
+type ProxyConfig struct {
+ Address string
+ Username string
+ Password string
+}
+
+// ProxyClient provides an HTTP client pool for proxies.
+type ProxyClient struct {
+ clients []*http.Client
+ lock sync.Mutex
+ index int
+}
+
+// Package-level proxy clients
+var (
+ metaProxyClient *ProxyClient
+ crawlerProxyClient *ProxyClient
+)
+
+// NewProxyClientPool creates a pool of HTTP clients with SOCKS5 proxies.
+func NewProxyClientPool(proxies []ProxyConfig, timeout time.Duration) (*ProxyClient, error) {
+ if len(proxies) == 0 {
+ return nil, fmt.Errorf("no proxies provided")
+ }
+
+ clients := make([]*http.Client, len(proxies))
+
+ for i, pc := range proxies {
+ var auth *proxy.Auth
+ if pc.Username != "" || pc.Password != "" {
+ auth = &proxy.Auth{
+ User: pc.Username,
+ Password: pc.Password,
+ }
+ }
+ dialer, err := proxy.SOCKS5("tcp", pc.Address, auth, proxy.Direct)
+ if err != nil {
+ return nil, fmt.Errorf("failed to create SOCKS5 dialer for %s: %w", pc.Address, err)
+ }
+
+ transport := &http.Transport{Dial: dialer.Dial}
+ clients[i] = &http.Client{
+ Transport: transport,
+ Timeout: timeout,
+ }
+ }
+
+ return &ProxyClient{clients: clients}, nil
+}
+
+// Do sends an HTTP request using the next proxy in the pool.
+func (p *ProxyClient) Do(req *http.Request) (*http.Response, error) {
+ p.lock.Lock()
+ client := p.clients[p.index]
+ p.index = (p.index + 1) % len(p.clients)
+ p.lock.Unlock()
+ return client.Do(req)
+}
+
+func (p *ProxyClient) GetProxy() string {
+ p.lock.Lock()
+ defer p.lock.Unlock()
+
+ if len(p.clients) == 0 {
+ return ""
+ }
+
+ // Round-robin proxy retrieval
+ client := p.clients[p.index]
+ p.index = (p.index + 1) % len(p.clients)
+
+ // Assume each client has a proxy string saved
+ // Example implementation depends on how your proxies are configured
+ proxyTransport, ok := client.Transport.(*http.Transport)
+ if ok && proxyTransport.Proxy != nil {
+ proxyURL, _ := proxyTransport.Proxy(nil)
+ if proxyURL != nil {
+ return proxyURL.String()
+ }
+ }
+
+ return ""
+}
+
+// ParseProxies parses the proxy strings in the format ADDRESS:PORT or ADDRESS:PORT:USER:PASSWORD.
+func ParseProxies(proxyStrings []string) []ProxyConfig {
+ var proxies []ProxyConfig
+ for _, proxyStr := range proxyStrings {
+ parts := strings.Split(proxyStr, ":")
+ switch len(parts) {
+ case 2: // ADDRESS:PORT
+ proxies = append(proxies, ProxyConfig{
+ Address: fmt.Sprintf("%s:%s", parts[0], parts[1]),
+ })
+ case 4: // ADDRESS:PORT:USER:PASSWORD
+ proxies = append(proxies, ProxyConfig{
+ Address: fmt.Sprintf("%s:%s", parts[0], parts[1]),
+ Username: parts[2],
+ Password: parts[3],
+ })
+ default:
+ fmt.Printf("Invalid proxy format: %s\n", proxyStr)
+ }
+ }
+ return proxies
+}
+
+// InitProxies initializes the proxy clients for Meta and Crawler proxies.
+func InitProxies() {
+ // Initialize Meta Proxy Client
+ if config.MetaProxyEnabled {
+ metaProxies := ParseProxies(config.MetaProxies)
+ client, err := NewProxyClientPool(metaProxies, 30*time.Second)
+ if err != nil {
+ if config.MetaProxyStrict {
+ panic(fmt.Sprintf("Failed to initialize Meta proxies: %v", err))
+ }
+ fmt.Printf("Warning: Meta proxy initialization failed: %v\n", err)
+ }
+ metaProxyClient = client
+ }
+
+ // Initialize Crawler Proxy Client
+ if config.CrawlerProxyEnabled {
+ crawlerProxies := ParseProxies(config.CrawlerProxies)
+ client, err := NewProxyClientPool(crawlerProxies, 30*time.Second)
+ if err != nil {
+ if config.CrawlerProxyStrict {
+ panic(fmt.Sprintf("Failed to initialize Crawler proxies: %v", err))
+ }
+ fmt.Printf("Warning: Crawler proxy initialization failed: %v\n", err)
+ }
+ crawlerProxyClient = client
+ }
+}
+
+// Doer is an interface so we can accept *http.Client or *ProxyClient for requests.
+type Doer interface {
+ Do(*http.Request) (*http.Response, error)
+}
+
+// DoProxyRequest handles “try direct, then proxy if needed,” with retries if proxy is used.
+//
+// - strict: if true, always try proxy first if enabled; if not available, do one direct attempt
+// - enabled: whether this type of proxy is turned on
+// - retryCount: how many times to retry with the proxy
+// - proxyClient: the pool of proxy connections
+func DoProxyRequest(req *http.Request, strict bool, enabled bool, retryCount int, proxyClient *ProxyClient) (*http.Response, error) {
+ // 1) If !strict => try direct once first
+ if !strict {
+ resp, err := tryRequestOnce(req, http.DefaultClient)
+ if isSuccessful(resp, err) {
+ return resp, nil
+ }
+ // If direct fails => if proxy is enabled, retry
+ if enabled && proxyClient != nil {
+ resp, err = tryRequestWithRetry(req, proxyClient, retryCount)
+ if isSuccessful(resp, err) {
+ return resp, nil
+ }
+ return nil, fmt.Errorf("failed after direct & proxy attempts: %v", err)
+ }
+ return nil, fmt.Errorf("request failed direct, no valid proxy: %v", err)
+ }
+
+ // 2) If strict => if proxy is enabled, try it up to “retryCount”
+ if enabled && proxyClient != nil {
+ resp, err := tryRequestWithRetry(req, proxyClient, retryCount)
+ if isSuccessful(resp, err) {
+ return resp, nil
+ }
+ return nil, fmt.Errorf("failed after %d proxy attempts: %v", retryCount, err)
+ }
+
+ // If strict but no proxy => direct once
+ resp, err := tryRequestOnce(req, http.DefaultClient)
+ if isSuccessful(resp, err) {
+ return resp, nil
+ }
+ return nil, fmt.Errorf("direct request failed in strict mode, no proxy: %v", err)
+}
+
+// Helper Wrapper functions for DoProxyRequest()
+func DoMetaProxyRequest(req *http.Request) (*http.Response, error) {
+ return DoProxyRequest(
+ req,
+ config.MetaProxyStrict,
+ config.MetaProxyEnabled,
+ config.MetaProxyRetry,
+ metaProxyClient,
+ )
+}
+func DoCrawlerProxyRequest(req *http.Request) (*http.Response, error) {
+ return DoProxyRequest(
+ req,
+ config.CrawlerProxyStrict,
+ config.CrawlerProxyEnabled,
+ config.CrawlerProxyRetry,
+ crawlerProxyClient,
+ )
+}
+
+// tryRequestWithRetry tries the request up to "retries" times, waiting 200ms between attempts.
+func tryRequestWithRetry(req *http.Request, client Doer, retries int) (*http.Response, error) {
+ var resp *http.Response
+ var err error
+ for i := 1; i <= retries; i++ {
+ if resp != nil {
+ resp.Body.Close()
+ }
+ printDebug("Attempt %d of %d with proxy/client...", i, retries)
+ resp, err = tryRequestOnce(req, client)
+ if isSuccessful(resp, err) {
+ return resp, nil
+ }
+ time.Sleep(200 * time.Millisecond)
+ }
+ return resp, err
+}
+
+// tryRequestOnce sends a single request with the given client. If client is nil, uses default client.
+func tryRequestOnce(req *http.Request, client Doer) (*http.Response, error) {
+ if client == nil {
+ client = http.DefaultClient
+ }
+ resp, err := client.Do(req)
+ return resp, err
+}
+
+// isSuccessful checks if err==nil & resp != nil & resp.StatusCode in [200..299].
+func isSuccessful(resp *http.Response, err error) bool {
+ if err != nil || resp == nil {
+ return false
+ }
+ return resp.StatusCode >= 200 && resp.StatusCode < 300
+}
+
+// func main() {
+// config := loadConfig()
+
+// // Initialize proxies if enabled
+// if config.CrawlerProxyEnabled || config.MetaProxyEnabled {
+// InitProxies()
+// }
+
+// // Example usage
+// if metaProxyClient != nil {
+// req, _ := http.NewRequest("GET", "https://example.com", nil)
+// resp, err := metaProxyClient.Do(req)
+// if err != nil {
+// fmt.Printf("Error using MetaProxyClient: %v\n", err)
+// } else {
+// fmt.Printf("Meta Proxy Response Status: %s\n", resp.Status)
+// resp.Body.Close()
+// }
+// }
+// }
diff --git a/run.bat b/run.bat
index eb3919d..e1bf056 100755
--- a/run.bat
+++ b/run.bat
@@ -5,7 +5,7 @@ rem Initialize variables
set SKIP_CONFIG=""
set PORT=""
set DOMAIN=""
-set BUILD_MODE=false
+set CONFIG_FILE=""
set BUILD_OUTPUT=qgato.exe
rem Parse arguments
@@ -23,13 +23,14 @@ if "%~1"=="--domain" (
shift
goto parse_args
)
-if "%~1"=="--skip-config-check" (
- set SKIP_CONFIG=--skip-config-check
+if "%~1"=="--config" (
+ set CONFIG_FILE=%~2
+ shift
shift
goto parse_args
)
-if "%~1"=="--build" (
- set BUILD_MODE=true
+if "%~1"=="--skip-config-check" (
+ set SKIP_CONFIG=--skip-config-check
shift
goto parse_args
)
@@ -41,46 +42,29 @@ exit /b 1
rem Use the current directory where the script is executed
pushd %~dp0
-rem Collect all .go files in the current directory excluding *_test.go
-set GO_FILES=
-for %%f in (*.go) do (
- echo %%f | findstr "_test.go" >nul
- if errorlevel 1 (
- set GO_FILES=!GO_FILES! %%f
- )
+rem Always delete and rebuild the binary
+echo Cleaning previous build...
+if exist "%BUILD_OUTPUT%" del "%BUILD_OUTPUT%"
+
+echo Building application...
+go build -ldflags="-s -w" -o "%BUILD_OUTPUT%" .
+if errorlevel 1 (
+ echo Build failed!
+ exit /b 1
)
+echo Build successful! Output: %CD%\%BUILD_OUTPUT%
-if "%BUILD_MODE%"=="true" (
- rem Build mode
- echo Building application...
- go build -o "%BUILD_OUTPUT%" !GO_FILES!
- if errorlevel 1 (
- echo Build failed!
- exit /b 1
- )
- echo Build successful! Output: %CD%\%BUILD_OUTPUT%
-) else (
- rem Check if the executable exists
- if not exist "%BUILD_OUTPUT%" (
- echo Executable not found. Building it first...
- go build -o "%BUILD_OUTPUT%" !GO_FILES!
- if errorlevel 1 (
- echo Build failed! Unable to run the application.
- exit /b 1
- )
- )
+rem Construct the command
+set CMD=%BUILD_OUTPUT% !SKIP_CONFIG!
+if not "%PORT%"=="" set CMD=!CMD! --port %PORT%
+if not "%DOMAIN%"=="" set CMD=!CMD! --domain %DOMAIN%
+if not "%CONFIG_FILE%"=="" set CMD=!CMD! --config %CONFIG_FILE%
- rem Construct the command
- set CMD="%BUILD_OUTPUT% !SKIP_CONFIG!"
- if not "%PORT%"=="" set CMD=!CMD! --port %PORT%
- if not "%DOMAIN%"=="" set CMD=!CMD! --domain %DOMAIN%
+rem Informative output
+echo Starting application with command: !CMD!
- rem Informative output
- echo Starting application with command: !CMD!
-
- rem Run the application
- call !CMD!
-)
+rem Run the built executable
+call !CMD!
rem Return to the original directory
popd
diff --git a/run.sh b/run.sh
index 2aeefad..cfdd84a 100755
--- a/run.sh
+++ b/run.sh
@@ -4,7 +4,9 @@
SKIP_CONFIG=""
PORT=""
DOMAIN=""
-BUILD_MODE=false
+CONFIG_FILE=""
+BUILD_ONLY=0
+PLATFORM="linux"
BUILD_OUTPUT="qgato"
# Parse arguments
@@ -18,12 +20,20 @@ while [ $# -gt 0 ]; do
DOMAIN=$2
shift 2
;;
- --skip-config-check)
- SKIP_CONFIG="--skip-config-check"
+ --config)
+ CONFIG_FILE=$2
+ shift 2
+ ;;
+ --platform)
+ PLATFORM=$2
+ shift 2
+ ;;
+ --build-only)
+ BUILD_ONLY=1
shift
;;
- --build)
- BUILD_MODE=true
+ --skip-config-check)
+ SKIP_CONFIG="--skip-config-check"
shift
;;
*)
@@ -36,36 +46,40 @@ done
# Get the directory of the script
SCRIPT_DIR=$(dirname "$0")
-# List all Go files in the script directory (excluding test files)
-GO_FILES=$(find "$SCRIPT_DIR" -name '*.go' ! -name '*_test.go' -print)
-
-if $BUILD_MODE; then
- # Build mode
- echo "Building application..."
- go build -o "$SCRIPT_DIR/$BUILD_OUTPUT" $GO_FILES
- if [ $? -eq 0 ]; then
- echo "Build successful! Output: $SCRIPT_DIR/$BUILD_OUTPUT"
- else
- echo "Build failed!"
- exit 1
- fi
+# Set GOOS and output filename
+if [ "$PLATFORM" = "windows" ]; then
+ GOOS=windows
+ BUILD_OUTPUT="qgato.exe"
else
- # Run mode
- CMD="./$BUILD_OUTPUT $SKIP_CONFIG"
- [ -n "$PORT" ] && CMD="$CMD --port $PORT"
- [ -n "$DOMAIN" ] && CMD="$CMD --domain $DOMAIN"
-
- if [ ! -f "$SCRIPT_DIR/$BUILD_OUTPUT" ]; then
- echo "Executable not found. Building it first..."
- go build -o "$SCRIPT_DIR/$BUILD_OUTPUT" $GO_FILES
- if [ $? -ne 0 ]; then
- echo "Build failed! Unable to run the application."
- exit 1
- fi
- fi
-
- echo "Starting application with command: $CMD"
-
- # Run the executable
- eval $CMD
+ GOOS=linux
+ BUILD_OUTPUT="qgato"
fi
+
+# Clean and build
+echo "Cleaning previous build..."
+rm -f "$SCRIPT_DIR/$BUILD_OUTPUT"
+
+echo "Building application for $PLATFORM..."
+GOOS=$GOOS go build -ldflags="-s -w" -o "$SCRIPT_DIR/$BUILD_OUTPUT" .
+if [ $? -eq 0 ]; then
+ echo "Build successful! Output: $SCRIPT_DIR/$BUILD_OUTPUT"
+else
+ echo "Build failed!"
+ exit 1
+fi
+
+# Skip execution if build-only
+if [ "$BUILD_ONLY" -eq 1 ]; then
+ exit 0
+fi
+
+# Construct the run command
+CMD="$SCRIPT_DIR/$BUILD_OUTPUT $SKIP_CONFIG"
+[ -n "$PORT" ] && CMD="$CMD --port $PORT"
+[ -n "$DOMAIN" ] && CMD="$CMD --domain $DOMAIN"
+[ -n "$CONFIG_FILE" ] && CMD="$CMD --config $CONFIG_FILE"
+
+echo "Starting application with command: $CMD"
+
+# Run the built executable
+eval $CMD
diff --git a/static/css/black.css b/static/css/black.css
index 9661246..1738ec1 100644
--- a/static/css/black.css
+++ b/static/css/black.css
@@ -30,7 +30,7 @@
--green: #31b06e;
--search-button: #BABCBE;
-
+
--image-view: #161616;
--image-view-titlebar: #161616;
--view-image-color: #000000;
@@ -38,7 +38,7 @@
--fff: #fff;
--publish-info: #7f869e;
-
+
color-scheme: dark;
}
@@ -71,4 +71,4 @@
.view-image-search:hover {
box-shadow: 0 14px 28px rgba(0, 0, 0, 0.25), 0 10px 10px rgba(0, 0, 0, 0.22);
-}
+}
\ No newline at end of file
diff --git a/static/css/dark.css b/static/css/dark.css
index 0555cd3..3af919c 100644
--- a/static/css/dark.css
+++ b/static/css/dark.css
@@ -30,7 +30,7 @@
--green: #31b06e;
--search-button: #BABCBE;
-
+
--image-view: #161616;
--image-view-titlebar: #161616;
--view-image-color: #000000;
@@ -38,7 +38,7 @@
--fff: #fff;
--publish-info: #7f869e;
-
+
color-scheme: dark;
}
@@ -71,4 +71,4 @@
.view-image-search:hover {
box-shadow: 0 14px 28px rgba(0, 0, 0, 0.25), 0 10px 10px rgba(0, 0, 0, 0.22);
-}
+}
\ No newline at end of file
diff --git a/static/css/latte.css b/static/css/latte.css
index 58eee36..6d28327 100644
--- a/static/css/latte.css
+++ b/static/css/latte.css
@@ -25,7 +25,7 @@
--base: #1e1e2e;
--mantle: #181825;
--crust: #11111b;
-
+
--html-bg: var(--base);
--font-fg: var(--text);
--fg: var(--subtext0);
@@ -53,7 +53,7 @@
--highlight: var(--subtext1);
--search-button: var(--subtext0);
-
+
--image-view: var(--mantle);
--image-view-titlebar: var(--mantle);
--view-image-color: var(--crust);
@@ -61,7 +61,7 @@
--fff: var(--text);
--publish-info: var(--overlay2);
-
+
color-scheme: dark;
}
@@ -94,4 +94,4 @@
.view-image-search:hover {
box-shadow: 0 14px 28px rgba(0, 0, 0, 0.25), 0 10px 10px rgba(0, 0, 0, 0.22);
-}
+}
\ No newline at end of file
diff --git a/static/css/light.css b/static/css/light.css
index 656f89a..30677fd 100644
--- a/static/css/light.css
+++ b/static/css/light.css
@@ -28,7 +28,7 @@
--blue: #4285f4;
--green: #202124;
-
+
--image-view: #ffffff;
--image-view-titlebar: #ffffff;
--view-image-color: #f1f3f4;
@@ -69,4 +69,4 @@
.view-image-search:hover {
box-shadow: 0px 2px 4px rgba(0, 0, 0, 0.08) !important;
transition: all 0.3s cubic-bezier(.25, .8, .25, 1) !important;
-}
+}
\ No newline at end of file
diff --git a/static/css/mocha.css b/static/css/mocha.css
index 543332f..3b7c869 100644
--- a/static/css/mocha.css
+++ b/static/css/mocha.css
@@ -25,7 +25,7 @@
--base: #eff1f5;
--mantle: #e6e9ef;
--crust: #dce0e8;
-
+
--html-bg: var(--base);
--font-fg: var(--text);
--fg: var(--subtext0);
@@ -56,7 +56,7 @@
--green: var(--green);
--search-button: var(--subtext0);
-
+
--image-view: var(--mantle);
--image-view-titlebar: var(--mantle);
--view-image-color: var(--crust);
@@ -64,7 +64,7 @@
--fff: var(--text);
--publish-info: var(--overlay2);
-
+
color-scheme: light;
}
@@ -97,4 +97,4 @@
.view-image-search:hover {
box-shadow: 0 14px 28px rgba(0, 0, 0, 0.15), 0 10px 10px rgba(0, 0, 0, 0.12);
-}
+}
\ No newline at end of file
diff --git a/static/css/night.css b/static/css/night.css
index 0676672..41e910c 100644
--- a/static/css/night.css
+++ b/static/css/night.css
@@ -28,7 +28,7 @@
--blue: #8ab4f8;
--green: #31b06e;
-
+
--image-view: #0c0d0f;
--image-view-titlebar: #0c0d0f;
--view-image-color: #000000;
@@ -38,7 +38,7 @@
--search-button: #BABCBE;
--publish-info: #7f869e;
-
+
color-scheme: dark;
}
@@ -71,4 +71,4 @@
.view-image-search:hover {
box-shadow: 0 14px 28px rgba(0, 0, 0, 0.25), 0 10px 10px rgba(0, 0, 0, 0.22);
-}
+}
\ No newline at end of file
diff --git a/static/css/style-fixedwidth.css b/static/css/style-fixedwidth.css
index ce88cc4..64632eb 100644
--- a/static/css/style-fixedwidth.css
+++ b/static/css/style-fixedwidth.css
@@ -1,7 +1,10 @@
/* Ensure the body width is fixed and prevents resizing by the user */
body {
- overflow-x: hidden; /* Prevent horizontal scrolling by user */
- width: 100vw; /* Fix the width of the viewport */
- max-width: 100vw; /* Prevent page from extending wider than the viewport */
+ overflow-x: hidden;
+ /* Prevent horizontal scrolling by user */
+ width: 100vw;
+ /* Fix the width of the viewport */
+ max-width: 100vw;
+ /* Prevent page from extending wider than the viewport */
box-sizing: border-box;
}
\ No newline at end of file
diff --git a/static/css/style-fonts.css b/static/css/style-fonts.css
index c49eb14..9b9a915 100644
--- a/static/css/style-fonts.css
+++ b/static/css/style-fonts.css
@@ -31,5 +31,5 @@
font-family: 'Material Icons Round';
font-style: normal;
font-weight: 400;
- src: url('/static/fonts/material-icons-round-v108-latin-regular.woff2') format('woff2');
+ src: url('/static/fonts/MaterialIcons-Round.woff2') format('woff2');
}
\ No newline at end of file
diff --git a/static/css/style-imageloading.css b/static/css/style-imageloading.css
new file mode 100644
index 0000000..79cc585
--- /dev/null
+++ b/static/css/style-imageloading.css
@@ -0,0 +1,65 @@
+/* Image Loading Effect */
+.loading-image {
+ position: relative;
+ overflow: hidden;
+ background-color: var(--snip-background);
+ background-image: linear-gradient(90deg,
+ rgba(255, 255, 255, 0) 25%,
+ rgba(255, 255, 255, 0.15) 50%,
+ rgba(255, 255, 255, 0) 75%);
+ background-size: 200% 100%;
+ animation: image-wave 2s infinite linear;
+}
+
+/* Title Loading Effect */
+.title-loading {
+ position: relative;
+ overflow: hidden;
+ color: transparent !important;
+ background-color: var(--snip-background);
+ min-height: 1.2em;
+ width: 80%;
+ margin: 0 auto;
+ top: 2px;
+ border-radius: 6px;
+}
+
+.title-loading::after {
+ content: '';
+ position: absolute;
+ top: 0;
+ left: 0;
+ width: 100%;
+ height: 100%;
+ background: linear-gradient(90deg,
+ transparent 25%,
+ rgba(255, 255, 255, 0.25) 50%,
+ transparent 75%);
+ background-size: 200% 100%;
+ animation: title-wave 2.5s infinite linear;
+}
+
+/* Animations */
+@keyframes image-wave {
+ 0% {
+ background-position: -100% 0;
+ /* Start off-screen left */
+ }
+
+ 100% {
+ background-position: 100% 0;
+ /* End off-screen right */
+ }
+}
+
+@keyframes title-wave {
+ 0% {
+ background-position: -100% 0;
+ /* Start off-screen left */
+ }
+
+ 100% {
+ background-position: 100% 0;
+ /* End off-screen right */
+ }
+}
\ No newline at end of file
diff --git a/static/css/style-imageviewer.css b/static/css/style-imageviewer.css
index ac6874a..f9e44ed 100644
--- a/static/css/style-imageviewer.css
+++ b/static/css/style-imageviewer.css
@@ -1,12 +1,15 @@
/* Image Viewer Overlay */
#image-viewer-overlay {
position: fixed;
- top: 105px;
+ top: 105px;
right: 0;
- width: 33%; /* Occupies the right space for the viewer */
- height: calc(100% - 105px); /* Adjust height */
+ width: 33%;
+ /* Occupies the right space for the viewer */
+ height: calc(100% - 105px);
+ /* Adjust height */
z-index: 999;
- display: none; /* Initially hidden */
+ display: none;
+ /* Initially hidden */
/* Added transition for opening animation */
/* transform: translateX(100%);
@@ -18,8 +21,10 @@
position: fixed;
top: 105px;
right: 0;
- width: 33%; /* Match the width of the overlay */
- height: calc(100% - 105px); /* Adjust height */
+ width: 33%;
+ /* Match the width of the overlay */
+ height: calc(100% - 105px);
+ /* Adjust height */
background: var(--search-bg);
padding: 20px;
box-sizing: border-box;
@@ -33,6 +38,7 @@
#viewer-image {
max-width: 100%;
max-height: 60vh;
+ border-radius: 5px;
}
/* Viewer Title */
@@ -57,7 +63,8 @@
display: flex;
flex-direction: row;
align-items: center;
- gap: 5px; /* Add spacing between buttons */
+ gap: 5px;
+ /* Add spacing between buttons */
}
#viewer-close-button,
@@ -85,7 +92,8 @@
/* Adjust the images container when the viewer is visible */
.images {
- margin-right: 33%; /* Reserve space for the image viewer */
+ margin-right: 33%;
+ /* Reserve space for the image viewer */
}
/* Clickable Elements */
@@ -93,6 +101,15 @@
cursor: pointer;
}
+
+
+.search-type-icons {
+ display: flex;
+ justify-content: center;
+ flex-wrap: wrap;
+ gap: 32px;
+}
+
/* Viewer Image Link */
#viewer-image-link {
text-decoration: none;
@@ -101,13 +118,13 @@
/* View Image Container */
#viewer-image-container {
- background-color: var(--view-image-color);
+ background-color: #0000;
width: 100%;
height: auto;
display: flex;
justify-content: center;
align-items: center;
- margin-top: 50px;
+ margin-top: 20px;
}
/* Full Size and Proxy Size Links */
@@ -153,14 +170,24 @@
}
/* Responsive Design */
-@media only screen and (max-width: 750px) {
+@media only screen and (max-width: 880px) {
#image-viewer {
width: 100%;
- height: 77%;
- margin-top: -33px;
+ height: 100% !important;
+ margin-top: 28px;
margin-right: 0%;
border-top-right-radius: 0px;
border-top-left-radius: 0px;
+ padding-top: 10px;
+ padding-bottom: 10px;
+ }
+
+ .search-type-icons {
+ gap: 16px;
+ }
+
+ #viewer-image-container {
+ margin-top: 5px;
}
#viewer-image {
@@ -168,6 +195,7 @@
}
.images {
- margin-right: 0; /* No reserved space on smaller screens */
+ margin-right: 0;
+ /* No reserved space on smaller screens */
}
-}
+}
\ No newline at end of file
diff --git a/static/css/style-instantanswer.css b/static/css/style-instantanswer.css
new file mode 100644
index 0000000..77bd088
--- /dev/null
+++ b/static/css/style-instantanswer.css
@@ -0,0 +1,310 @@
+*, *::before, *::after {
+ box-sizing: border-box;
+}
+
+.instant-container {
+ position: absolute;
+ top: 140px;
+ right: 175px;
+ width: 500px;
+ z-index: 1;
+}
+
+.instant-box {
+ border: 1px solid var(--snip-border);
+ background-color: var(--snip-background);
+ border-radius: 8px;
+ padding: 15px;
+ margin-bottom: 20px;
+ box-shadow: 0 2px 10px rgba(0, 0, 0, 0.1);
+}
+
+.instant-box h3 {
+ margin-top: 0;
+ color: var(--highlight);
+ font-size: 20px;
+ border-bottom: 1px solid var(--border);
+ padding-bottom: 10px;
+ text-decoration: none;
+}
+
+.instant-result {
+ margin: 15px 0;
+ font-size: 16px;
+ line-height: 1.5;
+}
+
+.calc-container {
+ display: flex;
+ flex-direction: column;
+ gap: 10px;
+ margin-top: 15px;
+}
+
+.calc-input {
+ box-sizing: border-box;
+ width: 100%;
+ padding: 10px;
+ font-size: 18px;
+ border: 1px solid var(--border);
+ border-radius: 4px;
+ background-color: var(--search-bg);
+ color: var(--fg);
+}
+
+.calc-buttons {
+ display: grid;
+ grid-template-columns: repeat(4, 1fr);
+ gap: 5px;
+}
+
+.calc-buttons button {
+ padding: 10px;
+ font-size: 18px;
+ border: 1px solid var(--border);
+ border-radius: 4px;
+ background-color: var(--button);
+ color: var(--fg);
+ cursor: pointer;
+ transition: background-color 0.2s;
+}
+
+.calc-buttons button:hover {
+ background-color: var(--search-select);
+}
+
+.calc-result {
+ margin-top: 10px;
+ padding: 10px;
+ font-size: 20px;
+ border: 1px solid var(--border);
+ border-radius: 4px;
+ background-color: var(--snip-background);
+}
+
+.calc-history {
+ display: flex;
+ flex-direction: column-reverse;
+ font-family: monospace;
+ font-size: 16px;
+ color: var(--fg);
+ background-color: var(--search-bg);
+ border: 1px solid var(--border);
+ padding: 10px;
+ border-radius: 4px;
+ height: 120px;
+ overflow-y: auto;
+ white-space: pre-line;
+ overflow-anchor: none;
+}
+
+.calc-buttons {
+ display: grid;
+ grid-template-columns: repeat(4, 1fr);
+ gap: 8px;
+}
+
+.calc-buttons button {
+ padding: 20px;
+ font-size: 20px;
+ border-radius: 6px;
+ background-color: var(--search-bg);
+ border: 1px solid var(--border);
+ color: var(--fg);
+}
+
+.calc-buttons .equals {
+ background-color: #2ecc71;
+ color: #fff;
+ font-weight: bold;
+}
+
+.calc-buttons .equals:hover {
+ background-color: #27ae60;
+}
+
+.weather {
+ padding: 0;
+ background: none;
+ border-radius: 0;
+ box-shadow: none;
+ color: var(--snip-text);
+ min-width: 0;
+ max-width: none;
+ margin: 0;
+}
+
+.weather-header {
+ display: flex;
+ align-items: center;
+ justify-content: space-between;
+ font-size: 1.13em;
+ font-weight: 500;
+ margin-bottom: 6px;
+}
+
+.weather-location {
+ opacity: 0.82;
+}
+
+.weather-temp {
+ font-size: 2.3em;
+ font-weight: 700;
+ color: var(--blue); /* TODO: confirm var(--blue) is the intended accent color for the temperature */
+ letter-spacing: -1px;
+}
+
+.weather-deg {
+ font-size: 0.5em;
+ font-weight: 500;
+ vertical-align: super;
+ opacity: 0.5;
+}
+
+.weather-current {
+ display: flex;
+ align-items: center;
+ margin-bottom: 12px;
+ gap: 10px;
+ padding: 12px 14px 10px 14px;
+}
+
+.weather-icon {
+ font-size: 1.7em;
+ margin-right: 7px;
+}
+
+.weather-cond {
+ font-size: 1.09em;
+ font-weight: 500;
+ color: var(--highlight);
+ margin-right: 12px;
+ display: flex;
+ align-items: center;
+ gap: 5px;
+}
+
+.weather-details {
+ font-size: 0.97em;
+ opacity: 0.82;
+ margin-left: auto;
+}
+
+.weather-details span {
+ color: var(--fg);
+}
+
+.weather-forecast {
+ display: flex;
+ gap: 7px;
+ margin-top: 9px;
+}
+
+.weather-forecast-day {
+ flex: 1 1 0;
+ min-width: 0;
+ background: var(--search-bg);
+ border-radius: 8px;
+ padding: 8px 6px;
+ text-align: center;
+ color: var(--font-fg);
+ box-shadow: 0 1px 3px color-mix(in srgb, var(--search-bg), black 15%);
+}
+
+.weather-forecast-date {
+ font-size: 0.97em;
+ font-weight: 500;
+ opacity: 0.82;
+ margin-bottom: 2px;
+ color: var(--highlight);
+}
+
+.weather-forecast-temps {
+ font-size: 1.13em;
+ font-weight: 600;
+ color: var(--blue);
+}
+
+.weather-forecast-cond {
+ font-size: 0.98em;
+ opacity: 0.83;
+ color: var(--fg);
+}
+
+
+.weather-current {
+ display: flex;
+ align-items: center;
+ justify-content: space-between;
+ margin-bottom: 12px;
+ gap: 8px;
+}
+
+.weather-current-left {
+ display: flex;
+ align-items: center;
+ gap: 7px;
+}
+
+.weather-icon {
+ font-size: 1.7em;
+}
+
+.weather-cond {
+ font-size: 1.08em;
+ font-weight: 500;
+ color: var(--highlight);
+}
+
+.weather-current-right {
+ display: flex;
+ align-items: center;
+ gap: 12px;
+}
+
+.weather-detail {
+ font-size: 0.98em;
+ color: var(--blue);
+ opacity: 0.85;
+ font-weight: 500;
+}
+
+/* Responsive adjustments */
+@media only screen and (max-width: 1450px) {
+ .instant-container {
+ right: 75px;
+ }
+}
+
+@media only screen and (max-width: 1350px) {
+ .instant-container {
+ right: 5px;
+ }
+}
+
+@media only screen and (max-width: 1290px) {
+ .instant-container {
+ position: relative;
+ top: 0;
+ right: 0;
+ margin-left: 175px;
+ margin-top: 20px;
+ }
+}
+
+@media only screen and (max-width: 880px) {
+ .instant-container {
+ width: 90%;
+ margin-left: 20px;
+ }
+}
+
+@media (max-width: 540px) {
+ .weather-forecast {
+ gap: 4px;
+ }
+ .weather-forecast-day {
+ padding: 6px 2px;
+ font-size: 0.95em;
+ }
+}
diff --git a/static/css/style-loadingcircle.css b/static/css/style-loadingcircle.css
new file mode 100644
index 0000000..ce65c0b
--- /dev/null
+++ b/static/css/style-loadingcircle.css
@@ -0,0 +1,32 @@
+.favicon-wrapper {
+ position: relative;
+ display: inline-block;
+ width: 16px;
+ height: 16px;
+}
+
+.favicon-wrapper.loading img {
+ visibility: hidden;
+ /* hide placeholder */
+}
+
+.favicon-wrapper.loading::after {
+ content: "";
+ position: absolute;
+ top: 50%;
+ left: 50%;
+ width: 14px;
+ height: 14px;
+ margin: -8px 0 0 -8px;
+ border: 2px solid var(--html-bg);
+ border-top-color: var(--fg);
+ border-radius: 50%;
+ animation: spin 0.7s linear infinite;
+ z-index: 2;
+}
+
+@keyframes spin {
+ to {
+ transform: rotate(360deg);
+ }
+}
\ No newline at end of file
diff --git a/static/css/style-loadingindicator.css b/static/css/style-loadingindicator.css
new file mode 100644
index 0000000..5068c0c
--- /dev/null
+++ b/static/css/style-loadingindicator.css
@@ -0,0 +1,69 @@
+.message-bottom-right {
+ opacity: 0;
+ pointer-events: none;
+ transition: opacity 0.3s ease-in-out;
+ align-items: center;
+ justify-content: center;
+ position: fixed;
+ bottom: 20px;
+ right: 20px;
+ background-color: var(--search-bg);
+ color: var(--text-color);
+ padding: 10px;
+ border-radius: 5px;
+ z-index: 1000;
+ text-align: center;
+ flex-direction: column;
+ border: 1px solid var(--border);
+ box-shadow: 0 0 10px var(--box-shadow);
+}
+
+.message-bottom-right.visible {
+ opacity: 1;
+ pointer-events: auto;
+}
+
+@keyframes bounce {
+
+ 0%,
+ 100% {
+ transform: translateY(0);
+ }
+
+ 30% {
+ transform: translateY(-10px);
+ }
+
+ 50% {
+ transform: translateY(0);
+ }
+
+ 70% {
+ transform: translateY(-5px);
+ }
+
+ 85% {
+ transform: translateY(0);
+ }
+
+ 95% {
+ transform: translateY(-2px);
+ }
+}
+
+.dot {
+ display: inline-block;
+ animation: bounce 1.5s infinite;
+}
+
+.dot:nth-child(2) {
+ animation-delay: 0.1s;
+}
+
+.dot:nth-child(3) {
+ animation-delay: 0.2s;
+}
+
+.dot:nth-child(4) {
+ animation-delay: 0.3s;
+}
\ No newline at end of file
diff --git a/static/css/style-map.css b/static/css/style-map.css
new file mode 100644
index 0000000..afdc8af
--- /dev/null
+++ b/static/css/style-map.css
@@ -0,0 +1,134 @@
+/* Unable to find ... message */
+.message {
+ position: absolute;
+ bottom: 20px;
+ left: 50%;
+ transform: translateX(-50%);
+ padding: 10px;
+ background-color: var(--search-bg);
+ border: 1px solid var(--border);
+ border-radius: 5px;
+ box-shadow: 0 0 10px var(--box-shadow);
+ z-index: 1000;
+ width: auto;
+ max-width: 80%;
+ text-align: center;
+ color: var(--text-color);
+}
+
+/* Map container */
+#map {
+ height: calc(100% - 65px);
+ width: 100%;
+ top: 65px;
+}
+
+.leaflet-control-layers-toggle {
+ background-color: var(--button) !important;
+ border: 1px solid var(--border) !important;
+ color: var(--fg) !important;
+}
+
+.leaflet-bar a,
+.leaflet-bar a:hover {
+ background-color: var(--button) !important;
+ border: 1px solid var(--border) !important;
+ color: var(--fg) !important;
+}
+
+.leaflet-popup-content-wrapper,
+.leaflet-popup-tip {
+ background: var(--html-bg) !important;
+ color: var(--text-color) !important;
+ border: 1px solid var(--border) !important;
+}
+
+.leaflet-popup-content-wrapper a {
+ color: var(--link) !important;
+}
+
+.leaflet-popup-content-wrapper a:hover {
+ text-decoration: underline;
+}
+
+/* Leaflet control buttons */
+.leaflet-control-locate,
+.leaflet-control-layers-toggle {
+ background-color: var(--button) !important;
+ border: 1px solid var(--border) !important;
+ color: var(--fg) !important;
+}
+
+.leaflet-bar a,
+.leaflet-bar a:focus,
+.leaflet-bar a:active,
+.leaflet-bar a:hover {
+ background-color: var(--button) !important;
+ border: 1px solid var(--border) !important;
+ color: var(--fg) !important;
+ box-shadow: 0 2px 12px rgba(0,0,0,0.24) !important;
+ transition: background-color 0.2s;
+}
+
+.leaflet-control-layers-toggle,
+.leaflet-control-layers-expanded {
+ background-color: var(--button) !important;
+ border: 1px solid var(--border) !important;
+ color: var(--fg) !important;
+}
+
+.leaflet-bar a:hover,
+.leaflet-bar a:focus {
+ background-color: var(--search-select) !important;
+}
+
+.leaflet-popup-content-wrapper,
+.leaflet-popup-tip {
+ background: var(--html-bg) !important;
+ color: var(--text-color) !important;
+ border: 1px solid var(--border) !important;
+}
+
+.leaflet-popup-content-wrapper a {
+ color: var(--link) !important;
+}
+
+.leaflet-popup-content-wrapper a:hover {
+ text-decoration: underline;
+}
+
+/* Leaflet attribution control */
+.leaflet-control-attribution {
+ background: var(--button) !important;
+ border: 1px solid var(--border) !important;
+ color: var(--fg) !important;
+}
+
+.leaflet-control-attribution a {
+ color: var(--link) !important;
+}
+
+.leaflet-control-attribution a:hover {
+ text-decoration: underline !important;
+}
+
+/* Leaflet scale control */
+.leaflet-control-scale {
+ background: var(--button) !important;
+ border: 1px solid var(--border) !important;
+ color: var(--fg) !important;
+ text-shadow: 1px 1px 2px var(--border) !important;
+}
+
+.leaflet-control-scale-line {
+ background: var(--button) !important;
+ border: 1px solid var(--border) !important;
+ color: var(--fg) !important;
+ text-shadow: 1px 1px 2px var(--border) !important;
+}
+
+@media only screen and (max-width: 880px) {
+ #map {
+ top: 130px !important;
+ }
+}
\ No newline at end of file
diff --git a/static/css/style-menu.css b/static/css/style-menu.css
index 95be6cf..c311764 100644
--- a/static/css/style-menu.css
+++ b/static/css/style-menu.css
@@ -165,37 +165,45 @@
/* Spacing & sizing */
padding: 32px;
- max-width: 600px; /* Increased width */
- max-height: 80vh; /* Optional: restrict height to 80% of viewport */
- overflow-y: auto; /* Enable scrolling if content exceeds height */
+ max-width: 600px;
+ /* Increased width */
+ max-height: 80vh;
+ /* Optional: restrict height to 80% of viewport */
+ overflow-y: auto;
+ /* Enable scrolling if content exceeds height */
color: var(--font-fg);
}
#aboutQGatoModal #close-button {
position: absolute;
top: 12px;
- right: 12px; /* Moved close button to top-right */
+ right: 12px;
+ /* Moved close button to top-right */
}
#aboutQGatoModal .modal-content {
text-align: center;
- margin-top: 20px; /* Adjusted spacing */
+ margin-top: 20px;
+ /* Adjusted spacing */
}
/* Logo */
#aboutQGatoModal .modal-content img {
- width: 100px; /* Increased logo size */
+ width: 100px;
+ /* Increased logo size */
margin-bottom: 16px;
}
/* Headings, paragraphs, etc. */
#aboutQGatoModal .modal-content h2 {
- font-size: 2rem; /* Larger heading */
+ font-size: 2rem;
+ /* Larger heading */
margin: 8px 0;
}
#aboutQGatoModal .modal-content p {
- font-size: 1.1rem; /* Larger paragraph text */
+ font-size: 1.1rem;
+ /* Larger paragraph text */
margin: 12px 0;
}
@@ -213,8 +221,10 @@
color: var(--font-fg);
border: 1px solid var(--border);
border-radius: 6px;
- padding: 12px 16px; /* Larger button padding */
- font-size: 1rem; /* Larger button text */
+ padding: 12px 16px;
+ /* Larger button padding */
+ font-size: 1rem;
+ /* Larger button text */
cursor: pointer;
transition: border 0.3s ease, background-color 0.3s ease, color 0.3s ease;
}
@@ -225,7 +235,8 @@
/* Close Button Style */
.cloase-btn {
- font-size: 1.5rem; /* Larger close button */
+ font-size: 1.5rem;
+ /* Larger close button */
color: var(--search-button);
border-radius: 50%;
padding: 8px;
diff --git a/static/css/style-music.css b/static/css/style-music.css
new file mode 100644
index 0000000..01ed5cc
--- /dev/null
+++ b/static/css/style-music.css
@@ -0,0 +1,117 @@
+/* Music Results Styling */
+.result-item.music-item {
+ display: flex;
+ gap: 16px;
+ margin-bottom: 24px;
+ align-items: flex-start;
+}
+
+.music-thumbnail {
+ position: relative;
+ flex: 0 0 160px;
+ aspect-ratio: 1;
+ border-radius: 5px;
+ overflow: hidden;
+ background: var(--placeholder-bg);
+}
+
+.music-thumbnail img {
+ width: 100%;
+ height: 100%;
+ object-fit: cover;
+ transition: transform 0.2s ease;
+}
+
+.music-thumbnail:hover img {
+ transform: scale(1.03);
+}
+
+.thumbnail-placeholder {
+ width: 100%;
+ height: 100%;
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ background: var(--placeholder-bg);
+ color: var(--placeholder-icon);
+}
+
+.thumbnail-placeholder .material-icons-round {
+ font-size: 2.5rem;
+}
+
+.duration-overlay {
+ position: absolute;
+ bottom: 2px;
+ right: 2px;
+ background: rgba(0, 0, 0, 0.8);
+ color: white;
+ padding: 4px 8px;
+ border-radius: 3px;
+ font-size: 12px;
+ font-weight: 500;
+ backdrop-filter: blur(2px);
+}
+
+.music-info {
+ flex: 1;
+ min-width: 0;
+ padding-top: 4px;
+}
+
+.music-title {
+ margin: 0 0 8px 0;
+ font-size: 18px;
+ line-height: 1.3;
+ font-weight: 500;
+ color: var(--text-primary);
+}
+
+.music-title:hover {
+ text-decoration: underline;
+}
+
+.music-meta {
+ display: flex;
+ align-items: center;
+ gap: 8px;
+ font-size: 14px;
+ color: var(--text-secondary);
+}
+
+.artist {
+ color: var(--accent-color);
+ font-weight: 500;
+}
+
+.meta-separator {
+ color: var(--border-color);
+ font-size: 12px;
+}
+
+/* Responsive Design */
+@media (max-width: 768px) {
+ .music-thumbnail {
+ flex-basis: 120px;
+ }
+
+ .music-title {
+ font-size: 16px;
+ }
+
+ .music-meta {
+ font-size: 13px;
+ gap: 6px;
+ }
+}
+
+@media (max-width: 480px) {
+ .music-thumbnail {
+ flex-basis: 100px;
+ }
+
+ .duration-overlay {
+ font-size: 11px;
+ padding: 3px 6px;
+ }
+}
\ No newline at end of file
diff --git a/static/css/style-privacy.css b/static/css/style-privacy.css
index 5cfef4b..9037102 100644
--- a/static/css/style-privacy.css
+++ b/static/css/style-privacy.css
@@ -92,4 +92,4 @@
.privacy-section .cookie-table {
margin-left: auto;
margin-right: auto;
-}
+}
\ No newline at end of file
diff --git a/static/css/style-search.css b/static/css/style-search.css
index 446f3d2..9505517 100644
--- a/static/css/style-search.css
+++ b/static/css/style-search.css
@@ -1,5 +1,6 @@
/* General Styles */
-body, html {
+body,
+html {
margin: 0;
padding: 0;
background-color: var(--html-bg);
@@ -8,7 +9,8 @@ body, html {
color: var(--text-color);
}
-button, p {
+button,
+p {
font-family: 'Inter', Arial, Helvetica, sans-serif;
font-weight: 400;
}
@@ -101,13 +103,15 @@ body.menu-open {
text-align: left;
width: 100%;
position: absolute;
- left: -1px; /* this is so curesed */
+ left: -1px;
+ /* this is so cursed */
top: 100%;
background-color: var(--search-bg-input);
border-bottom-left-radius: 22px;
border-bottom-right-radius: 22px;
border: 1px solid var(--search-bg-input-border);
- border-top: none; /* No top border to avoid double borders with the input */
+ border-top: none;
+ /* No top border to avoid double borders with the input */
z-index: 900;
}
@@ -159,8 +163,10 @@ body.menu-open {
text-align: center;
display: flex;
flex-direction: column;
- justify-content: center; /* Center vertically */
- height: 100vh; /* Make it full viewport height */
+ justify-content: center;
+ /* Center vertically */
+ height: 100vh;
+ /* Make it full viewport height */
}
/* Search Type Icons */
@@ -171,7 +177,8 @@ body.menu-open {
margin-top: 30px;
gap: 20px;
/* Allow transform movement */
- transition: transform 0.3s ease; /* Smooth movement when suggestions appear */
+ transition: transform 0.3s ease;
+ /* Smooth movement when suggestions appear */
position: relative;
}
@@ -242,7 +249,8 @@ body.menu-open {
z-index: 999;
width: 50px;
height: 50px;
- font-size: 36px; /* Not working */
+ font-size: 36px;
+ /* Not working */
}
.side-nav .closebtn:hover {
@@ -259,11 +267,13 @@ body.menu-open {
right: 0;
background-color: var(--html-bg);
overflow-x: hidden;
- transition: width 0.4s ease-in-out; /* Does this even change anything? cant really tell */
+ transition: width 0.4s ease-in-out;
+ /* Does this even change anything? can't really tell */
z-index: 1000;
- box-shadow: -2px 0 5px rgba(0,0,0,0.5);
+ box-shadow: -2px 0 5px rgba(0, 0, 0, 0.5);
padding-top: 70px;
- overflow-x: hidden; /* Prevents horizontal overflow */
+ overflow-x: hidden;
+ /* Prevents horizontal overflow */
}
.side-nav:hover .closebtn:hover {
@@ -322,7 +332,7 @@ body.menu-open::before {
left: 0;
width: 100%;
height: 100%;
- background: rgba(0,0,0,0.5);
+ background: rgba(0, 0, 0, 0.5);
z-index: 998;
}
@@ -337,11 +347,13 @@ body.menu-open::before {
.wrapper {
width: 90%;
}
+
.logo-container svg {
width: 75%;
max-width: 90%;
min-width: 25%;
}
+
.side-nav {
max-width: 90%;
}
@@ -378,7 +390,8 @@ p {
line-height: 1.58;
}
-input, button {
+input,
+button {
outline: none;
}
@@ -387,11 +400,14 @@ body.menu-open {
}
/* Prevent horizontal scrolling on the page */
-html, body {
- overflow-x: hidden; /* Disables horizontal scrollbar */
+html,
+body {
+ overflow-x: hidden;
+ /* Disables horizontal scrollbar */
margin: 0;
padding: 0;
- max-width: 100vw; /* Limits content to viewport width */
+ max-width: 100vw;
+ /* Limits content to viewport width */
}
/*
@@ -400,4 +416,4 @@ html, body {
outline: none;
border: 1px solid var(--font-fg);
box-shadow: none;
-} */
+} */
\ No newline at end of file
diff --git a/static/css/style-settings.css b/static/css/style-settings.css
index 6719fa7..73e73e8 100644
--- a/static/css/style-settings.css
+++ b/static/css/style-settings.css
@@ -7,6 +7,7 @@
object-position: center;
vertical-align: bottom;
border-radius: 4px;
+ position: relative;
}
.theme-link {
@@ -16,7 +17,8 @@
width: 48%;
margin-bottom: 10px;
height: 150px;
- position: relative; /* Make it possible to position the tooltip */
+ position: relative;
+ /* Make it possible to position the tooltip */
}
.theme-link img {
@@ -33,21 +35,34 @@
} */
.theme-tooltip {
- display: none; /* Hidden by default */
position: absolute;
- bottom: 10px; /* Position at the bottom of the image */
left: 50%;
- transform: translateX(-50%);
- background-color: rgba(0, 0, 0, 0.7); /* Semi-transparent background */
- color: #fff;
- padding: 5px 10px;
- border-radius: 4px;
+ bottom: 14px;
+ transform: translateX(-50%) translateY(100%);
+ background: var(--search-bg);
+ color: var(--font-fg);
font-size: 14px;
+ padding: 7px 14px;
+ border-radius: 6px;
+ box-shadow: 0 5px 24px 0 rgba(0,0,0,0.25);
+ opacity: 0;
+ pointer-events: none;
+ z-index: 5;
white-space: nowrap;
+ transition: opacity 0.22s cubic-bezier(.7, .1, .6, 1), transform 0.22s cubic-bezier(.7, .1, .6, 1);
}
-.theme-link:hover .theme-tooltip {
- display: block; /* Show tooltip on hover */
+.theme-link:hover .theme-tooltip,
+.theme-link:focus .theme-tooltip {
+ opacity: 1;
+ transform: translateX(-50%) translateY(0);
+ pointer-events: auto;
+}
+
+.theme-link.show-tooltip .theme-tooltip {
+ opacity: 1;
+ transform: translateX(-50%) translateY(0);
+ pointer-events: auto;
}
.themes-settings-menu {
@@ -62,7 +77,7 @@
}
#searchLanguageSelect,
-#safeSearchSelect,
+#safeSearchSelect,
#siteLanguageSelect {
border-radius: 4px;
padding: 6px;
@@ -75,11 +90,12 @@
cursor: pointer;
transition: all 0.3s ease;
text-align: center;
- box-sizing: border-box; /* Ensures consistent width with padding */
+ box-sizing: border-box;
+ /* Ensures consistent width with padding */
}
#searchLanguageSelect:hover,
-#safeSearchSelect:hover,
+#safeSearchSelect:hover,
#siteLanguageSelect:hover {
border: 1px solid #5f6368;
/* background-color: var(--button-hover); */
@@ -88,7 +104,7 @@
.save.save-settings-page {
padding: 6px;
width: 160px;
- height: 40px;
+ height: 40px;
}
/* Ensure correct alignment */
@@ -98,9 +114,9 @@
justify-content: space-between;
}
-.settings-row select,
+.settings-row select,
.settings-row button {
width: 160px;
height: 40px;
box-sizing: border-box;
-}
+}
\ No newline at end of file
diff --git a/static/css/style.css b/static/css/style.css
index e4b1cd6..9561c86 100644
--- a/static/css/style.css
+++ b/static/css/style.css
@@ -16,7 +16,7 @@
white-space: nowrap;
word-wrap: normal;
direction: ltr;
-
+
/* Support for all WebKit browsers. */
-webkit-font-feature-settings: 'liga';
-webkit-font-smoothing: antialiased;
@@ -28,8 +28,7 @@
font-feature-settings: 'liga';
}
-#search-wrapper-ico,
-#clearSearch {
+#search-wrapper-ico {
background: none;
border: none;
color: var(--fg);
@@ -39,8 +38,7 @@
cursor: pointer;
}
-#search-wrapper-ico:hover,
-#clearSearch:hover {
+#search-wrapper-ico:hover {
transition: all .3s ease;
color: var(--font-fg);
}
@@ -55,18 +53,17 @@
margin-right: 0px;
}
-#clearSearch {
- right: 40px;
- visibility: hidden;
-}
-
-.fetched_dif {
+.fetched_dif_videos {
margin-top: 110px !important;
}
-.fetched_img {
- margin-left: 1.2% !important;
- left: 0px !important;
+.fetched_dif_files {
+ margin-top: 10px !important;
+}
+
+
+.fetched_dif_images {
+ margin-top: 10px;
}
.clean {
@@ -110,122 +107,68 @@ html {
font-size: 16px;
}
-#info_font {
- font-size: 15px;
-}
-
-.calc {
- height: fit-content;
- width: fit-content;
- position: relative;
- left: 175px;
- border: 1px solid var(--snip-border);
- background-color: var(--snip-background);
- border-radius: 8px;
- display: flex;
- flex-direction: column;
- align-items: center;
- justify-content: flex-start;
-}
-
-.calc-btn-style {
- background-color: var(--html-bg) !important;
-}
-
-.calc-input {
- width: 90%;
- height: 10%;
- background-color: var(--search-bg);
- border: 1px solid var(--snip-border);
- border-radius: 8px;
- padding: 15px;
- margin-top: 8px;
- text-align: right;
- max-width: 48em;
- line-height: 1.58;
- font-size: 22px;
- color: var(--fg);
- letter-spacing: normal;
- overflow: hidden;
-}
-
-.calc-btn {
- max-width: 48em;
- line-height: 50px;
- font-size: 22px;
- color: var(--fg);
- letter-spacing: normal;
- border-radius: 8px;
- background-color: var(--search-bg);
- border: 1px solid var(--snip-border);
- height: 50px;
- margin: auto;
- margin: 4px;
- width: 80px;
- text-align: center;
-}
-
-.calc-btn-2 {
- max-width: 48em;
- line-height: 50px;
- font-size: 22px;
- color: var(--fff);
- letter-spacing: normal;
- border-radius: 8px;
- background-color: var(--font-fg);
- height: 50px;
- margin: auto;
- margin: 4px;
- width: 80px;
- text-align: center;
-}
-
-.calc-btns {
- display: grid;
- grid-template-columns: repeat(4, 90px);
- width: 100%;
- justify-content: center;
- padding: 4px;
-}
-
-.calc-pos-absolute {
- position: absolute;
- margin-top: 60px;
- display: flex;
- flex-direction: row;
- flex-wrap: wrap;
-}
-
-.prev_calculation {
- opacity: 0.5;
- font-size: 14px;
-}
-
-.emoji-code {
- font-family: Arial, Helvetica, sans-serif;
-}
-
/* container */
+
.images {
+ margin: 0;
display: flex;
flex-wrap: wrap;
- justify-content: center;
+ margin-right: 35.4%;
+ position: relative;
}
.image {
- flex: 1 1 auto;
- padding: .5rem .5rem 2rem .5rem;
+ flex-grow: 1;
+ padding: .5rem .5rem 3rem .5rem;
margin: .25rem;
border-radius: 12px;
height: 10rem;
- max-width: 20rem;
- min-width: 3rem;
transition: outline 0.3s ease, transform 0.3s ease;
}
-.image_selected {
- outline: 1px solid var(--border);
- transform: scale(1.05);
+.image>a {
+ position: relative;
+ text-decoration: none;
+}
+
+.image img {
+ transition: .3s;
+}
+
+.resolution {
+ transition: .3s !important;
+}
+
+.image:hover img {
+ box-shadow: 0 14px 28px var(--view-image-color);
+}
+
+.img_title {
+ display: block;
+ position: absolute;
+ width: 100%;
+ font-size: .9rem;
+ color: var(--fg);
+ padding: .5rem 0 0 0;
+ overflow: hidden;
+ text-overflow: ellipsis;
+ white-space: nowrap;
+}
+
+.img_title {
+ color: var(--blue);
+}
+
+.image img {
+ margin: 0;
+ padding: 0;
+ height: 100%;
+ width: 100%;
+ object-fit: cover;
+ vertical-align: bottom;
+ border-radius: 12px;
+ box-sizing: border-box;
+ border: 1px solid var(--border);
}
.image>a {
@@ -294,64 +237,13 @@ html {
}
.btn-nostyle {
+ font-family: 'Inter', Arial, Helvetica, sans-serif !important;
background-color: inherit;
border: none;
padding: 0px;
width: fit-content;
}
-.view-image {
- background-color: var(--view-image-color);
- width: 100%;
- height: auto;
- display: flex;
- justify-content: center;
- align-items: center;
-}
-
-.view-image img {
- max-width: 100%;
- max-height: 427px;
-}
-
-.image_view a {
- text-decoration: none;
- color: var(--link);
- font-size: 14px;
- overflow: hidden;
- text-overflow: ellipsis;
- white-space: nowrap;
-}
-
-.image_view a:hover {
- text-decoration: underline;
-}
-
-.image_view p {
- margin-left: 20px;
- margin-right: 20px;
- display: block;
-}
-
-.image_view a {
- display: block;
-}
-
-hr {
- border: 0;
- border-top: 1px solid var(--border);
- margin: 0;
- padding: 0;
-}
-
-.image_hide {
- display: none !important;
-}
-
-.image_show {
- display: block !important;
-}
-
.icon_visibility {
visibility: visible;
}
@@ -375,9 +267,11 @@ hr {
font-size: 16px;
}
-.video_title {
- font-size: 16px;
-}
+/*
+TODO: split this file into a general stylesheet and per-result-page stylesheets to avoid overrides like this
+.video_title h3 {
+ margin-top: 0px !important;
+} */
.video_title a {
color: var(--link);
@@ -397,6 +291,7 @@ hr {
width: 254px;
height: 143px;
object-fit: cover;
+ border-radius: 5px;
}
.video__img__results {
@@ -427,13 +322,19 @@ hr {
.duration {
position: absolute;
color: #fff;
- font-size: 11px;
+ font-size: 12px;
+ font-weight: 500;
padding: .5em;
- background: rgba(0, 0, 0, .5);
- right: 0;
+ background: rgba(0, 0, 0, 0.8);
+ color: white;
+ padding: 4px 8px;
margin-top: -28px !important;
line-height: 1.3;
letter-spacing: -0.4px;
+ bottom: 6px;
+ right: 2px;
+ border-radius: 3px;
+ backdrop-filter: blur(2px);
}
.pipe {
@@ -483,11 +384,7 @@ hr {
}
#theme,
-#safe,
-#open-new-tab,
-#domain,
-#javascript-setting,
-#ux_lang {
+#safe {
border-radius: 4px;
padding: 6px;
font-size: 15px;
@@ -500,26 +397,11 @@ hr {
}
#theme:hover,
-#safe:hover,
-#open-new-tab:hover,
-#domain:hover,
-#javascript-setting:hover,
-#ux_lang:hover {
+#safe:hover {
border: 1px solid #5f6368;
cursor: pointer;
}
-.domain {
- margin-top: 10px;
- border: 1px solid var(--button);
- border-radius: 4px;
- padding: 6px;
- font-size: 15px;
- color: var(--font-fg);
- width: 90%;
- background: var(--button);
-}
-
.save {
border-radius: 4px;
padding: 6px;
@@ -537,10 +419,6 @@ hr {
cursor: pointer;
}
-.settings-row2 {
- border-bottom: 1px solid var(--html-bg) !important;
-}
-
.font-hide {
color: var(--html-bg);
}
@@ -567,7 +445,7 @@ hr {
.settings-row {
display: flex;
align-items: center;
- justify-content: flex-end;
+ justify-content: flex-end;
padding: 2px;
padding-left: 10px;
padding-right: 10px;
@@ -581,19 +459,6 @@ hr {
margin: 0;
}
-.kno_wiki {
- display: none;
-}
-
-.kno_wiki_show {
- display: initial !important;
- border-radius: 6px;
-}
-
-.open-in-new-tab * {
- display: inline-block;
-}
-
.results_settings {
color: var(--fg);
left: 175px;
@@ -623,6 +488,10 @@ hr {
text-align: left;
}
+.torrent-cat {
+ margin-top: 110px;
+}
+
.torrent-cat:hover,
.torrent-settings:hover,
.torrent-sort-save:hover {
@@ -647,24 +516,12 @@ form.torrent-sort {
transition: all .3s ease;
}
-.settings-results-link {
- position: absolute;
- color: var(--fg);
- text-decoration: none;
-}
-.settings-results-link,
.settings-icon-link {
color: var(--fg);
text-decoration: none;
}
-.settings-results-link {
- margin-left: 24px;
- display: block;
- margin-top: 1px;
-}
-
.wrapper-results input {
padding: 10px;
}
@@ -726,10 +583,6 @@ button {
font-family: 'Inter';
}
-.X {
- color: var(--font-fg);
-}
-
.search-container input {
width: 90%;
color: var(--font-fg);
@@ -757,42 +610,6 @@ p {
font-size: 14px;
}
-.check {
- display: flex;
- position: relative;
- left: 175px;
- width: 600px;
- word-wrap: break-word;
- font-size: 15px;
- max-width: 48em;
- line-height: 1.58;
-}
-
-.check p,
-.check h3 {
- font-size: 16px;
- margin-bottom: 2px;
- margin-top: 2px;
-}
-
-.check p {
- color: #ff7769;
-}
-
-.check a {
- color: var(--link);
- text-decoration: none;
-}
-
-.check h3 {
- margin-left: 4px;
- font-style: italic;
-}
-
-.check a:hover {
- text-decoration: underline;
-}
-
.torrent h3 {
font-size: 16px !important;
}
@@ -841,10 +658,6 @@ p {
border-top: 1px solid var(--border);
}
-#search_footer {
- position: fixed;
-}
-
.footer a,
.footer a:hover,
.footer a:visited {
@@ -882,36 +695,7 @@ p {
max-width: 600px;
word-wrap: break-word;
margin-bottom: 35px;
-}
-
-.result_sublink {
- position: relative;
- left: 175px;
- margin-top: 10px;
- max-width: 600px;
- word-wrap: break-word;
-}
-
-.sublinks {
- position: relative;
- left: 14px;
- max-width: 600px;
- word-wrap: break-word;
-}
-
-.search-button-wrapper button:hover {
- border: 1px solid #5f6368;
- cursor: pointer;
-}
-
-.search-button-wrapper button {
- color: var(--font-fg);
- background-color: var(--button);
- font-size: 14px;
- border: 1px solid var(--border);
- border-radius: 4px;
- padding: 13px 10px 13px 10px;
- margin: 70px 60px 0px 60px;
+ z-index: 0;
}
.no-decoration {
@@ -925,8 +709,8 @@ p {
text-decoration: none;
}
-.js-enabled {
- display: none;
+.js-enabled {
+ display: none;
}
.logomobile {
@@ -981,7 +765,8 @@ p {
:root {
/*--search-bg-input: #1a1a1a; /* Example background color */
- --search-bg-input-border: #2a2a2a; /* Visible border color */
+ --search-bg-input-border: #2a2a2a;
+ /* Visible border color */
color-scheme: dark;
}
@@ -999,13 +784,11 @@ p {
color: var(--link);
}
-.results a:visited h3,
-.result_sublink a:visited h3 {
+.results a:visited h3 {
color: var(--link-visited);
}
-.results h3,
-.result_sublink h3 {
+.results h3 {
margin-top: 4px;
margin-bottom: 2px;
font-size: 20px;
@@ -1014,18 +797,14 @@ p {
letter-spacing: -0.4px;
}
-.results p,
-.result_sublink p {
+.results p {
margin-top: 0px;
color: var(--font-fg);
}
.results a,
.results a:hover,
-.results a:visited,
-.result_sublink a,
-.result_sublink a:hover,
-.result_sublink a:visited {
+.results a:visited {
text-decoration: none;
/* color: #ebecf7; */
font-size: 14px;
@@ -1043,113 +822,6 @@ p {
white-space: nowrap;
}
-.snip {
- padding: 17px;
- border: 1px solid var(--snip-border);
- width: 440px;
- border-radius: 8px;
- background: var(--snip-background);
- color: var(--fg);
- margin-left: 840px;
- margin-top: 0px;
- position: absolute;
- max-width: 48em;
- line-height: 1.58;
- font-size: 14px;
- letter-spacing: normal;
-}
-
-.snip_title {
- margin-top: 0;
- margin-bottom: 0;
- color: var(--highlight);
- font-weight: bold;
- font-style: normal;
- max-width: 48em;
- line-height: 1.58;
- font-size: 22px;
-}
-
-.wiki_known_for {
- margin: 0px;
- font-weight: normal;
- margin-bottom: 10px;
-}
-
-.snip img {
- float: right;
- max-width: 30%;
- max-height: 150px;
- padding-bottom: 10px;
- margin-left: 10px;
-}
-
-.snip .wiki_link {
- margin-top: 10px;
- text-decoration: none;
- color: var(--link);
- font-size: 14px;
- overflow: hidden;
- text-overflow: ellipsis;
- white-space: nowrap;
-}
-
-.snip a:hover {
- text-decoration: underline;
-}
-
-.snip .about {
- font-size: 18px;
- margin-top: 10px;
- margin-bottom: 5px;
-}
-
-.factpoint {
- color: var(--fg);
- font-weight: bold;
- vertical-align: text-top;
- text-align: left;
- padding-right: 14px;
-}
-
-.fact a {
- color: var(--link);
- text-decoration: none;
-}
-.fact a:visited {
- color: var(--link-visited);
-}
-
-.snipp {
- padding: 10px;
- border-bottom: 1px solid var(--border);
- width: 500px;
- background: inherit;
- color: var(--fg);
- margin-left: 170px;
- margin-top: 0px;
- position: relative;
- line-height: 1.58;
- font-size: 16px;
- float: none;
- max-width: 48em;
-}
-
-.snipp a {
- display: block;
- margin-top: 10px;
- text-decoration: none;
- color: var(--link);
- font-size: 14px;
- overflow: hidden;
- text-overflow: ellipsis;
- white-space: nowrap;
-}
-
-.snipp a:hover {
- text-decoration: underline;
-}
-
.clickable {
cursor: pointer;
}
@@ -1161,8 +833,7 @@ p {
color: var(--fg);
width: 530px;
padding: 15px;
- margin-bottom: 627px;
- margin-top: 20px;
+ margin-top: 10px;
font-size: 14px;
line-height: 1.58;
letter-spacing: normal;
@@ -1172,159 +843,93 @@ p {
margin-left: 175px;
}
-/* MAP */
+/* Favicon styling */
+.result_header {
+ display: flex;
+ align-items: center;
+ gap: 8px;
+ margin-bottom: 4px;
+}
-.message {
- position: absolute;
- bottom: 20px;
- left: 50%;
- transform: translateX(-50%);
- padding: 10px;
- background-color: var(--search-bg);
- border: 1px solid var(--border);
- border-radius: 5px;
- box-shadow: 0 0 10px var(--box-shadow);
- z-index: 1000;
- width: auto;
- max-width: 80%;
- text-align: center;
+.favicon-container {
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ height: 18px;
+ border-radius: 8%;
+ flex-shrink: 0;
+}
+
+.favicon {
+ width: 16px;
+ height: 16px;
+ border-radius: 3px;
+ box-shadow: 0 0 2px rgba(0, 0, 0, 0.4);
+}
+
+/* Result item spacing */
+.result_item {
+ margin-bottom: 1.5rem;
+}
+
+.result-title h3 {
+ margin: 4px 0;
+ font-weight: 400;
+}
+
+.single-line-ellipsis {
+ white-space: nowrap;
+ overflow: hidden;
+ text-overflow: ellipsis;
+ margin: 0;
+}
+
+.clamp-3-lines {
+ display: -webkit-box;
+ -webkit-box-orient: vertical;
+ -webkit-line-clamp: 3;
+
+ /* Standard syntax (future support) */
+ line-clamp: 3;
+ box-orient: vertical;
+
+ overflow: hidden;
+ text-overflow: ellipsis;
+ line-height: 1.5;
+ /* adjust if needed */
+ max-height: calc(1.5em * 3);
+ /* 3 lines */
+}
+
+.result-description {
+ margin: 4px 0 0 0;
+ color: var(--font-fg);
+ line-height: 1.4;
+}
+
+.results br {
+ display: none;
+}
+
+.result-url {
+ font-size: 14px;
+ color: var(--fg);
+ display: flex;
+ align-items: center;
+ flex-wrap: wrap;
+ gap: 4px;
+}
+
+body,
+h1,
+p,
+a,
+input,
+button {
color: var(--text-color);
-}
-
-/* Map container */
-#map {
- height: calc(100% - 60px);
- width: 100%;
- top: 60px;
-}
-
-/* Leaflet control buttons */
-.leaflet-control-locate,
-.leaflet-control-layers-toggle {
- background-color: var(--button) !important;
- border: 1px solid var(--border) !important;
- color: var(--fg) !important;
-}
-
-.leaflet-bar a,
-.leaflet-bar a:hover {
- background-color: var(--button) !important;
- border: 1px solid var(--border) !important;
- color: var(--fg) !important;
-}
-
-.leaflet-popup-content-wrapper,
-.leaflet-popup-tip {
- background: var(--html-bg) !important;
- color: var(--text-color) !important;
- border: 1px solid var(--border) !important;
-}
-
-.leaflet-popup-content-wrapper a {
- color: var(--link) !important;
-}
-
-.leaflet-popup-content-wrapper a:hover {
- text-decoration: underline;
-}
-
-/* Leaflet control buttons */
-.leaflet-control-locate,
-.leaflet-control-layers-toggle {
- background-color: var(--button) !important;
- border: 1px solid var(--border) !important;
- color: var(--fg) !important;
-}
-
-.leaflet-bar a,
-.leaflet-bar a:hover {
- background-color: var(--button) !important;
- border: 1px solid var(--border) !important;
- color: var(--fg) !important;
-}
-
-.leaflet-popup-content-wrapper,
-.leaflet-popup-tip {
- background: var(--html-bg) !important;
- color: var(--text-color) !important;
- border: 1px solid var(--border) !important;
-}
-
-.leaflet-popup-content-wrapper a {
- color: var(--link) !important;
-}
-
-.leaflet-popup-content-wrapper a:hover {
- text-decoration: underline;
-}
-
-/* Leaflet attribution control */
-.leaflet-control-attribution {
- background: var(--button) !important;
- border: 1px solid var(--border) !important;
- color: var(--fg) !important;
-}
-
-.leaflet-control-attribution a {
- color: var(--link) !important;
-}
-
-.leaflet-control-attribution a:hover {
- text-decoration: underline !important;
-}
-
-/* Leaflet scale control */
-.leaflet-control-scale {
- background: var(--button) !important;
- border: 1px solid var(--border) !important;
- color: var(--fg) !important;
- text-shadow: 1px 1px 2px var(--border) !important; /* Adjust text shadow */
-}
-
-.leaflet-control-scale-line {
- background: var(--button) !important;
- border: 1px solid var(--border) !important;
- color: var(--fg) !important;
- text-shadow: 1px 1px 2px var(--border) !important; /* Adjust text shadow */
-}
-
-.message-bottom-left {
- display: none;
- align-items: center;
- justify-content: center;
- position: fixed;
- bottom: 20px;
- right: 20px;
- background-color: var(--search-bg);
- color: var(--text-color);
- padding: 10px;
- border-radius: 5px;
- z-index: 1000;
- text-align: center;
- flex-direction: column;
- border: 1px solid var(--border);
- box-shadow: 0 0 10px var(--box-shadow);
-}
-
-body, h1, p, a, input, button {
- color: var(--text-color); /* Applies the text color based on theme */
- background-color: var(--background-color); /* Applies the background color based on theme */
-}
-
-@media only screen and (max-width: 1320px) {
-
- .snip {
- margin-left: 780px;
- }
-}
-
-@media only screen and (max-width: 1220px) {
-
- .snip {
- position: relative;
- float: none;
- margin-left: 170px;
- }
+ /* Applies the text color based on theme */
+ background-color: var(--background-color);
+ /* Applies the background color based on theme */
}
@media only screen and (max-width: 880px) {
@@ -1333,7 +938,6 @@ body, h1, p, a, input, button {
margin-top: -15px !important;
}
- .snip,
.no-results-found {
position: relative;
float: none;
@@ -1343,12 +947,7 @@ body, h1, p, a, input, button {
font-size: 13px;
}
- .calc {
- left: 20px;
- }
-
- .results h3,
- .result_sublink h3 {
+ .results h3 {
font-size: 16px;
}
@@ -1362,14 +961,6 @@ body, h1, p, a, input, button {
height: 77px;
}
- .snip a {
- font-size: 13px;
- }
-
- .snip_title {
- font-size: 16px;
- }
-
.settings {
max-width: 100%;
margin-left: 0px;
@@ -1434,20 +1025,6 @@ body, h1, p, a, input, button {
max-width: 92%;
}
- .check {
- left: 20px;
- max-width: 90%;
- }
-
- .snipp {
- position: relative;
- float: none;
- max-width: 90%;
- margin-left: 10px;
- width: auto;
- font-size: 16px;
- }
-
p {
font-size: 13px;
line-height: normal;
@@ -1461,10 +1038,6 @@ body, h1, p, a, input, button {
font-size: 18px;
}
- .sublinks {
- max-width: 90%;
- }
-
.results-search-container {
margin-left: auto;
margin-right: auto;
@@ -1477,19 +1050,6 @@ body, h1, p, a, input, button {
margin-left: 3px;
}
- .image_view {
- width: 100%;
- height: 77%;
- margin-top: -33px;
- margin-right: 0%;
- border-top-right-radius: 0px;
- border-top-left-radius: 0px;
- }
-
- .view-image img {
- max-height: 250px;
- }
-
.images {
margin-right: 1.2% !important;
}
@@ -1508,7 +1068,8 @@ body, h1, p, a, input, button {
padding: 10px;
display: block;
margin-top: 0px;
- top: -5px; /* how ? why ? I dont even wanna know */
+ top: -5px;
+ /* how ? why ? I dont even wanna know */
left: 0px;
}
@@ -1517,10 +1078,6 @@ body, h1, p, a, input, button {
height: 35px;
}
- .mobile-none {
- display: none;
- }
-
.sub-search-button-wrapper {
margin: 0;
padding: 0;
@@ -1576,11 +1133,6 @@ body, h1, p, a, input, button {
max-width: 90%;
}
- .result_sublink {
- left: 20px;
- max-width: 90%;
- }
-
.fetched {
left: 20px;
max-width: 90%;
@@ -1588,16 +1140,22 @@ body, h1, p, a, input, button {
margin-top: -2px;
}
- .fetched_img {
- margin-top: 135px !important;
- margin-left: 1.2% !important;
- left: 0px !important;
+ .fetched_vid {
+ margin-top: 25px !important;
}
- .fetched_vid {
+ .fetched_dif_videos {
margin-top: 135px !important;
}
+ .fetched_dif_files {
+ margin-top: 25px !important;
+ }
+
+ .fetched_dif_images {
+ margin-top: 25px;
+ }
+
.results_settings {
left: 20px;
font-size: 13px;
@@ -1609,6 +1167,7 @@ body, h1, p, a, input, button {
}
form.torrent-sort {
+ margin-top: 35px;
left: 20px;
}
@@ -1634,10 +1193,6 @@ body, h1, p, a, input, button {
margin-left: 3px;
}
- .search-button-wrapper button {
- display: none;
- }
-
.icon-button {
margin-top: 30px;
}
@@ -1650,10 +1205,6 @@ body, h1, p, a, input, button {
margin-top: 30px;
}
- #clearSearch {
- top: 6px;
- }
-
}
@media only screen and (max-width: 470px) {
@@ -1667,7 +1218,8 @@ body, h1, p, a, input, button {
padding-right: 10px;
position: relative;
scroll-behavior: smooth;
- scrollbar-width: none; /* Hide scrollbar for Firefox */
+ scrollbar-width: none;
+ /* Hide scrollbar for Firefox */
}
/* Hide scrollbar for Webkit-based browsers (e.g., Chrome, Safari) */
@@ -1677,15 +1229,20 @@ body, h1, p, a, input, button {
/* Gradient as a fixed visual indicator */
.sub-search-button-wrapper::after {
- content: ''; /* Adds a gradient or fade effect as a scroll indicator */
- position: fixed; /* Ensure it doesn't move with scrolling */
+ content: '';
+ /* Adds a gradient or fade effect as a scroll indicator */
+ position: fixed;
+ /* Ensure it doesn't move with scrolling */
top: 90px;
right: 0;
height: 40px;
width: 20px;
- background: linear-gradient(to left, var(--html-bg), rgba(255, 255, 255, 0)); /* Adjust colors */
- pointer-events: none; /* Prevent interaction */
- z-index: 1; /* Ensure it is above the scrollable content */
+ background: linear-gradient(to left, var(--html-bg), rgba(255, 255, 255, 0));
+ /* Adjust colors */
+ pointer-events: none;
+ /* Prevent interaction */
+ z-index: 1;
+ /* Ensure it is above the scrollable content */
}
/* Hide gradient when fully scrolled */
@@ -1694,7 +1251,7 @@ body, h1, p, a, input, button {
}
}
- /* This is really bad */
+/* This is really bad */
@media only screen and (max-width: 400px) {
.icon-button {
@@ -1702,29 +1259,7 @@ body, h1, p, a, input, button {
}
.results_settings {
- display: flex; /* Enable flexbox */
- flex-wrap: wrap; /* Allow elements to wrap onto new lines when necessary */
- }
-}
-
-/* Ensuring dark theme compliance */
-@media (prefers-color-scheme: dark) {
- .leaflet-control-locate,
- .leaflet-control-layers-toggle,
- .leaflet-bar a,
- .leaflet-bar a:hover,
- .leaflet-popup-content-wrapper,
- .leaflet-popup-tip,
- .leaflet-control-attribution,
- .leaflet-control-scale,
- .leaflet-control-scale-line {
- background-color: var(--button) !important;
- border: 1px solid var(--border) !important;
- color: var(--fg) !important;
- text-shadow: 1px 1px 2px var(--background-color) !important; /* Dark theme shadow adjustment */
- }
-
- .leaflet-control-attribution a {
- color: var(--link) !important;
+ display: flex; /* Enable flexbox */
+ flex-wrap: wrap; /* Allow elements to wrap onto new lines when necessary */
}
}
\ No newline at end of file
diff --git a/static/fonts/MaterialIcons-Round.woff2 b/static/fonts/MaterialIcons-Round.woff2
new file mode 100644
index 0000000..45ed1fb
Binary files /dev/null and b/static/fonts/MaterialIcons-Round.woff2 differ
diff --git a/static/fonts/material-icons-round-v108-latin-regular.woff2 b/static/fonts/material-icons-round-v108-latin-regular.woff2
deleted file mode 100644
index 6f6a973..0000000
Binary files a/static/fonts/material-icons-round-v108-latin-regular.woff2 and /dev/null differ
diff --git a/static/images/globe.svg b/static/images/globe.svg
new file mode 100644
index 0000000..4837352
--- /dev/null
+++ b/static/images/globe.svg
@@ -0,0 +1,3 @@
+
diff --git a/static/images/icon-512.svg b/static/images/icon-512.svg
new file mode 100644
index 0000000..3e0b851
--- /dev/null
+++ b/static/images/icon-512.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/static/js/calculator.js b/static/js/calculator.js
new file mode 100644
index 0000000..71fc552
--- /dev/null
+++ b/static/js/calculator.js
@@ -0,0 +1,53 @@
+document.addEventListener('DOMContentLoaded', function () {
+ const calcContainer = document.getElementById('dynamic-calc');
+ const staticCalc = document.getElementById('static-calc');
+
+ if (calcContainer) {
+ calcContainer.style.display = 'block';
+ if (staticCalc) staticCalc.style.display = 'none';
+
+ const input = document.getElementById('calc-input');
+ const history = document.getElementById('calc-history');
+ const buttons = document.querySelectorAll('.calc-buttons button');
+
+ const staticCalcContent = staticCalc?.textContent.trim(); // load HTML calc
+ const initialQuery = input.value.trim();
+
+ let currentInput = initialQuery;
+ let historyLog = [];
+
+ if (initialQuery && staticCalcContent) {
+ historyLog.push(`${initialQuery} = ${staticCalcContent}`);
+ }
+
+ const updateUI = () => {
+ input.value = currentInput;
+ history.innerHTML = historyLog.map(entry => `
${entry}
`).join('');
+ };
+
+ buttons.forEach(button => {
+ button.addEventListener('click', () => {
+ const val = button.getAttribute('data-value');
+
+ if (val === 'C') {
+ currentInput = '';
+ } else if (val === '=') {
+ try {
+ const res = eval(currentInput);
+ historyLog.push(`${currentInput} = ${res}`);
+ if (historyLog.length > 30) historyLog.shift(); // Remove oldest
+ currentInput = res.toString();
+ } catch (e) {
+ currentInput = 'Error';
+ }
+ } else {
+ currentInput += val;
+ }
+
+ updateUI();
+ });
+ });
+
+ updateUI();
+ }
+});
diff --git a/static/js/dynamicscrolling.js b/static/js/dynamicscrolling.js
index fb1ed87..f648454 100644
--- a/static/js/dynamicscrolling.js
+++ b/static/js/dynamicscrolling.js
@@ -1,61 +1,58 @@
/*
This script is responsible for fetching new results on the same page when the user scrolls to the bottom of the page.
*/
-document.addEventListener("DOMContentLoaded", function() {
+document.addEventListener("DOMContentLoaded", function () {
const templateData = document.getElementById('template-data');
let page = parseInt(templateData.getAttribute('data-page')) || 1;
const query = templateData.getAttribute('data-query') || '';
let searchType = templateData.getAttribute('data-type') || 'text'; // Default to 'text' if not provided
let loading = false;
let hasMoreResults = true;
- const loadingIndicator = document.getElementById('message-bottom-left');
+ const loadingIndicator = document.getElementById('message-bottom-right');
let loadingTimeout;
- function loadResults(newPage) {
- if (loading || !hasMoreResults) return;
- loading = true;
+ function showLoadingMessage() {
+ loadingIndicator.classList.add('visible');
+ }
- // Show loading indicator if taking more than 150ms
- loadingTimeout = setTimeout(() => {
- loadingIndicator.style.display = 'flex';
- }, 150);
+ function hideLoadingMessage() {
+ loadingIndicator.classList.remove('visible');
+ }
+
+ function loadResults(newPage) {
+ if (loading || !hasMoreResults || hasEndOrNoResultsMessage()) return;
+
+ loading = true;
+ loadingTimeout = setTimeout(() => showLoadingMessage(), 150);
fetch(`/search?q=${encodeURIComponent(query)}&t=${encodeURIComponent(searchType)}&p=${newPage}`)
.then(response => {
- if (!response.ok) {
- throw new Error('Network response was not ok');
- }
+ if (!response.ok) throw new Error('Network response was not ok');
return response.text();
})
.then(data => {
clearTimeout(loadingTimeout);
- loadingIndicator.style.display = 'none';
+ hideLoadingMessage();
+
const parser = new DOMParser();
const doc = parser.parseFromString(data, 'text/html');
- const newResultsHTML = doc.getElementById('results').innerHTML;
- const noResultsMessage = `No results found for '${query}'. Try different keywords.`;
- const endOfResultsMessage = "Looks like this is the end of results.";
- const serverError = "Internal Server Error";
-
+ const newResults = doc.querySelectorAll('#results > *');
const resultsContainer = document.getElementById('results');
- const tempDiv = document.createElement('div');
- tempDiv.innerHTML = newResultsHTML;
- while (tempDiv.firstChild) {
- resultsContainer.appendChild(tempDiv.firstChild);
+
+ if (newResults.length === 0) {
+ hasMoreResults = false;
+ loading = false;
+ return;
}
- if (newResultsHTML.includes(noResultsMessage) || newResultsHTML.includes(endOfResultsMessage) || newResultsHTML.includes(serverError)) {
- hasMoreResults = false;
- } else {
- page = newPage;
- // Automatically load more results if content height is less than window height
- checkIfMoreResultsNeeded();
- }
+ newResults.forEach(el => resultsContainer.appendChild(el));
+ page = newPage;
+ checkIfMoreResultsNeeded();
loading = false;
})
.catch(error => {
clearTimeout(loadingTimeout);
- loadingIndicator.style.display = 'none';
+ hideLoadingMessage();
console.error('Error loading results:', error);
hasMoreResults = false;
loading = false;
@@ -63,11 +60,17 @@ document.addEventListener("DOMContentLoaded", function() {
}
function checkIfMoreResultsNeeded() {
- if (document.body.scrollHeight <= window.innerHeight && hasMoreResults) {
+ if (!hasMoreResults || hasEndOrNoResultsMessage()) return;
+
+ if (document.body.scrollHeight <= window.innerHeight) {
loadResults(page + 1);
}
}
+ function hasEndOrNoResultsMessage() {
+ return !!document.querySelector('.no-results-found');
+ }
+
window.addEventListener('scroll', () => {
if (window.innerHeight + window.scrollY >= document.body.offsetHeight) {
loadResults(page + 1);
@@ -80,7 +83,7 @@ document.addEventListener("DOMContentLoaded", function() {
console.error("No search buttons found");
} else {
buttons.forEach(btn => {
- btn.addEventListener('click', function() {
+ btn.addEventListener('click', function () {
const activeElement = document.querySelector('.search-container-results-btn .search-active');
if (activeElement) {
activeElement.classList.remove('search-active');
diff --git a/static/js/dynamicscrollingimages.js b/static/js/dynamicscrollingimages.js
index 6969a53..ef0da92 100644
--- a/static/js/dynamicscrollingimages.js
+++ b/static/js/dynamicscrollingimages.js
@@ -1,197 +1,186 @@
(function() {
- // Configuration
- const imageStatusInterval = 500; // Interval in milliseconds to check image status
- const scrollThreshold = 500; // Distance from bottom of the page to trigger loading
- const loadingIndicator = document.getElementById('message-bottom-left');
- let loadingTimer;
+ // Add loading effects to image and title
+ function addLoadingEffects(imgElement) {
+ const container = imgElement.closest('.image');
+ if (!container) return; // avoid null dereference
+
+ const title = imgElement.closest('.image').querySelector('.img_title');
+ imgElement.classList.add('loading-image');
+ title.classList.add('title-loading');
+ }
+
+ function removeLoadingEffects(imgElement) {
+ const title = imgElement.closest('.image').querySelector('.img_title');
+ imgElement.classList.remove('loading-image');
+ title.classList.remove('title-loading');
+
+ if (imgElement.src.endsWith('/images/missing.svg')) {
+ imgElement.closest('.image').remove();
+ }
+ }
+
+ // Modified handleImageError with theme-consistent error handling
+ function handleImageError(imgElement, retryCount = 3, retryDelay = 1000) {
+ const container = imgElement.closest('.image');
+ const title = container.querySelector('.img_title');
+
+ if (retryCount > 0) {
+ setTimeout(() => {
+ imgElement.src = imgElement.getAttribute('data-full');
+ imgElement.onerror = () => handleImageError(imgElement, retryCount - 1, retryDelay);
+ }, retryDelay);
+ } else {
+ imgElement.classList.remove('loading-image');
+ title.classList.remove('title-loading');
+ container.style.display = 'none';
+ }
+ }
+
+ const imageStatusInterval = 500;
+ const scrollThreshold = 500;
+ const loadingIndicator = document.getElementById('message-bottom-right'); let loadingTimer;
let isFetching = false;
let page = parseInt(document.getElementById('template-data').getAttribute('data-page')) || 1;
let query = document.getElementById('template-data').getAttribute('data-query');
let hardCacheEnabled = document.getElementById('template-data').getAttribute('data-hard-cache-enabled') === 'true';
- let noMoreImages = false; // Flag to indicate if there are no more images to load
+ let noMoreImages = false;
let imageElements = [];
let imageIds = [];
+ let imageStatusTimer;
- /**
- * Function to handle image load errors with retry logic
- * @param {HTMLElement} imgElement - The image element that failed to load
- * @param {number} retryCount - Number of retries left
- * @param {number} retryDelay - Delay between retries in milliseconds
- */
- function handleImageError(imgElement, retryCount = 3, retryDelay = 1000) {
- if (retryCount > 0) {
- setTimeout(() => {
- imgElement.src = imgElement.getAttribute('data-full');
- imgElement.onerror = function() {
- handleImageError(imgElement, retryCount - 1, retryDelay);
- };
- }, retryDelay);
- } else {
- // After retries, hide the image container or set a fallback image
- console.warn('Image failed to load after retries:', imgElement.getAttribute('data-full'));
- imgElement.parentElement.style.display = 'none'; // Hide the image container
- // Alternatively, set a fallback image:
- // imgElement.src = '/static/images/fallback.svg';
- }
+ function showLoadingMessage() {
+ loadingIndicator.classList.add('visible');
+ }
+
+ function hideLoadingMessage() {
+ loadingIndicator.classList.remove('visible');
}
- /**
- * Function to ensure the page is scrollable by loading more images if necessary
- */
function ensureScrollable() {
- if (noMoreImages) return; // Do not attempt if no more images are available
- // Check if the page is not scrollable
+ if (noMoreImages) return;
if (document.body.scrollHeight <= window.innerHeight) {
- // If not scrollable, fetch the next page
fetchNextPage();
}
}
- /**
- * Function to fetch the next page of images
- */
function fetchNextPage() {
if (isFetching || noMoreImages) return;
-
- // Start the timer for loading indicator
loadingTimer = setTimeout(() => {
- loadingIndicator.style.display = 'flex';
+ showLoadingMessage();
}, 150);
-
isFetching = true;
page += 1;
-
+
fetch(`/search?q=${encodeURIComponent(query)}&t=image&p=${page}&ajax=true`)
.then(response => response.text())
.then(html => {
- clearTimeout(loadingTimer); // Clear the timer if fetch is successful
- loadingIndicator.style.display = 'none'; // Hide the loading indicator
-
- let parser = new DOMParser();
- let doc = parser.parseFromString(html, 'text/html');
- let newImages = doc.querySelectorAll('.image');
-
+ clearTimeout(loadingTimer);
+ hideLoadingMessage();
+
+ let tempDiv = document.createElement('div');
+ tempDiv.innerHTML = html;
+ let newImages = tempDiv.querySelectorAll('.image');
+
if (newImages.length > 0) {
let resultsContainer = document.querySelector('.images');
newImages.forEach(imageDiv => {
- // Append new images to the container
- resultsContainer.appendChild(imageDiv);
+ let clonedImageDiv = imageDiv.cloneNode(true);
+ resultsContainer.appendChild(clonedImageDiv);
- // Get the img element
- let img = imageDiv.querySelector('img');
- if (img) {
- let id = img.getAttribute('data-id');
- if (id) {
- imageElements.push(img);
- imageIds.push(id);
- }
+ let img = clonedImageDiv.querySelector('img');
+ if (img && img.getAttribute('data-id')) {
+ addLoadingEffects(img);
if (hardCacheEnabled) {
- // Replace image with placeholder
- img.src = '/static/images/placeholder.svg';
- img.onerror = function() {
- handleImageError(img);
- };
+ img.src = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR42mNkYAAAAAYAAjCB0C8AAAAASUVORK5CYII=';
+ img.onerror = () => handleImageError(img);
+ imageElements.push(img);
+ imageIds.push(img.getAttribute('data-id'));
} else {
- // HardCacheEnabled is false; load images immediately
img.src = img.getAttribute('data-full');
- img.onerror = function() {
- handleImageError(img);
- };
+ img.onload = () => removeLoadingEffects(img);
+ img.onerror = () => handleImageError(img);
}
}
});
- if (hardCacheEnabled) {
- checkImageStatus();
- }
- // After appending new images, ensure the page is scrollable
+
+ if (hardCacheEnabled) checkImageStatus();
ensureScrollable();
} else {
- // No more images to load
noMoreImages = true;
}
isFetching = false;
})
.catch(error => {
- clearTimeout(loadingTimer); // Clear the timer if fetch fails
- loadingIndicator.style.display = 'none'; // Hide the loading indicator
- console.error('Error fetching next page:', error);
+ clearTimeout(loadingTimer);
+ hideLoadingMessage();
+ console.error('Fetch error:', error);
isFetching = false;
});
}
- /**
- * Function to check image status via AJAX
- */
function checkImageStatus() {
- if (!hardCacheEnabled) return;
- if (imageIds.length === 0) {
- // No images to check, do nothing
- return;
- }
+ if (!hardCacheEnabled || imageIds.length === 0) return;
- // Send AJAX request to check image status
fetch(`/image_status?image_ids=${imageIds.join(',')}`)
.then(response => response.json())
.then(statusMap => {
- imageElements = imageElements.filter(img => {
- let id = img.getAttribute('data-id');
+ const pendingImages = [];
+ const pendingIds = [];
+
+ imageElements.forEach(img => {
+ const id = img.getAttribute('data-id');
if (statusMap[id]) {
- // Image is ready, update src
img.src = statusMap[id];
- img.onerror = function() {
- handleImageError(img);
- };
- // Remove the image id from the list
- imageIds = imageIds.filter(imageId => imageId !== id);
- return false; // Remove img from imageElements
+ img.onload = () => removeLoadingEffects(img);
+ img.onerror = () => handleImageError(img);
+ } else {
+ pendingImages.push(img);
+ pendingIds.push(id);
}
- return true; // Keep img in imageElements
});
- // After updating images, ensure the page is scrollable
+
+ imageElements = pendingImages;
+ imageIds = pendingIds;
ensureScrollable();
})
.catch(error => {
- console.error('Error checking image status:', error);
+ console.error('Status check error:', error);
});
}
- // Initialize imageElements and imageIds
- imageElements = Array.from(document.querySelectorAll('img[data-id]'));
- imageIds = imageElements
- .map(img => img.getAttribute('data-id'))
- .filter(id => id); // Exclude empty IDs
+ // Initialize with loading effects
+ document.querySelectorAll('img[data-id]').forEach(img => {
+ const id = img.getAttribute('data-id');
+ if (id) {
+ addLoadingEffects(img);
+ imageElements.push(img);
+ imageIds.push(id);
+ if (hardCacheEnabled) {
+ img.src = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR42mNkYAAAAAYAAjCB0C8AAAAASUVORK5CYII=';
+ } else {
+ img.src = img.getAttribute('data-full');
+ img.onload = () => removeLoadingEffects(img);
+ }
+ img.onerror = () => handleImageError(img);
+ }
+ });
+ // Rest of your existing code remains unchanged
if (hardCacheEnabled) {
- // Replace images with placeholders
- imageElements.forEach(img => {
- img.src = '/static/images/placeholder.svg';
- });
-
- // Start checking image status
- let imageStatusTimer = setInterval(checkImageStatus, imageStatusInterval);
- checkImageStatus(); // Initial check
- } else {
- // HardCacheEnabled is false; load images immediately
- imageElements.forEach(img => {
- img.src = img.getAttribute('data-full');
- img.onerror = function() {
- handleImageError(img);
- };
- });
+ imageStatusTimer = setInterval(checkImageStatus, imageStatusInterval);
+ checkImageStatus();
}
- // After initial images are loaded, ensure the page is scrollable
window.addEventListener('load', ensureScrollable);
-
- // Infinite scrolling
- window.addEventListener('scroll', function() {
+ window.addEventListener('scroll', () => {
if (isFetching || noMoreImages) return;
-
if (window.innerHeight + window.scrollY >= document.body.offsetHeight - scrollThreshold) {
- // User scrolled near the bottom
fetchNextPage();
}
});
+ window.addEventListener('beforeunload', () => {
+ if (imageStatusTimer) clearInterval(imageStatusTimer);
+ });
})();
\ No newline at end of file
diff --git a/static/js/dynamicscrollingtext.js b/static/js/dynamicscrollingtext.js
new file mode 100644
index 0000000..98811b5
--- /dev/null
+++ b/static/js/dynamicscrollingtext.js
@@ -0,0 +1,277 @@
+(function() {
+ // Get template data and configuration
+ const templateData = document.getElementById('template-data');
+ const type = templateData.getAttribute('data-type');
+ const hardCacheEnabled = templateData.getAttribute('data-hard-cache-enabled') === 'true';
+
+ // Track all favicon/image elements and their IDs
+ let allMediaElements = [];
+ let allMediaIds = [];
+ const mediaMap = new Map();
+
+ // Add loading effects to image/favicon and associated text
+ function addLoadingEffects(imgElement) {
+ const container = imgElement.closest(type === 'image' ? '.image' : '.result_item');
+ if (!container) return;
+
+ const titleSelector = type === 'image' ? '.img_title' : '.result-url';
+ const title = container.querySelector(titleSelector);
+ imgElement.closest('.favicon-wrapper')?.classList.add('loading');
+ // if (title) title.classList.add('title-loading');
+ }
+
+ // Remove loading effects when image/favicon loads
+ function removeLoadingEffects(imgElement) {
+ const container = imgElement.closest(type === 'image' ? '.image' : '.result_item');
+ const titleSelector = type === 'image' ? '.img_title' : '.result-url';
+ const title = container?.querySelector(titleSelector);
+ imgElement.closest('.favicon-wrapper')?.classList.remove('loading');
+ if (title) title.classList.remove('title-loading');
+
+ if (type === 'image' && imgElement.src.endsWith('/images/globe.svg')) {
+ container.remove();
+ }
+ }
+
+ // Handle image/favicon loading errors
+ function handleImageError(imgElement, retryCount = 8, retryDelay = 500) {
+ const isFavicon = !!imgElement.closest('.favicon-wrapper');
+ const container = imgElement.closest(type === 'image' ? '.image' : '.result_item');
+ const titleSelector = type === 'image' ? '.img_title' : '.result-url';
+ const title = container?.querySelector(titleSelector);
+ const fullURL = imgElement.getAttribute('data-full');
+
+ if (retryCount > 0 && !imgElement.dataset.checked404) {
+ imgElement.dataset.checked404 = '1'; // avoid infinite loop
+
+ fetch(fullURL, { method: 'HEAD' })
+ .then(res => {
+ if (res.status === 404) {
+ fallbackToGlobe(imgElement);
+ } else {
+ setTimeout(() => {
+ imgElement.src = fullURL;
+ imgElement.onerror = () => handleImageError(imgElement, retryCount - 1, retryDelay);
+ }, retryDelay);
+ }
+ })
+ .catch(() => {
+ fallbackToGlobe(imgElement);
+ });
+ } else {
+ fallbackToGlobe(imgElement);
+ }
+
+ function fallbackToGlobe(imgElement) {
+ imgElement.closest('.favicon-wrapper')?.classList.remove('loading');
+ if (title) title.classList.remove('title-loading');
+
+ if (isFavicon) {
+ const wrapper = imgElement.closest('.favicon-wrapper') || imgElement.parentElement;
+ const svg = document.createElementNS("http://www.w3.org/2000/svg", "svg");
+ svg.setAttribute("xmlns", "http://www.w3.org/2000/svg");
+ svg.setAttribute("viewBox", "0 -960 960 960");
+ svg.setAttribute("height", imgElement.height || "16");
+ svg.setAttribute("width", imgElement.width || "16");
+ svg.setAttribute("fill", "currentColor");
+ svg.classList.add("favicon", "globe-fallback");
+ svg.innerHTML = ``;
+ imgElement.remove();
+ wrapper.appendChild(svg);
+ } else if (type === 'image') {
+ container?.remove();
+ }
+ }
+ }
+
+ // Shared configuration
+ const statusCheckInterval = 500;
+ const scrollThreshold = 500;
+ const loadingIndicator = document.getElementById('message-bottom-right');
+ let loadingTimer;
+ let isFetching = false;
+ let page = parseInt(templateData.getAttribute('data-page')) || 1;
+ let query = templateData.getAttribute('data-query');
+ let noMoreImages = false;
+
+ function showLoadingMessage() {
+ loadingIndicator.classList.add('visible');
+ }
+
+ function hideLoadingMessage() {
+ loadingIndicator.classList.remove('visible');
+ }
+
+ function ensureScrollable() {
+ if (noMoreImages) return;
+ if (document.body.scrollHeight <= window.innerHeight) {
+ fetchNextPage();
+ }
+ }
+
+ // Register a new media element for tracking
+ function registerMediaElement(imgElement) {
+ const id = imgElement.getAttribute('data-id');
+ if (!id) return;
+
+ let wrapper = imgElement.closest('.favicon-wrapper');
+ if (!wrapper) {
+ wrapper = document.createElement('span');
+ wrapper.classList.add('favicon-wrapper');
+ imgElement.replaceWith(wrapper);
+ wrapper.appendChild(imgElement);
+ }
+
+ addLoadingEffects(imgElement);
+
+ if (hardCacheEnabled) {
+ imgElement.src = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR42mNkYAAAAAYAAjCB0C8AAAAASUVORK5CYII=';
+ imgElement.onerror = () => handleImageError(imgElement, 3, 1000);
+ } else {
+ imgElement.src = imgElement.getAttribute('data-full');
+ imgElement.onload = () => removeLoadingEffects(imgElement);
+ imgElement.onerror = () => handleImageError(imgElement, 3, 1000);
+ }
+
+ // Track it
+ if (!mediaMap.has(id)) {
+ mediaMap.set(id, []);
+ }
+ mediaMap.get(id).push(imgElement);
+ }
+
+ // Check status of all tracked media elements
+ function checkMediaStatus() {
+ const allIds = Array.from(mediaMap.keys());
+ if (allIds.length === 0) return;
+
+ const idGroups = [];
+ for (let i = 0; i < allIds.length; i += 50) {
+ idGroups.push(allIds.slice(i, i + 50));
+ }
+
+ const processGroups = async () => {
+ const stillPending = new Map();
+
+ for (const group of idGroups) {
+ try {
+ const response = await fetch(`/image_status?image_ids=${group.join(',')}`);
+ const statusMap = await response.json();
+
+ group.forEach(id => {
+ const elements = mediaMap.get(id);
+ const resolved = statusMap[id];
+ if (!elements) return;
+ if (resolved && resolved !== 'pending') {
+ elements.forEach(img => {
+ img.src = resolved;
+ img.onload = () => removeLoadingEffects(img);
+ img.onerror = () => handleImageError(img);
+ });
+ } else {
+ stillPending.set(id, elements);
+ }
+ });
+ } catch (err) {
+ console.error('Status check failed:', err);
+ group.forEach(id => {
+ if (mediaMap.has(id)) {
+ stillPending.set(id, mediaMap.get(id));
+ }
+ });
+ }
+ }
+
+ mediaMap.clear();
+ for (const [id, imgs] of stillPending) {
+ mediaMap.set(id, imgs);
+ }
+ };
+
+ processGroups();
+ }
+
+ function fetchNextPage() {
+ if (isFetching || noMoreImages) return;
+
+ loadingTimer = setTimeout(() => {
+ showLoadingMessage();
+ }, 150);
+
+ isFetching = true;
+ page += 1;
+
+ fetch(`/search?q=${encodeURIComponent(query)}&t=${type}&p=${page}&ajax=true`)
+ .then(response => response.text())
+ .then(html => {
+ clearTimeout(loadingTimer);
+ hideLoadingMessage();
+
+ let tempDiv = document.createElement('div');
+ tempDiv.innerHTML = html;
+ let newItems = tempDiv.querySelectorAll(type === 'image' ? '.image' : '.result_item');
+
+ if (newItems.length > 0) {
+ let resultsContainer = document.querySelector(type === 'image' ? '.images' : '.results');
+ newItems.forEach(item => {
+ let clonedItem = item.cloneNode(true);
+ resultsContainer.appendChild(clonedItem);
+
+ // Register any new media elements
+ const img = clonedItem.querySelector('img[data-id]');
+ if (img) {
+ registerMediaElement(img);
+ }
+ });
+
+ ensureScrollable();
+ } else {
+ noMoreImages = true;
+ }
+ isFetching = false;
+ })
+ .catch(error => {
+ clearTimeout(loadingTimer);
+ hideLoadingMessage();
+ console.error('Fetch error:', error);
+ isFetching = false;
+ });
+ }
+
+ // Initialize all existing media elements
+ function initializeMediaElements() {
+ document.querySelectorAll('img[data-id]').forEach(img => {
+ registerMediaElement(img);
+ });
+ }
+
+ function startStatusPolling() {
+ checkMediaStatus();
+ setInterval(checkMediaStatus, statusCheckInterval);
+ }
+
+ if (document.readyState === 'complete') {
+ initializeMediaElements();
+ if (hardCacheEnabled) startStatusPolling();
+ } else {
+ window.addEventListener('load', () => {
+ initializeMediaElements();
+ if (hardCacheEnabled) startStatusPolling();
+ });
+ }
+
+ // Infinite scroll handler
+ window.addEventListener('scroll', () => {
+ if (isFetching || noMoreImages) return;
+ if (window.innerHeight + window.scrollY >= document.body.offsetHeight - scrollThreshold) {
+ fetchNextPage();
+ }
+ });
+
+ // // Clean up on page unload
+ // window.addEventListener('beforeunload', () => {
+ // if (statusCheckTimeout) {
+ // clearTimeout(statusCheckTimeout);
+ // }
+ // });
+})();
\ No newline at end of file
diff --git a/static/js/imageviewer.js b/static/js/imageviewer.js
index 4bd667f..e04b350 100644
--- a/static/js/imageviewer.js
+++ b/static/js/imageviewer.js
@@ -28,26 +28,41 @@ document.addEventListener('DOMContentLoaded', function() {
-
- Show source website
- Show in fullscreen
-
+
+
`;
const imageView = viewerOverlay.querySelector('#image-viewer');
- if (!imageView) {
- console.error('imageView is null');
- }
-
const imagesContainer = document.querySelector('.images');
- if (!imagesContainer) {
- console.error('imagesContainer is null');
- }
function openImageViewer(element) {
- initializeImageList(); // Update the image list
-
+ initializeImageList();
const parentImageDiv = element.closest('.image');
if (!parentImageDiv) return;
@@ -61,75 +76,62 @@ document.addEventListener('DOMContentLoaded', function() {
document.body.classList.add('viewer-open');
viewerOverlay.style.display = 'block';
- imageView.classList.remove('image_hide');
- imageView.classList.add('image_show');
+ imageView.classList.replace('image_hide', 'image_show');
}
+ let fullImageUrl, sourceUrl, proxyFullUrl;
+
function displayImage(index) {
if (index < 0 || index >= imageList.length) return;
- // Remove the `.image_selected` class from all images
imageList.forEach(img => {
const parentImageDiv = img.closest('.image');
- if (parentImageDiv) {
- parentImageDiv.classList.remove('image_selected');
- }
+ parentImageDiv?.classList.remove('image_selected');
});
-
+
const imgElement = imageList[index];
const parentImageDiv = imgElement.closest('.image');
+ parentImageDiv?.classList.add('image_selected');
- if (!parentImageDiv) {
- console.warn('Parent image div not found');
- return;
- }
+ fullImageUrl = imgElement.getAttribute('data-full') || imgElement.src;
+ sourceUrl = imgElement.getAttribute('data-source');
+ proxyFullUrl = imgElement.getAttribute('data-proxy-full') || fullImageUrl;
- // Add the `.image_selected` class to the currently displayed image
- parentImageDiv.classList.add('image_selected');
-
- // Use the `data-full` attribute for the full image URL
- let fullImageUrl = imgElement.getAttribute('data-full') || imgElement.src;
- const title = imgElement.alt || 'Untitled';
-
- // Get the source URL from the data-source attribute
- const sourceUrl = imgElement.getAttribute('data-source');
-
- // Fallback logic: if sourceUrl is null, use `data-proxy-full` or a meaningful default
- const proxyFullUrl = imgElement.getAttribute('data-proxy-full') || fullImageUrl;
-
- // Elements in the viewer
const viewerImage = document.getElementById('viewer-image');
const viewerTitle = document.getElementById('viewer-title');
- const fullSizeLink = document.getElementById('viewer-full-size-link');
- const proxySizeLink = document.getElementById('viewer-proxy-size-link');
-
- viewerTitle.textContent = title;
- fullSizeLink.href = sourceUrl || proxyFullUrl;
-
- // Remove previous event listeners to avoid stacking
- viewerImage.onload = null;
- viewerImage.onerror = null;
-
- // Set up the error handler to switch to the proxy image if the full image fails to load
- viewerImage.onerror = function() {
- // Use the proxy image as a fallback
- viewerImage.src = proxyFullUrl;
- proxySizeLink.href = proxyFullUrl;
- };
-
- // Set up the load handler to ensure the proxySizeLink is set correctly if the image loads
- viewerImage.onload = function() {
- proxySizeLink.href = fullImageUrl;
- };
-
- // Start loading the image
+
+ viewerTitle.textContent = imgElement.alt || 'Untitled';
+
+ viewerImage.onerror = () => viewerImage.src = proxyFullUrl;
+ viewerImage.onload = () => {};
+
viewerImage.src = fullImageUrl;
- }
+ }
- document.body.addEventListener('click', function(e) {
- let target = e.target;
- let clickableElement = target.closest('img.clickable, .img_title.clickable');
+ document.getElementById('viewer-copy-link').onclick = () => {
+ navigator.clipboard.writeText(window.location.origin + fullImageUrl).catch(console.error);
+ };
+ document.getElementById('viewer-open-image').onclick = () => {
+ window.open(fullImageUrl, '_blank');
+ };
+
+ document.getElementById('viewer-open-source').onclick = () => {
+ window.open(sourceUrl || proxyFullUrl, '_blank');
+ };
+
+ document.getElementById('viewer-download-image').onclick = (event) => {
+ event.stopPropagation();
+ const a = document.createElement('a');
+ a.href = fullImageUrl;
+ a.download = fullImageUrl.split('/').pop();
+ document.body.appendChild(a);
+ a.click();
+ document.body.removeChild(a);
+ };
+
+ document.body.addEventListener('click', e => {
+ const clickableElement = e.target.closest('img.clickable, .img_title.clickable');
if (clickableElement) {
e.preventDefault();
openImageViewer(clickableElement);
@@ -137,65 +139,31 @@ document.addEventListener('DOMContentLoaded', function() {
});
function closeImageViewer() {
- imageView.classList.remove('image_show');
- imageView.classList.add('image_hide');
+ imageView.classList.replace('image_show', 'image_hide');
viewerOpen = false;
currentIndex = -1;
imagesContainer.classList.add('images_viewer_hidden');
document.body.classList.remove('viewer-open');
viewerOverlay.style.display = 'none';
-
- // Remove `.image_selected` from all images
- imageList.forEach(img => {
- const parentImageDiv = img.closest('.image');
- if (parentImageDiv) {
- parentImageDiv.classList.remove('image_selected');
- }
- });
+
+ imageList.forEach(img => img.closest('.image')?.classList.remove('image_selected'));
}
- // Navigation functions
- function showPreviousImage() {
- if (currentIndex > 0) {
- currentIndex--;
- displayImage(currentIndex);
- }
- }
+ document.getElementById('viewer-close-button').onclick = closeImageViewer;
+ document.getElementById('viewer-prev-button').onclick = () => currentIndex > 0 && displayImage(--currentIndex);
+ document.getElementById('viewer-next-button').onclick = () => currentIndex < imageList.length - 1 && displayImage(++currentIndex);
- function showNextImage() {
- if (currentIndex < imageList.length - 1) {
- currentIndex++;
- displayImage(currentIndex);
- }
- }
-
- // Event listeners for navigation and closing
- document.getElementById('viewer-close-button').addEventListener('click', closeImageViewer);
- document.getElementById('viewer-prev-button').addEventListener('click', showPreviousImage);
- document.getElementById('viewer-next-button').addEventListener('click', showNextImage);
-
- // Close viewer when clicking outside the image
- document.addEventListener('click', function(e) {
- if (viewerOpen) {
- const target = e.target;
- const clickedInsideViewer = viewerOverlay.contains(target) || target.closest('.image');
- if (!clickedInsideViewer) {
- closeImageViewer();
- }
+ document.addEventListener('click', e => {
+ if (viewerOpen && !viewerOverlay.contains(e.target) && !e.target.closest('.image')) {
+ closeImageViewer();
}
});
- // Handle keyboard events for closing and navigation
- document.addEventListener('keydown', function(e) {
- if (viewerOpen) {
- if (e.key === 'Escape') {
- closeImageViewer();
- } else if (e.key === 'ArrowLeft') {
- showPreviousImage();
- } else if (e.key === 'ArrowRight') {
- showNextImage();
- }
- }
- });
-});
+ document.addEventListener('keydown', e => {
+ if (!viewerOpen) return;
+ if (e.key === 'Escape') closeImageViewer();
+ if (e.key === 'ArrowLeft' && currentIndex > 0) displayImage(--currentIndex);
+ if (e.key === 'ArrowRight' && currentIndex < imageList.length - 1) displayImage(++currentIndex);
+ });
+});
\ No newline at end of file
diff --git a/templates/files.html b/templates/files.html
index ff35355..ec6127d 100755
--- a/templates/files.html
+++ b/templates/files.html
@@ -1,5 +1,6 @@
+
@@ -11,45 +12,54 @@
-
+
+
+
+
+
@@ -61,12 +71,9 @@
-

-
QGato
-
A open-source private search engine.
+

+
{{ translate "site_name" }}
+
{{ translate "site_description" }}