Compare commits
No commits in common. "main" and "indexing" have entirely different histories.
153 changed files with 3166 additions and 9319 deletions
QGato CLI Release Build workflow (deleted)
@@ -1,158 +0,0 @@
-name: QGato CLI Release Build
-
-on:
-  workflow_dispatch: {}
-
-jobs:
-  build:
-    runs-on: debian
-
-    steps:
-      - name: Checkout source
-        uses: actions/checkout@v4
-
-      - name: Install latest Go
-        run: |
-          echo "📡 Fetching latest Go version string from go.dev..."
-          GO_FULL=$(curl -sSL https://go.dev/VERSION?m=text | head -n1)
-          echo "🔍 Latest Go version: $GO_FULL"
-
-          ARCHIVE="${GO_FULL}.linux-amd64.tar.gz"
-          curl -LO "https://go.dev/dl/${ARCHIVE}"
-
-          mkdir -p "$HOME/.local/go"
-          tar -C "$HOME/.local/go" --strip-components=1 -xzf "$ARCHIVE"
-
-          echo "$HOME/.local/go/bin" >> $GITHUB_PATH
-          echo "✅ Go installed locally to $HOME/.local/go"
-
-      - name: Check Go version
-        run: go version
-
-      - name: Extract version from version.txt
-        id: version
-        run: |
-          VERSION=$(cat version.txt)
-          VERSION="v${VERSION#v}"
-          echo "$VERSION" > version.txt
-          echo "✅ Detected version: $VERSION"
-
-      - name: Build all targets
-        run: |
-          mkdir -p bundles
-
-          PLATFORMS=(
-            "linux/amd64"
-            "linux/arm64"
-            "linux/arm/v7"
-            "linux/arm/v6"
-            "linux/riscv64"
-            "windows/amd64"
-            "windows/arm64"
-          )
-
-          for TARGET in "${PLATFORMS[@]}"; do
-            OS=$(echo "$TARGET" | cut -d/ -f1)
-            ARCH=$(echo "$TARGET" | cut -d/ -f2)
-            VARIANT=$(echo "$TARGET" | cut -d/ -f3)
-
-            OUT="qgato-${OS}-${ARCH}"
-            [ -n "$VARIANT" ] && OUT="${OUT}${VARIANT}"
-            BIN="$OUT"
-            [ "$OS" = "windows" ] && BIN="${OUT}.exe"
-
-            echo "🔨 Building $BIN"
-
-            # Disable CGO for cross-compiled targets (everything except native linux/amd64)
-            if [ "$TARGET" = "linux/amd64" ]; then
-              export CGO_ENABLED=1
-            else
-              export CGO_ENABLED=0
-            fi
-
-            if [ "$ARCH" = "arm" ]; then
-              case "$VARIANT" in
-                v7) GOARM=7 ;;
-                v6) GOARM=6 ;;
-                *) GOARM=7 ;;
-              esac
-              GOOS=$OS GOARCH=arm GOARM=$GOARM \
-                go build -ldflags="-s -w" -o "$BIN" ./.
-            else
-              GOOS=$OS GOARCH=$ARCH \
-                go build -ldflags="-s -w" -o "$BIN" ./.
-            fi
-
-            echo "📦 Packaging $BIN with required files..."
-
-            PKG_DIR="bundle-$OUT"
-            mkdir "$PKG_DIR"
-            cp "$BIN" "$PKG_DIR/"
-            cp -r lang static templates config.ini "$PKG_DIR/" 2>/dev/null || true
-
-            if [ "$OS" = "windows" ]; then
-              zip -r "bundles/$OUT.zip" "$PKG_DIR"
-            else
-              tar -czf "bundles/$OUT.tar.gz" "$PKG_DIR"
-            fi
-
-            rm -rf "$PKG_DIR" "$BIN"
-          done
-
-      - name: Create Forgejo release
-        run: |
-          TAG_NAME=$(cat version.txt)
-          echo "📦 Creating release for tag: $TAG_NAME"
-
-          DOWNLOAD_BASE="https://weforge.xyz/spitfire/Search/releases/download/$TAG_NAME"
-
-          echo "| Arch | Linux Bundle (.tar.gz) | Windows Bundle (.zip) |" > release.md
-          echo "|---------|---------------------------------------------------|--------------------------------------------------|" >> release.md
-          echo "| amd64 | [qgato-linux-amd64.tar.gz]($DOWNLOAD_BASE/qgato-linux-amd64.tar.gz) | [qgato-windows-amd64.zip]($DOWNLOAD_BASE/qgato-windows-amd64.zip) |" >> release.md
-          echo "| arm64 | [qgato-linux-arm64.tar.gz]($DOWNLOAD_BASE/qgato-linux-arm64.tar.gz) | [qgato-windows-arm64.zip]($DOWNLOAD_BASE/qgato-windows-arm64.zip) |" >> release.md
-          echo "| armv7 | [qgato-linux-armv7.tar.gz]($DOWNLOAD_BASE/qgato-linux-armv7.tar.gz) | — |" >> release.md
-          echo "| armv6 | [qgato-linux-armv6.tar.gz]($DOWNLOAD_BASE/qgato-linux-armv6.tar.gz) | — |" >> release.md
-          echo "| riscv64 | [qgato-linux-riscv64.tar.gz]($DOWNLOAD_BASE/qgato-linux-riscv64.tar.gz) | — |" >> release.md
-
-          RELEASE_BODY=$(cat release.md | jq -Rs .)
-
-          curl -sSL -X POST "$FORGEJO_API/repos/${OWNER}/${REPO}/releases" \
-            -H "Authorization: token $FORGEJO_TOKEN" \
-            -H "Content-Type: application/json" \
-            -d "{
-              \"tag_name\": \"$TAG_NAME\",
-              \"name\": \"$TAG_NAME\",
-              \"body\": $RELEASE_BODY,
-              \"draft\": false,
-              \"prerelease\": false
-            }"
-        env:
-          FORGEJO_API: https://weforge.xyz/api/v1
-          OWNER: spitfire
-          REPO: Search
-          FORGEJO_TOKEN: ${{ secrets.FORGEJO_TOKEN }}
-
-      - name: Upload all bundles
-        run: |
-          TAG_NAME=$(cat version.txt)
-          RELEASE_ID=$(curl -s -H "Authorization: token $FORGEJO_TOKEN" \
-            "$FORGEJO_API/repos/${OWNER}/${REPO}/releases/tags/$TAG_NAME" | jq -r .id)
-
-          for FILE in bundles/*; do
-            NAME=$(basename "$FILE")
-            echo "📤 Uploading $NAME"
-
-            CONTENT_TYPE="application/octet-stream"
-            [[ "$FILE" == *.zip ]] && CONTENT_TYPE="application/zip"
-            [[ "$FILE" == *.tar.gz ]] && CONTENT_TYPE="application/gzip"
-
-            curl -sSL -X POST "$FORGEJO_API/repos/${OWNER}/${REPO}/releases/${RELEASE_ID}/assets?name=$NAME" \
-              -H "Authorization: token $FORGEJO_TOKEN" \
-              -H "Content-Type: $CONTENT_TYPE" \
-              --data-binary "@$FILE"
-          done
-        env:
-          FORGEJO_API: https://weforge.xyz/api/v1
-          OWNER: spitfire
-          REPO: Search
-          FORGEJO_TOKEN: ${{ secrets.FORGEJO_TOKEN }}
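The heart of the deleted workflow is its cross-compilation loop: split each `os/arch[/variant]` target, derive `GOOS`/`GOARCH` (plus `GOARM` for 32-bit ARM), and keep CGO enabled only for the native `linux/amd64` build. A minimal standalone sketch of that same loop in Go, driving the toolchain via `os/exec`; the platform list and `qgato` output naming mirror the workflow above, while the package layout and error handling are assumptions for illustration:

```go
package main

import (
	"fmt"
	"os"
	"os/exec"
	"strings"
)

func main() {
	platforms := []string{
		"linux/amd64", "linux/arm64", "linux/arm/v7",
		"linux/arm/v6", "linux/riscv64", "windows/amd64", "windows/arm64",
	}
	for _, target := range platforms {
		parts := strings.Split(target, "/")
		goos, goarch := parts[0], parts[1]

		out := fmt.Sprintf("qgato-%s-%s", goos, goarch)
		env := append(os.Environ(), "GOOS="+goos, "GOARCH="+goarch)

		// CGO only for the native target, as in the workflow.
		if target == "linux/amd64" {
			env = append(env, "CGO_ENABLED=1")
		} else {
			env = append(env, "CGO_ENABLED=0")
		}

		// 32-bit ARM needs GOARM; the "v7" variant suffix becomes GOARM=7.
		if goarch == "arm" && len(parts) == 3 {
			env = append(env, "GOARM="+strings.TrimPrefix(parts[2], "v"))
			out += parts[2] // e.g. qgato-linux-armv7
		}
		if goos == "windows" {
			out += ".exe"
		}

		cmd := exec.Command("go", "build", "-ldflags=-s -w", "-o", out, ".")
		cmd.Env = env
		if err := cmd.Run(); err != nil {
			fmt.Fprintf(os.Stderr, "build failed for %s: %v\n", target, err)
		}
	}
}
```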
.gitignore (vendored, 1 change)
@@ -6,5 +6,4 @@ cache/
 *.min.js
 *.min.css
 qgato
-qgato.exe
 test.py
README.md (24 changes)
@@ -47,11 +47,11 @@ A self-hosted private search engine designed to be scalable and more resource-ef
 
 ### For Self-Hosting
 
-- **[Easy to Set Up](https://weforge.xyz/Spitfire/Search/wiki/Setup-Other)** - Quick and straightforward setup process for anyone.
+- **Self-hosted option** - Run on your own server for even more privacy.
 - **Lightweight** - Low memory footprint (15-30MiB) even during searches.
 - **Decentralized** - No single point of failure.
 - **Results caching in RAM** - Faster response times through caching.
-- **[Configurable](https://weforge.xyz/Spitfire/Search/wiki/Config)** - Fully customizable via the `config.ini` file.
+- **Configurable** - Tweak features via `config.ini`.
 - **Flexible media support** - Images optionally stored on HDD/SSD for caching and improved response time.
 
 ### Results Sources
@@ -73,20 +73,30 @@ A self-hosted private search engine designed to be scalable and more resource-ef
 
 ### Running the QGato
 
+Linux:
 
 ```bash
 git clone https://weforge.xyz/Spitfire/Search.git
 cd Search
-go run .
+chmod +x ./run.sh
+./run.sh
+```
+
+Windows:
+
+```powershell
+git clone https://weforge.xyz/Spitfire/Search.git
+cd Search
+.\run.bat
 ```
 
 *Its that easy!*
 
 ### Configuring
 
-- Configuration is done via the `config.ini` file.
-- On first start, you will be guided through the basic setup.
-- For more advanced configuration options, visit the [Wiki Configuration Page](https://weforge.xyz/Spitfire/Search/wiki/Config).
+Configuration is done via the ``config.ini`` file.
+On first start, you will be guided through the basic setup.
+More advanced setup and all options will be listed here later, as this is still being updated.
 
 
 ## License
agent.go (162 changes)
@@ -11,13 +11,11 @@ import (
 	"time"
 )
 
-// BrowserVersion represents the version & global usage from the caniuse data
 type BrowserVersion struct {
 	Version string  `json:"version"`
 	Global  float64 `json:"global"`
 }
 
-// BrowserData holds sets of versions for Firefox and Chromium
 type BrowserData struct {
 	Firefox  []BrowserVersion `json:"firefox"`
 	Chromium []BrowserVersion `json:"chrome"`
@@ -30,7 +28,6 @@ var (
 	}{
 		data: make(map[string]string),
 	}
-
 	browserCache = struct {
 		sync.RWMutex
 		data BrowserData
@@ -40,19 +37,26 @@ var (
 	}
 )
 
-// fetchLatestBrowserVersions retrieves usage data from caniuse.com’s fulldata JSON.
 func fetchLatestBrowserVersions() (BrowserData, error) {
-	const urlCaniuse = "https://raw.githubusercontent.com/Fyrd/caniuse/master/fulldata-json/data-2.0.json"
+	url := "https://raw.githubusercontent.com/Fyrd/caniuse/master/fulldata-json/data-2.0.json"
 
+	// // Optional: skip TLS verification to avoid certificate errors
+	// transport := &http.Transport{
+	// 	TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
+	// }
+
+	// Increase the HTTP client timeout
 	client := &http.Client{
 		Timeout: 30 * time.Second,
+		// Transport: transport,
 	}
 
-	req, err := http.NewRequest("GET", urlCaniuse, nil)
+	// Build the request manually to set headers
+	req, err := http.NewRequest("GET", url, nil)
 	if err != nil {
 		return BrowserData{}, err
 	}
-	// Set a simple custom User-Agent and language
+	// Custom user agent and English language preference
 	req.Header.Set("User-Agent", "MyCustomAgent/1.0 (compatible; +https://example.com)")
 	req.Header.Set("Accept-Language", "en-US,en;q=0.9")
 
@@ -67,42 +71,36 @@ func fetchLatestBrowserVersions() (BrowserData, error) {
 		return BrowserData{}, err
 	}
 
-	var rawData map[string]any
+	var rawData map[string]interface{}
 	if err := json.Unmarshal(body, &rawData); err != nil {
 		return BrowserData{}, err
 	}
 
-	stats, ok := rawData["agents"].(map[string]any)
-	if !ok {
-		return BrowserData{}, fmt.Errorf("unexpected JSON structure (no 'agents' field)")
-	}
+	stats := rawData["agents"].(map[string]interface{})
 
 	var data BrowserData
 
-	// Extract Firefox data
-	if firefoxData, ok := stats["firefox"].(map[string]any); ok {
-		if usageMap, ok := firefoxData["usage_global"].(map[string]any); ok {
-			for version, usage := range usageMap {
-				val, _ := usage.(float64)
-				data.Firefox = append(data.Firefox, BrowserVersion{Version: version, Global: val})
-			}
+	if firefoxData, ok := stats["firefox"].(map[string]interface{}); ok {
+		for version, usage := range firefoxData["usage_global"].(map[string]interface{}) {
+			data.Firefox = append(data.Firefox, BrowserVersion{
+				Version: version,
+				Global:  usage.(float64),
+			})
 		}
 	}
 
-	// Extract Chrome data
-	if chromeData, ok := stats["chrome"].(map[string]any); ok {
-		if usageMap, ok := chromeData["usage_global"].(map[string]any); ok {
-			for version, usage := range usageMap {
-				val, _ := usage.(float64)
-				data.Chromium = append(data.Chromium, BrowserVersion{Version: version, Global: val})
-			}
+	if chromeData, ok := stats["chrome"].(map[string]interface{}); ok {
+		for version, usage := range chromeData["usage_global"].(map[string]interface{}) {
+			data.Chromium = append(data.Chromium, BrowserVersion{
+				Version: version,
+				Global:  usage.(float64),
+			})
 		}
 	}
 
 	return data, nil
 }
 
-// getLatestBrowserVersions checks the cache and fetches new data if expired
 func getLatestBrowserVersions() (BrowserData, error) {
 	browserCache.RLock()
 	if time.Now().Before(browserCache.expires) {
@@ -119,36 +117,37 @@ func getLatestBrowserVersions() (BrowserData, error) {
 
 	browserCache.Lock()
 	browserCache.data = data
-	browserCache.expires = time.Now().Add(24 * time.Hour) // Refresh daily
+	browserCache.expires = time.Now().Add(24 * time.Hour)
 	browserCache.Unlock()
 
 	return data, nil
 }
 
-// randomUserAgent picks a random browser (Firefox/Chromium), selects a version based on usage,
-// picks an OS string, and composes a User-Agent header.
 func randomUserAgent() (string, error) {
 	browsers, err := getLatestBrowserVersions()
 	if err != nil {
 		return "", err
 	}
 
-	r := rand.New(rand.NewSource(time.Now().UnixNano()))
+	rand := rand.New(rand.NewSource(time.Now().UnixNano()))
 
-	// Overall usage: 85% chance for Chromium, 15% for Firefox
+	// Simulated browser usage statistics (in percentages)
 	usageStats := map[string]float64{
-		"Firefox":  15.0,
-		"Chromium": 85.0,
+		"Firefox":  30.0,
+		"Chromium": 70.0,
 	}
 
-	// Weighted random selection of the browser type
+	// Calculate the probabilities for the versions
+	probabilities := []float64{0.5, 0.25, 0.125, 0.0625, 0.03125, 0.015625, 0.0078125, 0.00390625}
+
+	// Select a browser based on usage statistics
 	browserType := ""
-	randVal := r.Float64() * 100
+	randVal := rand.Float64() * 100
 	cumulative := 0.0
-	for bType, usage := range usageStats {
+	for browser, usage := range usageStats {
 		cumulative += usage
 		if randVal < cumulative {
-			browserType = bType
+			browserType = browser
 			break
 		}
 	}
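Both sides of the hunk above select the browser the same way: draw a value in [0, 100) and walk the cumulative usage shares until the draw lands in a bucket. A self-contained sketch of that weighted pick; the function name and demo weights are illustrative, not the repository's (Go's random map iteration order is harmless here, since each key still covers a slice proportional to its weight):

```go
package main

import (
	"fmt"
	"math/rand"
)

// pickWeighted returns a key with probability proportional to its weight.
func pickWeighted(r *rand.Rand, weights map[string]float64) string {
	total := 0.0
	for _, w := range weights {
		total += w
	}
	randVal := r.Float64() * total
	cumulative := 0.0
	var last string
	for name, w := range weights {
		cumulative += w
		last = name
		if randVal < cumulative {
			return name
		}
	}
	return last // guard against float rounding at the upper boundary
}

func main() {
	r := rand.New(rand.NewSource(1))
	counts := map[string]int{}
	for i := 0; i < 10000; i++ {
		counts[pickWeighted(r, map[string]float64{"Firefox": 15, "Chromium": 85})]++
	}
	fmt.Println(counts) // roughly 15% Firefox, 85% Chromium
}
```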
@@ -165,16 +164,14 @@ func randomUserAgent() (string, error) {
 		return "", fmt.Errorf("no versions found for browser: %s", browserType)
 	}
 
-	// Sort by global usage descending
+	// Sort versions by usage (descending order)
 	sort.Slice(versions, func(i, j int) bool {
 		return versions[i].Global > versions[j].Global
 	})
 
-	// Probability distribution for top few versions
-	probabilities := []float64{0.5, 0.25, 0.125, 0.0625, 0.03125, 0.015625, 0.0078125, 0.00390625}
-
+	// Select a version based on the probabilities
 	version := ""
-	randVal = r.Float64()
+	randVal = rand.Float64()
 	cumulative = 0.0
 	for i, p := range probabilities {
 		cumulative += p
@@ -184,72 +181,68 @@ func randomUserAgent() (string, error) {
 		}
 	}
 
-	// Fallback to the least used version if none matched
+	// Fallback to the last version if none matched
 	if version == "" {
 		version = versions[len(versions)-1].Version
 	}
 
-	userAgent := generateUserAgent(browserType, version, r)
+	// Generate the user agent string
+	userAgent := generateUserAgent(browserType, version)
 	return userAgent, nil
 }
 
-// generateUserAgent composes the final UA string given the browser, version, and OS.
-func generateUserAgent(browser, version string, r *rand.Rand) string {
+func generateUserAgent(browser, version string) string {
 	oses := []struct {
 		os          string
 		probability float64
 	}{
 		{"Windows NT 10.0; Win64; x64", 44.0},
-		{"X11; Linux x86_64", 2.0},
-		{"X11; Ubuntu; Linux x86_64", 2.0},
+		{"Windows NT 11.0; Win64; x64", 44.0},
+		{"X11; Linux x86_64", 1.0},
+		{"X11; Ubuntu; Linux x86_64", 1.0},
 		{"Macintosh; Intel Mac OS X 10_15_7", 10.0},
 	}
 
-	// Weighted random selection for OS
-	randVal := r.Float64() * 100
+	// Select an OS based on probabilities
+	randVal := rand.Float64() * 100
 	cumulative := 0.0
-	selectedOS := oses[0].os // Default in case distribution is off
-	for _, entry := range oses {
-		cumulative += entry.probability
+	selectedOS := ""
+	for _, os := range oses {
+		cumulative += os.probability
 		if randVal < cumulative {
-			selectedOS = entry.os
+			selectedOS = os.os
 			break
 		}
 	}
 
 	switch browser {
 	case "Firefox":
-		// Example: Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:117.0) Gecko/20100101 Firefox/117.0
 		return fmt.Sprintf("Mozilla/5.0 (%s; rv:%s) Gecko/20100101 Firefox/%s", selectedOS, version, version)
 	case "Chromium":
-		// Example: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.5938.132 Safari/537.36
 		return fmt.Sprintf("Mozilla/5.0 (%s) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36", selectedOS, version)
-	default:
-		return ""
 	}
+	return ""
 }
 
-// updateCachedUserAgents randomly updates half of the cached UAs to new versions
 func updateCachedUserAgents(newVersions BrowserData) {
 	cache.Lock()
 	defer cache.Unlock()
 
-	r := rand.New(rand.NewSource(time.Now().UnixNano()))
 	for key, userAgent := range cache.data {
-		if r.Float64() < 0.5 {
-			updatedUserAgent := updateUserAgentVersion(userAgent, newVersions, r)
+		randVal := rand.Float64()
+		if randVal < 0.5 {
+			updatedUserAgent := updateUserAgentVersion(userAgent, newVersions)
 			cache.data[key] = updatedUserAgent
 		}
 	}
 }
 
-// updateUserAgentVersion tries to parse the old UA, detect its browser, and update the version
-func updateUserAgentVersion(userAgent string, newVersions BrowserData, r *rand.Rand) string {
+func updateUserAgentVersion(userAgent string, newVersions BrowserData) string {
+	// Parse the current user agent to extract browser and version
 	var browserType, version string
 
-	// Attempt to detect old UA patterns (Chromium or Firefox)
 	if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36", &version); err == nil {
 		browserType = "Chromium"
+	} else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (Windows NT 11.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36", &version); err == nil {
+		browserType = "Chromium"
 	} else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36", &version); err == nil {
 		browserType = "Chromium"
 	} else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (X11; Ubuntu; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36", &version); err == nil {
@@ -258,6 +251,8 @@ func updateUserAgentVersion(userAgent string, newVersions BrowserData, r *rand.Rand) string {
 		browserType = "Chromium"
 	} else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:%s) Gecko/20100101 Firefox/%s", &version, &version); err == nil {
 		browserType = "Firefox"
+	} else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (Windows NT 11.0; Win64; x64; rv:%s) Gecko/20100101 Firefox/%s", &version, &version); err == nil {
+		browserType = "Firefox"
 	} else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (X11; Linux x86_64; rv:%s) Gecko/20100101 Firefox/%s", &version, &version); err == nil {
 		browserType = "Firefox"
 	} else if _, err := fmt.Sscanf(userAgent, "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:%s) Gecko/20100101 Firefox/%s", &version, &version); err == nil {
@@ -266,37 +261,22 @@ func updateUserAgentVersion(userAgent string, newVersions BrowserData, r *rand.Rand) string {
 		browserType = "Firefox"
 	}
 
-	// Grab the newest version from the fetched data
+	// Get the latest version for that browser
 	var latestVersion string
 	if browserType == "Firefox" && len(newVersions.Firefox) > 0 {
-		// Sort by usage descending
-		sort.Slice(newVersions.Firefox, func(i, j int) bool {
-			return newVersions.Firefox[i].Global > newVersions.Firefox[j].Global
-		})
 		latestVersion = newVersions.Firefox[0].Version
 	} else if browserType == "Chromium" && len(newVersions.Chromium) > 0 {
-		// Sort by usage descending
-		sort.Slice(newVersions.Chromium, func(i, j int) bool {
-			return newVersions.Chromium[i].Global > newVersions.Chromium[j].Global
-		})
 		latestVersion = newVersions.Chromium[0].Version
 	}
 
-	// If we failed to detect the browser or have no data, just return the old UA
-	if browserType == "" || latestVersion == "" {
-		return userAgent
-	}
-
-	// Create a new random OS-based UA string with the latest version
-	return generateUserAgent(browserType, latestVersion, r)
+	// Update the user agent string with the new version
+	return generateUserAgent(browserType, latestVersion)
 }
 
-// periodicAgentUpdate periodically refreshes browser data and user agents
 func periodicAgentUpdate() {
 	for {
-		// Sleep a random interval between 1 and 2 days
-		r := rand.New(rand.NewSource(time.Now().UnixNano()))
-		time.Sleep(time.Duration(24+r.Intn(24)) * time.Hour)
+		// Sleep for a random interval between 1 and 2 days
+		time.Sleep(time.Duration(24+rand.Intn(24)) * time.Hour)
 
 		// Fetch the latest browser versions
 		newVersions, err := fetchLatestBrowserVersions()
@@ -316,7 +296,6 @@ func periodicAgentUpdate() {
 	}
 }
 
-// GetUserAgent returns a cached UA for the given key or creates one if none exists.
 func GetUserAgent(cacheKey string) (string, error) {
 	cache.RLock()
 	userAgent, found := cache.data[cacheKey]
@@ -335,11 +314,9 @@ func GetUserAgent(cacheKey string) (string, error) {
 	cache.data[cacheKey] = userAgent
 	cache.Unlock()
 
-	printDebug("Generated (cached or new) user agent: %s", userAgent)
 	return userAgent, nil
 }
 
-// GetNewUserAgent always returns a newly generated UA, overwriting the cache.
 func GetNewUserAgent(cacheKey string) (string, error) {
 	userAgent, err := randomUserAgent()
 	if err != nil {
@@ -350,7 +327,6 @@ func GetNewUserAgent(cacheKey string) (string, error) {
 	cache.data[cacheKey] = userAgent
 	cache.Unlock()
 
-	printDebug("Generated new user agent: %s", userAgent)
 	return userAgent, nil
 }
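The `probabilities` slice both sides use for version selection is a truncated geometric distribution: after sorting by global usage, the top version is picked half the time, the next a quarter, and so on, with any leftover mass falling back to the last entry. A compact sketch of just that step, with made-up `BrowserVersion` data:

```go
package main

import (
	"fmt"
	"math/rand"
	"sort"
)

type BrowserVersion struct {
	Version string
	Global  float64
}

func pickVersion(r *rand.Rand, versions []BrowserVersion) string {
	// Most-used version first.
	sort.Slice(versions, func(i, j int) bool {
		return versions[i].Global > versions[j].Global
	})
	probabilities := []float64{0.5, 0.25, 0.125, 0.0625, 0.03125, 0.015625, 0.0078125, 0.00390625}
	randVal, cumulative := r.Float64(), 0.0
	for i, p := range probabilities {
		cumulative += p
		if i < len(versions) && randVal < cumulative {
			return versions[i].Version
		}
	}
	return versions[len(versions)-1].Version // fallback, as in the diff
}

func main() {
	r := rand.New(rand.NewSource(1))
	vs := []BrowserVersion{{"117", 30.0}, {"116", 20.0}, {"115", 5.0}}
	fmt.Println(pickVersion(r, vs))
}
```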
cache-images.go (134 changes)
@@ -19,7 +19,6 @@ import (
 	"time"
 
 	"github.com/chai2010/webp"
-	"github.com/fyne-io/image/ico"
 	"golang.org/x/image/bmp"
 	"golang.org/x/image/tiff"
 )
@@ -36,7 +35,7 @@ var (
 	imageURLMapMu sync.RWMutex
 )
 
-func cacheImage(imageURL, imageID string, imageType string) (string, bool, error) {
+func cacheImage(imageURL, imageID string, isThumbnail bool) (string, bool, error) {
 	if imageURL == "" {
 		recordInvalidImageID(imageID)
 		return "", false, fmt.Errorf("empty image URL for image ID %s", imageID)
@@ -44,15 +43,10 @@ func cacheImage(imageURL, imageID string, imageType string) (string, bool, error
 
 	// Construct the filename based on the image ID and type
 	var filename string
-	switch imageType {
-	case "thumb":
+	if isThumbnail {
 		filename = fmt.Sprintf("%s_thumb.webp", imageID)
-	case "icon":
-		filename = fmt.Sprintf("%s_icon.webp", imageID)
-	case "full":
+	} else {
 		filename = fmt.Sprintf("%s_full.webp", imageID)
-	default:
-		return "", false, fmt.Errorf("unknown image type: %s", imageType)
 	}
 
 	// Make sure we store inside: config.DriveCache.Path / images
@@ -142,12 +136,24 @@ func cacheImage(imageURL, imageID string, imageType string) (string, bool, error
 		return cachedImagePath, true, nil
 	}
 
-	// Decode image
-	img, err := safeDecodeImage(contentType, data)
-	if err != nil {
-		printErr("Failed to decode favicon: %s [%s] (%v)", imageURL, imageID, err)
+	// Decode the image based on the content type
+	var img image.Image
+	switch contentType {
+	case "image/jpeg":
+		img, err = jpeg.Decode(bytes.NewReader(data))
+	case "image/png":
+		img, err = png.Decode(bytes.NewReader(data))
+	case "image/gif":
+		img, err = gif.Decode(bytes.NewReader(data))
+	case "image/webp":
+		img, err = webp.Decode(bytes.NewReader(data))
+	case "image/bmp":
+		img, err = bmp.Decode(bytes.NewReader(data))
+	case "image/tiff":
+		img, err = tiff.Decode(bytes.NewReader(data))
+	default:
 		recordInvalidImageID(imageID)
-		return "", false, err
+		return "", false, fmt.Errorf("unsupported image type: %s", contentType)
 	}
 
 	if err != nil {
@@ -219,23 +225,29 @@ func handleImageServe(w http.ResponseWriter, r *http.Request) {
 	// Adjust to read from config.DriveCache.Path / images
 	cachedImagePath := filepath.Join(config.DriveCache.Path, "images", filename)
 
-	if hasExtension && (imageType == "thumb" || imageType == "icon") {
+	if hasExtension && imageType == "thumb" {
+		// Requesting cached image (thumbnail or full)
 		if _, err := os.Stat(cachedImagePath); err == nil {
-			// Update the modification time
-			_ = os.Chtimes(cachedImagePath, time.Now(), time.Now())
-			w.Header().Set("Content-Type", "image/webp")
+			// Update the modification time to now
+			err := os.Chtimes(cachedImagePath, time.Now(), time.Now())
+			if err != nil {
+				printWarn("Failed to update modification time for %s: %v", cachedImagePath, err)
+			}
+
+			// Determine content type based on file extension
+			contentType := "image/webp"
+			w.Header().Set("Content-Type", contentType)
 			w.Header().Set("Cache-Control", "public, max-age=31536000")
 			http.ServeFile(w, r, cachedImagePath)
 			return
 		} else {
+			// Cached image not found
 			if config.DriveCacheEnabled {
-				if imageType == "icon" {
-					serveGlobeImage(w, r)
-				} else {
-					serveMissingImage(w, r)
-				}
+				// Thumbnail should be cached, but not found
+				serveMissingImage(w, r)
 				return
 			}
+			// Else, proceed to proxy if caching is disabled
 		}
 	}
 
@@ -311,12 +323,8 @@ func handleImageStatus(w http.ResponseWriter, r *http.Request) {
 	invalidImageIDsMu.Unlock()
 
 	if isInvalid {
-		// Image is invalid; provide appropriate fallback
-		if strings.HasSuffix(id, "_icon.webp") || strings.HasSuffix(id, "_icon") {
-			statusMap[id] = "/images/globe.svg"
-		} else {
-			statusMap[id] = "/images/missing.svg"
-		}
+		// Image is invalid; inform the frontend by setting the missing image URL
+		statusMap[id] = "/static/images/missing.svg"
 		continue
 	}
 
@@ -324,15 +332,11 @@ func handleImageStatus(w http.ResponseWriter, r *http.Request) {
 	extensions := []string{"webp", "svg"} // Extensions without leading dots
 	imageReady := false
 
+	// Check thumbnail first
 	for _, ext := range extensions {
-		thumbPath := filepath.Join(config.DriveCache.Path, "images", fmt.Sprintf("%s_thumb.%s", id, ext))
-		iconPath := filepath.Join(config.DriveCache.Path, "images", fmt.Sprintf("%s_icon.%s", id, ext))
+		thumbFilename := fmt.Sprintf("%s_thumb.%s", id, ext)
+		thumbPath := filepath.Join(config.DriveCache.Path, "images", thumbFilename)
 
-		if _, err := os.Stat(iconPath); err == nil {
-			statusMap[id] = fmt.Sprintf("/image/%s_icon.%s", id, ext)
-			imageReady = true
-			break
-		}
 		if _, err := os.Stat(thumbPath); err == nil {
 			statusMap[id] = fmt.Sprintf("/image/%s_thumb.%s", id, ext)
 			imageReady = true
@@ -356,13 +360,11 @@ func handleImageStatus(w http.ResponseWriter, r *http.Request) {
 
 	// If neither is ready and image is not invalid
 	if !imageReady {
-		// Distinguish favicon vs image fallback
-		if strings.HasSuffix(id, "_icon.webp") || strings.HasSuffix(id, "_icon") {
-			statusMap[id] = "/images/globe.svg"
-		} else if !config.DriveCacheEnabled {
-			statusMap[id] = "/images/missing.svg"
+		if !config.DriveCacheEnabled {
+			// Hard cache is disabled; use the proxy URL
+			statusMap[id] = fmt.Sprintf("/image/%s_thumb", id)
 		}
-		// else: leave it unset — frontend will retry
+		// Else, do not set statusMap[id]; the frontend will keep checking
 	}
 }
 
@@ -508,33 +510,6 @@ func cleanupCache() {
 	}
 }
 
-func safeDecodeImage(contentType string, data []byte) (img image.Image, err error) {
-	defer func() {
-		if r := recover(); r != nil {
-			err = fmt.Errorf("image decode panic: %v", r)
-		}
-	}()
-	switch contentType {
-	case "image/x-icon", "image/vnd.microsoft.icon":
-		img, err = ico.Decode(bytes.NewReader(data))
-	case "image/jpeg":
-		img, err = jpeg.Decode(bytes.NewReader(data))
-	case "image/png":
-		img, err = png.Decode(bytes.NewReader(data))
-	case "image/gif":
-		img, err = gif.Decode(bytes.NewReader(data))
-	case "image/webp":
-		img, err = webp.Decode(bytes.NewReader(data))
-	case "image/bmp":
-		img, err = bmp.Decode(bytes.NewReader(data))
-	case "image/tiff":
-		img, err = tiff.Decode(bytes.NewReader(data))
-	default:
-		err = fmt.Errorf("unsupported image type: %s", contentType)
-	}
-	return
-}
-
 // Serve missing.svg
 func serveMissingImage(w http.ResponseWriter, r *http.Request) {
 	missingImagePath := filepath.Join("static", "images", "missing.svg")
@@ -542,25 +517,8 @@ func serveMissingImage(w http.ResponseWriter, r *http.Request) {
 	w.Header().Set("Cache-Control", "no-store, must-revalidate")
 	w.Header().Set("Pragma", "no-cache")
 	w.Header().Set("Expires", "0")
+	if config.DriveCacheEnabled {
+		w.WriteHeader(http.StatusNotFound)
+	}
 	http.ServeFile(w, r, missingImagePath)
 }
-
-func serveGlobeImage(w http.ResponseWriter, r *http.Request) {
-	globePath := filepath.Join("static", "images", "globe.svg")
-
-	// Set error code FIRST
-	w.WriteHeader(http.StatusNotFound)
-
-	// Now read the file and write it manually, to avoid conflict with http.ServeFile
-	data, err := os.ReadFile(globePath)
-	if err != nil {
-		http.Error(w, "globe.svg not found", http.StatusInternalServerError)
-		return
-	}
-
-	w.Header().Set("Content-Type", "image/svg+xml")
-	w.Header().Set("Cache-Control", "no-store, must-revalidate")
-	w.Header().Set("Pragma", "no-cache")
-	w.Header().Set("Expires", "0")
-	_, _ = w.Write(data)
-}
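The `safeDecodeImage` helper removed above exists because Go image decoders can panic on malformed input; the deferred `recover` turns such a panic into an ordinary error the caller can handle. The same pattern reduced to the standard library's JPEG and PNG decoders (the real function also handles ico, webp, bmp, and tiff):

```go
package main

import (
	"bytes"
	"fmt"
	"image"
	"image/jpeg"
	"image/png"
)

// safeDecode converts any decoder panic into an error via recover.
func safeDecode(contentType string, data []byte) (img image.Image, err error) {
	defer func() {
		if r := recover(); r != nil {
			err = fmt.Errorf("image decode panic: %v", r)
		}
	}()
	switch contentType {
	case "image/jpeg":
		img, err = jpeg.Decode(bytes.NewReader(data))
	case "image/png":
		img, err = png.Decode(bytes.NewReader(data))
	default:
		err = fmt.Errorf("unsupported image type: %s", contentType)
	}
	return
}

func main() {
	_, err := safeDecode("image/png", []byte("not a real png"))
	fmt.Println(err) // a normal error, not a crash
}
```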
cache.go (52 changes)
@@ -11,6 +11,7 @@ import (
 // SearchResult is a generic interface for all types of search results.
 type SearchResult interface{}
 
+// Define various search result types implementing SearchResult interface
 type TextSearchResult struct {
 	URL    string
 	Header string
@@ -61,18 +62,6 @@ type ForumSearchResult struct {
 	ThumbnailSrc string `json:"thumbnailSrc,omitempty"`
 }
 
-type MusicResult struct {
-	URL           string
-	Title         string
-	Artist        string
-	Description   string
-	PublishedDate string
-	Thumbnail     string
-	// AudioURL string
-	Source   string
-	Duration string
-}
-
 // GeocodeCachedItem represents a geocoding result stored in the cache.
 type GeocodeCachedItem struct {
 	Latitude string
@@ -134,11 +123,6 @@ func NewGeocodeCache() *GeocodeCache {
 
 // Get retrieves the results for a given key from the cache.
 func (rc *ResultsCache) Get(key CacheKey) ([]SearchResult, bool) {
-	// Skip if RAM caching is disabled
-	if !config.RamCacheEnabled {
-		return nil, false
-	}
-
 	rc.mu.Lock()
 	defer rc.mu.Unlock()
 
@@ -159,11 +143,6 @@ func (rc *ResultsCache) Get(key CacheKey) ([]SearchResult, bool) {
 
 // Set stores the results for a given key in the cache.
 func (rc *ResultsCache) Set(key CacheKey, results []SearchResult) {
-	// Skip if RAM caching is disabled
-	if !config.RamCacheEnabled {
-		return
-	}
-
 	rc.mu.Lock()
 	defer rc.mu.Unlock()
 
@@ -183,11 +162,6 @@ func (rc *ResultsCache) keyToString(key CacheKey) string {
 
 // checkAndCleanCache removes items if memory usage exceeds the limit.
 func (rc *ResultsCache) checkAndCleanCache() {
-	// Skip if RAM caching is disabled
-	if !config.RamCacheEnabled {
-		return
-	}
-
 	if rc.currentMemoryUsage() > config.RamCache.MaxUsageBytes {
 		rc.cleanOldestItems()
 	}
@@ -205,11 +179,6 @@ func (rc *ResultsCache) currentMemoryUsage() uint64 {
 
 // Get retrieves the geocoding result for a given query from the cache.
 func (gc *GeocodeCache) Get(query string) (latitude, longitude string, found bool, exists bool) {
-	// Skip if RAM caching is disabled
-	if !config.RamCacheEnabled {
-		return "", "", false, false
-	}
-
 	gc.mu.Lock()
 	defer gc.mu.Unlock()
 
@@ -229,11 +198,6 @@ func (gc *GeocodeCache) Get(query string) (latitude, longitude string, found boo
 }
 
 func (gc *GeocodeCache) Set(query, latitude, longitude string, found bool) {
-	// Skip if RAM caching is disabled
-	if !config.RamCacheEnabled {
-		return
-	}
-
 	gc.mu.Lock()
 	defer gc.mu.Unlock()
 
@@ -295,23 +259,15 @@ func convertToSearchResults(results interface{}) []SearchResult {
 			genericResults[i] = r
 		}
 		return genericResults
-	case []MusicResult:
-		genericResults := make([]SearchResult, len(res))
-		for i, r := range res {
-			genericResults[i] = r
-		}
-		return genericResults
 	}
 	return nil
 }
 
-func convertToSpecificResults(results []SearchResult) ([]TextSearchResult, []TorrentResult, []ImageSearchResult, []ForumSearchResult, []MusicResult) {
+func convertToSpecificResults(results []SearchResult) ([]TextSearchResult, []TorrentResult, []ImageSearchResult, []ForumSearchResult) {
 	var textResults []TextSearchResult
 	var torrentResults []TorrentResult
 	var imageResults []ImageSearchResult
 	var forumResults []ForumSearchResult
-	var musicResults []MusicResult
 
 	for _, r := range results {
 		switch res := r.(type) {
 		case TextSearchResult:
@@ -322,9 +278,7 @@ func convertToSpecificResults(results []SearchResult) ([]TextSearchResult, []Tor
 			imageResults = append(imageResults, res)
 		case ForumSearchResult:
 			forumResults = append(forumResults, res)
-		case MusicResult:
-			musicResults = append(musicResults, res)
 		}
 	}
-	return textResults, torrentResults, imageResults, forumResults, musicResults
+	return textResults, torrentResults, imageResults, forumResults
 }
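The guards removed throughout this file all follow one pattern: every cache method checks `config.RamCacheEnabled` first and becomes a no-op when RAM caching is off, so callers never have to care whether the cache is live. A toy version of that shape; the type and field names here are illustrative, not the repository's:

```go
package main

import (
	"fmt"
	"sync"
)

type ResultsCache struct {
	mu      sync.Mutex
	enabled bool // stands in for config.RamCacheEnabled
	data    map[string][]string
}

func (rc *ResultsCache) Get(key string) ([]string, bool) {
	if !rc.enabled { // skip entirely when RAM caching is disabled
		return nil, false
	}
	rc.mu.Lock()
	defer rc.mu.Unlock()
	v, ok := rc.data[key]
	return v, ok
}

func (rc *ResultsCache) Set(key string, results []string) {
	if !rc.enabled {
		return
	}
	rc.mu.Lock()
	defer rc.mu.Unlock()
	rc.data[key] = results
}

func main() {
	rc := &ResultsCache{enabled: true, data: map[string][]string{}}
	rc.Set("q", []string{"result"})
	fmt.Println(rc.Get("q"))
}
```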
common.go (114 changes)
@@ -8,7 +8,6 @@ import (
 	"html/template"
 	mathrand "math/rand"
 	"net/http"
-	"net/url"
 	"strings"
 	"time"
 )
@@ -29,59 +28,14 @@ var (
 			}
 			return string(jsonBytes), nil
 		},
-		"formatShortDate": func(date string) string {
-			t, _ := time.Parse("2006-01-02", date)
-			// return t.Format("Mon") // e.g. "Sat"
-			return t.Format("2.1.") // e.g. "29.6."
-		},
-		"weatherIcon": func(cur interface{}) string {
-			switch c := cur.(type) {
-			case map[string]interface{}:
-				if cond, ok := c["Condition"].(string); ok {
-					return iconForCond(cond)
-				}
-			case WeatherCurrent:
-				return iconForCond(c.Condition)
-			case *WeatherCurrent:
-				return iconForCond(c.Condition)
-			}
-			return "🌈"
-		},
 	}
 )
 
-func iconForCond(cond string) string {
-	switch cond {
-	case "Clear":
-		return "☀️"
-	case "Partly cloudy":
-		return "⛅"
-	case "Cloudy":
-		return "☁️"
-	case "Rain":
-		return "🌧️"
-	case "Snow":
-		return "❄️"
-	case "Thunderstorm":
-		return "⛈️"
-	case "Fog":
-		return "🌫️"
-	default:
-		return "🌈"
-	}
-}
-
 type SearchEngine struct {
 	Name string
 	Func func(string, string, string, int) ([]SearchResult, time.Duration, error)
 }
 
-type LinkParts struct {
-	Domain  template.HTML
-	Path    template.HTML
-	RootURL string // used by getFaviconProxyURL()
-}
-
 // Helper function to render templates without elapsed time measurement
 func renderTemplate(w http.ResponseWriter, tmplName string, data map[string]interface{}) {
 	// Generate icon paths for SVG and PNG, including a 1/10 chance for an alternate icon
@@ -153,71 +107,3 @@ func GetIconPath() (string, string) {
 	// Default paths
 	return "/static/images/icon.svg", "/static/images/icon.png"
 }
-
-// FormatElapsedTime formats elapsed time as a string,
-// using:
-//   - "> 0.01 ms" if under 49µs
-//   - "0.xx ms" if under 1ms
-//   - "xxx ms" if under 300ms
-//   - "x.xx seconds" otherwise
-func FormatElapsedTime(elapsed time.Duration) string {
-	if elapsed < 49*time.Microsecond {
-		return fmt.Sprintf("> 0.01 %s", Translate("milliseconds"))
-	} else if elapsed < time.Millisecond {
-		ms := float64(elapsed.Microseconds()) / 1000.0
-		return fmt.Sprintf("%.2f %s", ms, Translate("milliseconds"))
-	} else if elapsed < 300*time.Millisecond {
-		return fmt.Sprintf("%d %s", elapsed.Milliseconds(), Translate("milliseconds"))
-	}
-	return fmt.Sprintf("%.2f %s", elapsed.Seconds(), Translate("seconds"))
-}
-
-func FormatURLParts(rawURL string) (domain, path, rootURL string) {
-	parsed, err := url.Parse(rawURL)
-	if err != nil || parsed.Host == "" {
-		return "", "", ""
-	}
-
-	domain = parsed.Host
-	if strings.HasPrefix(domain, "www.") {
-		domain = domain[4:]
-	}
-
-	rootURL = parsed.Scheme + "://" + parsed.Host
-
-	path = strings.Trim(parsed.Path, "/")
-	pathSegments := strings.Split(path, "/")
-	var cleanSegments []string
-	for _, seg := range pathSegments {
-		if seg != "" {
-			cleanSegments = append(cleanSegments, seg)
-		}
-	}
-	path = strings.Join(cleanSegments, "/")
-	return domain, path, rootURL
-}
-
-func FormatLinkHTML(rawURL string) LinkParts {
-	domain, path, root := FormatURLParts(rawURL)
-
-	lp := LinkParts{
-		RootURL: root,
-	}
-
-	lp.Domain = template.HTML(fmt.Sprintf(`<span class="result-domain">%s</span>`, template.HTMLEscapeString(domain)))
-
-	if path != "" {
-		pathDisplay := strings.ReplaceAll(path, "/", " › ")
-		lp.Path = template.HTML(fmt.Sprintf(`<span class="result-path"> › %s</span>`, template.HTMLEscapeString(pathDisplay)))
-	}
-
-	return lp
-}
-
-// Converts any struct to a map[string]interface{} using JSON round-trip.
-// Useful for rendering templates with generic map input.
-func toMap(data interface{}) map[string]interface{} {
-	jsonBytes, _ := json.Marshal(data)
-	var result map[string]interface{}
-	_ = json.Unmarshal(jsonBytes, &result)
-	return result
-}
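The removed `FormatElapsedTime` picks its unit by threshold: below 49µs a fixed floor string, below 1ms fractional milliseconds, below 300ms whole milliseconds, and seconds otherwise. The same ladder without the `Translate` localization dependency, as a runnable sketch:

```go
package main

import (
	"fmt"
	"time"
)

func formatElapsed(elapsed time.Duration) string {
	switch {
	case elapsed < 49*time.Microsecond:
		return "> 0.01 ms"
	case elapsed < time.Millisecond:
		return fmt.Sprintf("%.2f ms", float64(elapsed.Microseconds())/1000.0)
	case elapsed < 300*time.Millisecond:
		return fmt.Sprintf("%d ms", elapsed.Milliseconds())
	default:
		return fmt.Sprintf("%.2f seconds", elapsed.Seconds())
	}
}

func main() {
	for _, d := range []time.Duration{
		10 * time.Microsecond,  // "> 0.01 ms"
		500 * time.Microsecond, // "0.50 ms"
		120 * time.Millisecond, // "120 ms"
		2 * time.Second,        // "2.00 seconds"
	} {
		fmt.Println(formatElapsed(d))
	}
}
```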
257
config.go
257
config.go
|
@ -4,8 +4,10 @@ import (
|
||||||
"bufio"
|
"bufio"
|
||||||
"fmt"
|
"fmt"
|
||||||
"os"
|
"os"
|
||||||
|
"path/filepath"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
|
"syscall"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/shirou/gopsutil/mem"
|
"github.com/shirou/gopsutil/mem"
|
||||||
|
@ -20,42 +22,24 @@ type CacheConfig struct {
|
||||||
Path string
|
Path string
|
||||||
}
|
}
|
||||||
|
|
||||||
type MetaSearchConfig struct {
|
|
||||||
Text []string
|
|
||||||
Image []string
|
|
||||||
Files []string
|
|
||||||
Video []string
|
|
||||||
}
|
|
||||||
|
|
||||||
type Config struct {
|
type Config struct {
|
||||||
Port int
|
Port int // Added
|
||||||
NodeID string
|
AuthCode string // Added
|
||||||
Nodes []string
|
PeerID string // Added
|
||||||
Domain string
|
Peers []string
|
||||||
NodesEnabled bool
|
Domain string // Added
|
||||||
MetaSearchEnabled bool
|
NodesEnabled bool // Added
|
||||||
IndexerEnabled bool
|
CrawlerEnabled bool // Added
|
||||||
WebsiteEnabled bool
|
IndexerEnabled bool // Added
|
||||||
|
WebsiteEnabled bool // Added
|
||||||
RamCacheEnabled bool
|
RamCacheEnabled bool
|
||||||
DriveCacheEnabled bool
|
DriveCacheEnabled bool // Added
|
||||||
MetaProxyEnabled bool
|
LogLevel int // Added
|
||||||
MetaProxyStrict bool
|
|
||||||
MetaProxyRetry int
|
|
||||||
MetaProxies []string
|
|
||||||
CrawlerProxyEnabled bool
|
|
||||||
CrawlerProxyStrict bool
|
|
||||||
CrawlerProxies []string
|
|
||||||
CrawlerProxyRetry int
|
|
||||||
// Maybye add Proxy support for Image Extraction?
|
|
||||||
LogLevel int
|
|
||||||
ConcurrentStandardCrawlers int
|
ConcurrentStandardCrawlers int
|
||||||
ConcurrentChromeCrawlers int
|
ConcurrentChromeCrawlers int
|
||||||
CrawlingInterval time.Duration // Refres crawled results in...
|
CrawlingInterval time.Duration // Refres crawled results in...
|
||||||
MaxPagesPerDomain int // Max pages to crawl per domain
|
MaxPagesPerDomain int // Max pages to crawl per domain
|
||||||
IndexBatchSize int
|
IndexBatchSize int
|
||||||
LibreXInstances []string
|
|
||||||
|
|
||||||
MetaSearch MetaSearchConfig
|
|
||||||
|
|
||||||
DriveCache CacheConfig
|
DriveCache CacheConfig
|
||||||
RamCache CacheConfig
|
RamCache CacheConfig
|
||||||
|
@ -64,54 +48,20 @@ type Config struct {
|
||||||
var defaultConfig = Config{
|
var defaultConfig = Config{
|
||||||
Port: 5000,
|
Port: 5000,
|
||||||
Domain: "localhost",
|
Domain: "localhost",
|
||||||
Nodes: []string{},
|
Peers: []string{},
|
||||||
|
AuthCode: generateStrongRandomString(64),
|
||||||
NodesEnabled: false,
|
NodesEnabled: false,
|
||||||
MetaSearchEnabled: true,
|
CrawlerEnabled: true,
|
||||||
IndexerEnabled: false,
|
IndexerEnabled: false,
|
||||||
WebsiteEnabled: true,
|
WebsiteEnabled: true,
|
||||||
RamCacheEnabled: true,
|
RamCacheEnabled: true,
|
||||||
DriveCacheEnabled: false,
|
DriveCacheEnabled: false,
|
||||||
MetaProxyEnabled: false,
|
|
||||||
MetaProxyStrict: true,
|
|
||||||
MetaProxies: []string{},
|
|
||||||
MetaProxyRetry: 3,
|
|
||||||
CrawlerProxyEnabled: false,
|
|
||||||
CrawlerProxyStrict: true,
|
|
||||||
CrawlerProxies: []string{},
|
|
||||||
CrawlerProxyRetry: 1,
|
|
||||||
ConcurrentStandardCrawlers: 12,
|
ConcurrentStandardCrawlers: 12,
|
||||||
ConcurrentChromeCrawlers: 4,
|
ConcurrentChromeCrawlers: 4,
|
||||||
CrawlingInterval: 24 * time.Hour,
|
CrawlingInterval: 24 * time.Hour,
|
||||||
MaxPagesPerDomain: 10,
|
MaxPagesPerDomain: 10,
|
||||||
IndexBatchSize: 50,
|
IndexBatchSize: 50,
|
||||||
LogLevel: 1,
|
LogLevel: 1,
|
||||||
LibreXInstances: []string{"librex.antopie.org"},
|
|
||||||
MetaSearch: MetaSearchConfig{
|
|
||||||
// For Text search (skip SearXNG and LibreX by default, as that would be mega stupid)
|
|
||||||
Text: []string{"Google", "Brave", "DuckDuckGo"},
|
|
||||||
|
|
||||||
// For Image search
|
|
||||||
Image: []string{"Qwant", "Bing", "DeviantArt"},
|
|
||||||
|
|
||||||
// For Files search
|
|
||||||
Files: []string{"TorrentGalaxy", "ThePirateBay", "Nyaa"},
|
|
||||||
|
|
||||||
// For Video (piped instances)
|
|
||||||
Video: []string{
|
|
||||||
"api.piped.yt",
|
|
||||||
"pipedapi.moomoo.me",
|
|
||||||
"pipedapi.darkness.services",
|
|
||||||
"pipedapi.kavin.rocks",
|
|
||||||
"piped-api.hostux.net",
|
|
||||||
"pipedapi.syncpundit.io",
|
|
||||||
"piped-api.cfe.re",
|
|
||||||
"pipedapi.in.projectsegfau.lt",
|
|
||||||
"piapi.ggtyler.dev",
|
|
||||||
"piped-api.codespace.cz",
|
|
||||||
"pipedapi.coldforge.xyz",
|
|
||||||
"pipedapi.osphost.fi",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
DriveCache: CacheConfig{
|
DriveCache: CacheConfig{
|
||||||
Duration: 48 * time.Hour, // Added
|
Duration: 48 * time.Hour, // Added
|
||||||
Path: "./cache", // Added
|
Path: "./cache", // Added
|
||||||
|
@@ -266,6 +216,12 @@ func createConfig() error {
		config = defaultConfig
	}

+	// Generate AuthCode if missing
+	if config.AuthCode == "" {
+		config.AuthCode = generateStrongRandomString(64)
+		printMessage("Generated connection code: %s\n", config.AuthCode)
+	}
+
	saveConfig(config)
	printInfo("Configuration saved successfully.")
	return nil
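generateStrongRandomString is referenced here but its body is not part of this diff; a minimal sketch of such a helper, assuming crypto/rand with hex encoding (the actual implementation may differ):

package main

import (
	"crypto/rand"
	"encoding/hex"
)

// generateStrongRandomString returns an n-character hex string built from
// crypto/rand bytes. Sketch only, for illustration of the call above.
func generateStrongRandomString(n int) string {
	b := make([]byte, (n+1)/2)
	if _, err := rand.Read(b); err != nil {
		panic(err) // a crypto/rand failure is unrecoverable here
	}
	return hex.EncodeToString(b)[:n]
}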
@@ -280,59 +236,26 @@ func saveConfig(config Config) {
	sec.Key("Domain").SetValue(config.Domain)
	sec.Key("LogLevel").SetValue(strconv.Itoa(config.LogLevel))

-	// Nodes section
-	nodesSec := cfg.Section("Nodes")
-	nodesSec.Key("NodeID").SetValue(config.NodeID)
-	nodesSec.Key("Nodes").SetValue(strings.Join(config.Nodes, ","))
+	// Peers section
+	peersSec := cfg.Section("Peers")
+	peersSec.Key("AuthCode").SetValue(config.AuthCode)
+	peersSec.Key("PeerID").SetValue(config.PeerID)
+	peersSec.Key("Peers").SetValue(strings.Join(config.Peers, ","))

	// Features section
-	if config.NodesEnabled != defaultConfig.NodesEnabled ||
-		config.MetaSearchEnabled != defaultConfig.MetaSearchEnabled ||
-		config.IndexerEnabled != defaultConfig.IndexerEnabled ||
-		config.WebsiteEnabled != defaultConfig.WebsiteEnabled ||
-		config.MetaProxyEnabled != defaultConfig.MetaProxyEnabled ||
-		config.CrawlerProxyEnabled != defaultConfig.CrawlerProxyEnabled {
-
	featuresSec := cfg.Section("Features")
-		setBoolIfChanged(featuresSec, "Nodes", config.NodesEnabled, defaultConfig.NodesEnabled)
-		setBoolIfChanged(featuresSec, "Crawler", config.MetaSearchEnabled, defaultConfig.MetaSearchEnabled)
-		setBoolIfChanged(featuresSec, "Indexer", config.IndexerEnabled, defaultConfig.IndexerEnabled)
-		setBoolIfChanged(featuresSec, "Website", config.WebsiteEnabled, defaultConfig.WebsiteEnabled)
-		setBoolIfChanged(featuresSec, "MetaProxy", config.MetaProxyEnabled, defaultConfig.MetaProxyEnabled)
-		setBoolIfChanged(featuresSec, "CrawlerProxy", config.CrawlerProxyEnabled, defaultConfig.CrawlerProxyEnabled)
-	}
+	featuresSec.Key("Nodes").SetValue(strconv.FormatBool(config.NodesEnabled))
+	featuresSec.Key("Crawler").SetValue(strconv.FormatBool(config.CrawlerEnabled))
+	featuresSec.Key("Indexer").SetValue(strconv.FormatBool(config.IndexerEnabled))
+	featuresSec.Key("Website").SetValue(strconv.FormatBool(config.WebsiteEnabled))

-	// Proxies section
-	proxiesSec := cfg.Section("Proxies")
-	proxiesSec.Key("MetaProxyStrict").SetValue(strconv.FormatBool(config.MetaProxyStrict))
-	proxiesSec.Key("MetaProxies").SetValue(strings.Join(config.MetaProxies, ","))
-	setBoolIfChanged(proxiesSec, "CrawlerProxyStrict", config.CrawlerProxyStrict, defaultConfig.CrawlerProxyStrict)
-	setSliceIfChanged(proxiesSec, "CrawlerProxies", config.CrawlerProxies, defaultConfig.CrawlerProxies)
-	proxiesSec.Key("MetaProxyRetry").SetValue(strconv.Itoa(config.MetaProxyRetry))
-	setIntIfChanged(proxiesSec, "CrawlerProxyRetry", config.CrawlerProxyRetry, defaultConfig.CrawlerProxyRetry)
-
-	// MetaSearch section
-	metaSec := cfg.Section("MetaSearch")
-	metaSec.Key("LibreXInstances").SetValue(strings.Join(config.LibreXInstances, ","))
-	metaSec.Key("Text").SetValue(strings.Join(config.MetaSearch.Text, ","))
-	metaSec.Key("Image").SetValue(strings.Join(config.MetaSearch.Image, ","))
-	metaSec.Key("Files").SetValue(strings.Join(config.MetaSearch.Files, ","))
-	metaSec.Key("Video").SetValue(strings.Join(config.MetaSearch.Video, ","))

	// Indexer section
-	if config.ConcurrentStandardCrawlers != defaultConfig.ConcurrentStandardCrawlers ||
-		config.ConcurrentChromeCrawlers != defaultConfig.ConcurrentChromeCrawlers ||
-		config.CrawlingInterval != defaultConfig.CrawlingInterval ||
-		config.MaxPagesPerDomain != defaultConfig.MaxPagesPerDomain ||
-		config.IndexBatchSize != defaultConfig.IndexBatchSize {
-
	indexerSec := cfg.Section("Indexer")
-		setIntIfChanged(indexerSec, "ConcurrentStandardCrawlers", config.ConcurrentStandardCrawlers, defaultConfig.ConcurrentStandardCrawlers)
-		setIntIfChanged(indexerSec, "ConcurrentChromeCrawlers", config.ConcurrentChromeCrawlers, defaultConfig.ConcurrentChromeCrawlers)
-		setIfChanged(indexerSec, "CrawlingInterval", config.CrawlingInterval.String(), defaultConfig.CrawlingInterval.String())
-		setIntIfChanged(indexerSec, "MaxPagesPerDomain", config.MaxPagesPerDomain, defaultConfig.MaxPagesPerDomain)
-		setIntIfChanged(indexerSec, "IndexBatchSize", config.IndexBatchSize, defaultConfig.IndexBatchSize)
-	}
+	indexerSec.Key("ConcurrentStandardCrawlers").SetValue(strconv.Itoa(config.ConcurrentStandardCrawlers))
+	indexerSec.Key("ConcurrentChromeCrawlers").SetValue(strconv.Itoa(config.ConcurrentChromeCrawlers))
+	indexerSec.Key("CrawlingInterval").SetValue(config.CrawlingInterval.String())
+	indexerSec.Key("MaxPagesPerDomain").SetValue(strconv.Itoa(config.MaxPagesPerDomain))
+	indexerSec.Key("IndexBatchSize").SetValue(strconv.Itoa(config.IndexBatchSize))

	// DriveCache section
	driveSec := cfg.Section("DriveCache")
@@ -363,48 +286,18 @@ func loadConfig() Config {
	domain := getConfigValueString(cfg.Section("Server").Key("Domain"), defaultConfig.Domain)
	logLevel := getConfigValue(cfg.Section("Server").Key("LogLevel"), defaultConfig.LogLevel, strconv.Atoi)

-	// Nodes
-	nodeID := getConfigValueString(cfg.Section("Nodes").Key("NodeID"), defaultConfig.NodeID)
-	nodes := strings.Split(getConfigValueString(cfg.Section("Nodes").Key("Nodes"), ""), ",")
+	// Peers
+	authCode := getConfigValueString(cfg.Section("Peers").Key("AuthCode"), defaultConfig.AuthCode)
+	peers := strings.Split(getConfigValueString(cfg.Section("Peers").Key("Peers"), ""), ",")

	// Features
	nodesEnabled := getConfigValueBool(cfg.Section("Features").Key("Nodes"), defaultConfig.NodesEnabled)
-	metaSearchEnabled := getConfigValueBool(cfg.Section("Features").Key("Crawler"), defaultConfig.MetaSearchEnabled)
+	crawlerEnabled := getConfigValueBool(cfg.Section("Features").Key("Crawler"), defaultConfig.CrawlerEnabled)
	indexerEnabled := getConfigValueBool(cfg.Section("Features").Key("Indexer"), defaultConfig.IndexerEnabled)
	websiteEnabled := getConfigValueBool(cfg.Section("Features").Key("Website"), defaultConfig.WebsiteEnabled)
	ramCacheEnabled := getConfigValueBool(cfg.Section("Features").Key("RamCache"), defaultConfig.RamCacheEnabled)
	driveCacheEnabled := getConfigValueBool(cfg.Section("Features").Key("DriveCache"), defaultConfig.DriveCacheEnabled)
-	metaProxyEnabled := getConfigValueBool(cfg.Section("Features").Key("MetaProxy"), defaultConfig.MetaProxyEnabled)
-	crawlerProxyEnabled := getConfigValueBool(cfg.Section("Features").Key("CrawlerProxy"), defaultConfig.CrawlerProxyEnabled)
-
-	// Proxies
-	metaProxyStrict := getConfigValueBool(cfg.Section("Proxies").Key("MetaProxyStrict"), defaultConfig.MetaProxyStrict)
-	metaProxies := strings.Split(getConfigValueString(cfg.Section("Proxies").Key("MetaProxies"), ""), ",")
-	crawlerProxyStrict := getConfigValueBool(cfg.Section("Proxies").Key("CrawlerProxyStrict"), defaultConfig.CrawlerProxyStrict)
-	crawlerProxies := strings.Split(getConfigValueString(cfg.Section("Proxies").Key("CrawlerProxies"), ""), ",")
-	metaProxyRetry := getConfigValue(cfg.Section("Proxies").Key("MetaProxyRetry"), defaultConfig.MetaProxyRetry, strconv.Atoi)
-	crawlerProxyRetry := getConfigValue(cfg.Section("Proxies").Key("CrawlerProxyRetry"), defaultConfig.CrawlerProxyRetry, strconv.Atoi)
-
-	// MetaSearch
-	searchXInstances := strings.Split(getConfigValueString(cfg.Section("MetaSearch").Key("LibreXInstances"), strings.Join(defaultConfig.LibreXInstances, ",")), ",")
-	textList := strings.Split(getConfigValueString(cfg.Section("MetaSearch").Key("Text"), strings.Join(defaultConfig.MetaSearch.Text, ",")), ",")
-	imageList := strings.Split(getConfigValueString(cfg.Section("MetaSearch").Key("Image"), strings.Join(defaultConfig.MetaSearch.Image, ",")), ",")
-	filesList := strings.Split(getConfigValueString(cfg.Section("MetaSearch").Key("Files"), strings.Join(defaultConfig.MetaSearch.Files, ",")), ",")
-	videoList := strings.Split(getConfigValueString(cfg.Section("MetaSearch").Key("Video"), strings.Join(defaultConfig.MetaSearch.Video, ",")), ",")
-
-	// Load default values for MetaSearch if they are empty
-	if isEmptyList(textList) {
-		textList = defaultConfig.MetaSearch.Text
-	}
-	if isEmptyList(imageList) {
-		imageList = defaultConfig.MetaSearch.Image
-	}
-	if isEmptyList(filesList) {
-		filesList = defaultConfig.MetaSearch.Files
-	}
-	if isEmptyList(videoList) {
-		videoList = defaultConfig.MetaSearch.Video
-	}
	// Indexing
	concurrentStandardCrawlers := getConfigValue(cfg.Section("Indexer").Key("ConcurrentStandardCrawlers"), defaultConfig.ConcurrentStandardCrawlers, strconv.Atoi)
	concurrentChromeCrawlers := getConfigValue(cfg.Section("Indexer").Key("ConcurrentChromeCrawlers"), defaultConfig.ConcurrentChromeCrawlers, strconv.Atoi)
@@ -429,34 +322,19 @@ func loadConfig() Config {
		Port:     port,
		Domain:   domain,
		LogLevel: logLevel,
-		NodeID:   nodeID,
-		Nodes:    nodes,
+		AuthCode: authCode,
+		Peers:    peers,
		NodesEnabled:      nodesEnabled,
-		MetaSearchEnabled: metaSearchEnabled,
+		CrawlerEnabled:    crawlerEnabled,
		IndexerEnabled:    indexerEnabled,
		WebsiteEnabled:    websiteEnabled,
		RamCacheEnabled:   ramCacheEnabled,
		DriveCacheEnabled: driveCacheEnabled,
-		MetaProxyEnabled:    metaProxyEnabled,
-		MetaProxyStrict:     metaProxyStrict,
-		MetaProxies:         metaProxies,
-		MetaProxyRetry:      metaProxyRetry,
-		CrawlerProxyEnabled: crawlerProxyEnabled,
-		CrawlerProxyStrict:  crawlerProxyStrict,
-		CrawlerProxies:      crawlerProxies,
-		CrawlerProxyRetry:   crawlerProxyRetry,
		ConcurrentStandardCrawlers: concurrentStandardCrawlers,
		ConcurrentChromeCrawlers:   concurrentChromeCrawlers,
		CrawlingInterval:           crawlingInterval,
		MaxPagesPerDomain:          maxPagesPerDomain,
		IndexBatchSize:             indexBatchSize,
-		LibreXInstances: searchXInstances,
-		MetaSearch: MetaSearchConfig{
-			Text:  textList,
-			Image: imageList,
-			Files: filesList,
-			Video: videoList,
-		},
		DriveCache: CacheConfig{
			Duration:      driveDuration,
			MaxUsageBytes: driveMaxUsage,
@@ -469,34 +347,6 @@ func loadConfig() Config {
	}
}

-func isEmptyList(list []string) bool {
-	return len(list) == 0 || (len(list) == 1 && strings.TrimSpace(list[0]) == "")
-}
-
-func setIfChanged(sec *ini.Section, key string, value string, defaultValue string) {
-	if value != defaultValue {
-		sec.Key(key).SetValue(value)
-	}
-}
-
-func setBoolIfChanged(sec *ini.Section, key string, value bool, defaultValue bool) {
-	if value != defaultValue {
-		sec.Key(key).SetValue(strconv.FormatBool(value))
-	}
-}
-
-func setIntIfChanged(sec *ini.Section, key string, value int, defaultValue int) {
-	if value != defaultValue {
-		sec.Key(key).SetValue(strconv.Itoa(value))
-	}
-}
-
-func setSliceIfChanged(sec *ini.Section, key string, value, defaultValue []string) {
-	if strings.Join(value, ",") != strings.Join(defaultValue, ",") {
-		sec.Key(key).SetValue(strings.Join(value, ","))
-	}
-}
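These helpers, removed on the indexing branch, write a key only when the value differs from the default, which keeps the generated config.ini minimal. A hedged sketch of the resulting behavior, assuming the gopkg.in/ini.v1 package used elsewhere in this diff:

package main

import "gopkg.in/ini.v1"

// demoMinimalINI: only values that differ from the defaults are persisted,
// so a freshly written config.ini stays small.
func demoMinimalINI() {
	cfg := ini.Empty()
	sec := cfg.Section("Indexer")
	setIntIfChanged(sec, "IndexBatchSize", 50, 50)    // equals default: key omitted
	setIntIfChanged(sec, "MaxPagesPerDomain", 25, 10) // differs: key written
	// cfg now contains only MaxPagesPerDomain = 25 under [Indexer].
}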

// getConfigValue retrieves a configuration value or returns a default value from defaultConfig.
func getConfigValue[T any](key *ini.Key, defaultValue T, parseFunc func(string) (T, error)) T {
	if key == nil || key.String() == "" {
@@ -582,6 +432,27 @@ func parseMaxUsageDrive(value string, cachePath string) uint64 {
	return 0
}

+// Get total disk space of the system where cachePath resides
+func getTotalDiskSpace(cachePath string) uint64 {
+	var stat syscall.Statfs_t
+
+	// Get filesystem stats for the cache path
+	absPath, err := filepath.Abs(cachePath)
+	if err != nil {
+		printErr("Failed to resolve absolute path for: %s", cachePath)
+		return 0
+	}
+
+	err = syscall.Statfs(absPath, &stat)
+	if err != nil {
+		printErr("Failed to retrieve filesystem stats for: %s", absPath)
+		return 0
+	}
+
+	// Total disk space in bytes
+	return stat.Blocks * uint64(stat.Bsize)
+}
+
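getTotalDiskSpace feeds parseMaxUsageDrive above; a hedged sketch of how a percentage-style value could be resolved against the disk hosting the cache. The "80%" format, the helper name, and the strconv/strings imports are assumptions for illustration:

// maxUsageFromPercent turns a value like "80%" into an absolute byte budget
// for the drive cache. Sketch only; the real parser may accept other forms.
func maxUsageFromPercent(value, cachePath string) uint64 {
	pct, err := strconv.ParseFloat(strings.TrimSuffix(value, "%"), 64)
	if err != nil || pct <= 0 || pct > 100 {
		return 0
	}
	return uint64(float64(getTotalDiskSpace(cachePath)) * pct / 100)
}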
// Helper to format bytes back to human-readable string
func formatMaxUsage(bytes uint64) string {
	const GiB = 1024 * 1024 * 1024
@@ -1,6 +1,3 @@
-//go:build experimental
-// +build experimental
-
package main

import (
@@ -35,12 +32,8 @@ func fetchPageMetadataStandard(pageURL, userAgent string) (string, string, string)

// fetchPageMetadataChrome uses Chromedp to handle JavaScript-rendered pages.
func fetchPageMetadataChrome(pageURL, userAgent string) (string, string, string) {
-	// Create a custom allocator context for Chromedp with proxy support if enabled
-	allocCtx, cancelAlloc := chromedp.NewExecAllocator(context.Background(), configureChromeOptions()...)
-	defer cancelAlloc()
-
-	// Create a browser context
-	ctx, cancel := chromedp.NewContext(allocCtx)
+	// Create context
+	ctx, cancel := chromedp.NewContext(context.Background())
	defer cancel()

	var renderedHTML string
@@ -64,36 +57,9 @@ func fetchPageMetadataChrome(pageURL, userAgent string) (string, string, string)
	return extractParsedDOM(doc)
}

-// configureChromeOptions sets up Chrome options and proxy if CrawlerProxy is enabled.
-func configureChromeOptions() []chromedp.ExecAllocatorOption {
-	options := chromedp.DefaultExecAllocatorOptions[:]
-
-	// This code is not using config.CrawlerProxyRetry
-	if config.CrawlerProxyEnabled && crawlerProxyClient != nil {
-		// Retrieve proxy settings from CrawlerProxy
-		proxy := crawlerProxyClient.GetProxy() // Ensure a `GetProxy` method is implemented for your proxy client
-		if proxy != "" {
-			options = append(options, chromedp.ProxyServer(proxy))
-			printDebug("Using CrawlerProxy for Chromedp: %s", proxy)
-		} else {
-			printWarn("CrawlerProxy is enabled but no valid proxy is available")
-		}
-	}
-
-	// // Add additional Chrome flags
-	// options = append(options,
-	// 	chromedp.Flag("headless", true),
-	// 	chromedp.Flag("disable-gpu", true),
-	// 	chromedp.Flag("no-sandbox", true),
-	// 	chromedp.Flag("disable-setuid-sandbox", true),
-	// )
-
-	return options
-}

// extractStandard does the normal HTML parse with OG, Twitter, etc.
func extractStandard(pageURL, userAgent string) (title, desc, keywords string) {
+	client := &http.Client{Timeout: 15 * time.Second}
	req, err := http.NewRequest("GET", pageURL, nil)
	if err != nil {
		printDebug("Failed to create request for %s: %v", pageURL, err)
@@ -102,8 +68,7 @@ func extractStandard(pageURL, userAgent string) (title, desc, keywords string) {
	req.Header.Set("User-Agent", userAgent)
	req.Header.Set("Accept-Language", "en-US,en;q=0.9")

-	// Use CrawlerProxy if enabled
-	resp, err := DoCrawlerProxyRequest(req)
+	resp, err := client.Do(req)
	if err != nil {
		printDebug("Failed to GET %s: %v", pageURL, err)
		return
@@ -211,6 +176,7 @@ func fallbackReadability(pageURL, userAgent, title, desc, keywords string) (string, string, string)
		return title, desc, keywords
	}

+	client := &http.Client{Timeout: 15 * time.Second}
	readReq, err := http.NewRequest("GET", pageURL, nil)
	if err != nil {
		printDebug("Failed to create fallbackReadability request: %v", err)
@@ -219,16 +185,14 @@ func fallbackReadability(pageURL, userAgent, title, desc, keywords string) (string, string, string)
	readReq.Header.Set("User-Agent", userAgent)
	readReq.Header.Set("Accept-Language", "en-US,en;q=0.9")

-	// Use CrawlerProxy if enabled
-	readResp, err := DoCrawlerProxyRequest(readReq)
-	if err != nil {
-		printDebug("go-readability GET error for %s: %v", pageURL, err)
-		return title, desc, keywords
-	}
-	if readResp.StatusCode < 200 || readResp.StatusCode >= 300 {
-		printDebug("go-readability GET returned status %d for %s", readResp.StatusCode, pageURL)
-		readResp.Body.Close() // Safely close body
+	readResp, err := client.Do(readReq)
+	if err != nil || readResp.StatusCode < 200 || readResp.StatusCode >= 300 {
+		if err != nil {
+			printDebug("go-readability GET error for %s: %v", pageURL, err)
+		}
+		if readResp != nil {
+			readResp.Body.Close()
+		}
		return title, desc, keywords
	}
	defer readResp.Body.Close()
@@ -1,6 +1,3 @@
-//go:build experimental
-// +build experimental
-
package main

import (
@@ -1,6 +1,3 @@
-//go:build experimental
-// +build experimental
-
package main

import (
27 disk.go
@@ -1,27 +0,0 @@
//go:build !windows
// +build !windows

package main

import (
	"path/filepath"
	"syscall"
)

func getTotalDiskSpace(cachePath string) uint64 {
	var stat syscall.Statfs_t

	absPath, err := filepath.Abs(cachePath)
	if err != nil {
		printErr("Failed to resolve absolute path for: %s", cachePath)
		return 0
	}

	err = syscall.Statfs(absPath, &stat)
	if err != nil {
		printErr("Failed to retrieve filesystem stats for: %s", absPath)
		return 0
	}

	return stat.Blocks * uint64(stat.Bsize)
}
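The two build-tagged variants of getTotalDiskSpace (this file and disk_win.go below) let callers stay platform-agnostic. A hedged usage sketch, reusing the printWarn/printInfo/formatMaxUsage helpers seen elsewhere in this diff:

// reportCacheDisk: the //go:build tags select the right getTotalDiskSpace
// at compile time, so this caller needs no OS checks.
func reportCacheDisk(cachePath string) {
	total := getTotalDiskSpace(cachePath)
	if total == 0 {
		printWarn("Could not determine disk size for %s", cachePath)
		return
	}
	printInfo("Cache disk size: %s", formatMaxUsage(total))
}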
36 disk_win.go
@@ -1,36 +0,0 @@
//go:build windows
// +build windows

package main

import (
	"syscall"
	"unsafe"
)

func getTotalDiskSpace(path string) uint64 {
	kernel32 := syscall.NewLazyDLL("kernel32.dll")
	getDiskFreeSpaceExW := kernel32.NewProc("GetDiskFreeSpaceExW")

	lpDirectoryName, err := syscall.UTF16PtrFromString(path)
	if err != nil {
		printErr("Failed to encode path for Windows API: %v", err)
		return 0
	}

	var freeBytesAvailable, totalNumberOfBytes, totalNumberOfFreeBytes uint64

	r1, _, err := getDiskFreeSpaceExW.Call(
		uintptr(unsafe.Pointer(lpDirectoryName)),
		uintptr(unsafe.Pointer(&freeBytesAvailable)),
		uintptr(unsafe.Pointer(&totalNumberOfBytes)),
		uintptr(unsafe.Pointer(&totalNumberOfFreeBytes)),
	)

	if r1 == 0 {
		printErr("GetDiskFreeSpaceExW failed: %v", err)
		return 0
	}

	return totalNumberOfBytes
}
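For comparison, the same Windows call can be made without the manual NewLazyDLL plumbing. A hedged sketch assuming the golang.org/x/sys/windows package (not used by this repo; shown only as an alternative):

//go:build windows

package main

import "golang.org/x/sys/windows"

// getTotalDiskSpaceX sketches the same query via x/sys/windows, which wraps
// GetDiskFreeSpaceExW with typed arguments and a normal Go error.
func getTotalDiskSpaceX(path string) uint64 {
	p, err := windows.UTF16PtrFromString(path)
	if err != nil {
		return 0
	}
	var free, total, totalFree uint64
	if err := windows.GetDiskFreeSpaceEx(p, &free, &total, &totalFree); err != nil {
		return 0
	}
	return total
}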
544 favicon.go
@@ -1,544 +0,0 @@
package main

import (
	"crypto/md5"
	"crypto/tls"
	"encoding/base64"
	"encoding/hex"
	"fmt"
	"image"
	"io"
	"net/http"
	"net/url"
	"os"
	"path/filepath"
	"strings"
	"sync"
	"time"

	"github.com/chai2010/webp"
	"golang.org/x/image/draw"
	"golang.org/x/net/html"
)

var (
	faviconCache = struct {
		sync.RWMutex
		m map[string]bool // tracks in-progress downloads
	}{m: make(map[string]bool)}

	// Common favicon paths to try
	commonFaviconPaths = []string{
		"/favicon.ico",
		"/favicon.png",
		"/favicon.jpg",
		"/favicon.jpeg",
		"/favicon.webp",
		"/apple-touch-icon.png",
		"/apple-touch-icon-precomposed.png",
	}
)

// Add this near the top with other vars
var (
	faviconDownloadQueue = make(chan faviconDownloadRequest, 1000)
)

type faviconDownloadRequest struct {
	faviconURL string
	pageURL    string
	cacheID    string
}

func init() {
	// Start 5 worker goroutines to process favicon downloads
	for i := 0; i < 5; i++ {
		go faviconDownloadWorker()
	}
}

func faviconDownloadWorker() {
	for req := range faviconDownloadQueue {
		cacheFavicon(req.faviconURL, req.cacheID)
	}
}

// Generates a cache ID from URL
func faviconIDFromURL(rawURL string) string {
	hasher := md5.New()
	hasher.Write([]byte(rawURL))
	return hex.EncodeToString(hasher.Sum(nil))
}

// Resolves favicon URL using multiple methods
func resolveFaviconURL(rawFavicon, pageURL string) (faviconURL, cacheID string) {
	cacheID = faviconIDFromURL(pageURL)

	// Handle data URLs first
	if strings.HasPrefix(rawFavicon, "data:image") {
		parts := strings.SplitN(rawFavicon, ";base64,", 2)
		if len(parts) == 2 {
			data, err := base64.StdEncoding.DecodeString(parts[1])
			if err == nil {
				hasher := md5.New()
				hasher.Write(data)
				return rawFavicon, hex.EncodeToString(hasher.Sum(nil))
			}
		}
		return "", "" // Invalid data URL
	}

	// Existing URL handling logic
	if rawFavicon != "" && strings.HasPrefix(rawFavicon, "http") {
		cacheID = faviconIDFromURL(rawFavicon)
		return rawFavicon, cacheID
	}

	parsedPage, err := url.Parse(pageURL)
	if err != nil {
		return "", ""
	}

	// Method 1: Parse HTML
	if favicon := findFaviconInHTML(pageURL); favicon != "" {
		if strings.HasPrefix(favicon, "http") {
			return favicon, faviconIDFromURL(favicon)
		}
		resolved := resolveRelativeURL(parsedPage, favicon)
		return resolved, faviconIDFromURL(resolved)
	}

	// Method 2: Common paths
	for _, path := range commonFaviconPaths {
		testURL := "https://" + parsedPage.Host + path
		if checkURLExists(testURL) {
			return testURL, faviconIDFromURL(testURL)
		}
	}

	// Method 3: HTTP headers
	if headerIcon := findFaviconInHeaders(pageURL); headerIcon != "" {
		if strings.HasPrefix(headerIcon, "http") {
			return headerIcon, faviconIDFromURL(headerIcon)
		}
		resolved := resolveRelativeURL(parsedPage, headerIcon)
		return resolved, faviconIDFromURL(resolved)
	}

	// Fallback
	fallbackURL := "https://" + parsedPage.Host + "/favicon.ico"
	return fallbackURL, faviconIDFromURL(fallbackURL)
}

// Checks HTTP headers for favicon links
func findFaviconInHeaders(pageURL string) string {
	client := &http.Client{
		Timeout: 3 * time.Second, // 3 seconds should be enough for a favicon
		Transport: &http.Transport{
			TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
		},
	}

	req, err := http.NewRequest("HEAD", pageURL, nil)
	if err != nil {
		return ""
	}

	// Add User-Agent
	userAgent, err := GetUserAgent("findFaviconInHeaders")
	if err != nil {
		printWarn("Error getting User-Agent: %v", err)
	}
	req.Header.Set("User-Agent", userAgent)

	resp, err := client.Do(req)
	if err != nil {
		return ""
	}
	defer resp.Body.Close()

	// Check Link headers (common for favicons)
	if links, ok := resp.Header["Link"]; ok {
		for _, link := range links {
			parts := strings.Split(link, ";")
			if len(parts) < 2 {
				continue
			}

			urlPart := strings.TrimSpace(parts[0])
			if !strings.HasPrefix(urlPart, "<") || !strings.HasSuffix(urlPart, ">") {
				continue
			}

			urlPart = urlPart[1 : len(urlPart)-1] // Remove < and >
			for _, part := range parts[1:] {
				part = strings.TrimSpace(part)
				if strings.EqualFold(part, `rel="icon"`) ||
					strings.EqualFold(part, `rel=icon`) ||
					strings.EqualFold(part, `rel="shortcut icon"`) ||
					strings.EqualFold(part, `rel=shortcut icon`) {
					return urlPart
				}
			}
		}
	}

	return ""
}

// Helper to resolve relative URLs
func resolveRelativeURL(base *url.URL, relative string) string {
	if strings.HasPrefix(relative, "http") {
		return relative
	}
	if strings.HasPrefix(relative, "//") {
		return base.Scheme + ":" + relative
	}
	if strings.HasPrefix(relative, "/") {
		return base.Scheme + "://" + base.Host + relative
	}
	return base.Scheme + "://" + base.Host + base.Path + "/" + relative
}

// Checks if a URL exists (returns 200 OK)
func checkURLExists(url string) bool {
	client := &http.Client{
		Timeout: 5 * time.Second,
		Transport: &http.Transport{
			TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
		},
	}
	req, err := http.NewRequest("HEAD", url, nil)
	if err != nil {
		return false
	}

	// Add User-Agent
	userAgent, err := GetUserAgent("Text-Search-Brave")
	if err != nil {
		printWarn("Error getting User-Agent: %v", err)
	}
	req.Header.Set("User-Agent", userAgent)

	resp, err := client.Do(req)
	if err != nil {
		return false
	}
	resp.Body.Close()
	return resp.StatusCode == http.StatusOK
}

// Fetches HTML and looks for favicon links
func findFaviconInHTML(pageURL string) string {
	client := &http.Client{
		Timeout: 10 * time.Second,
		Transport: &http.Transport{
			TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
		},
	}

	req, err := http.NewRequest("GET", pageURL, nil)
	if err != nil {
		return ""
	}

	// Add User-Agent
	userAgent, err := GetUserAgent("findFaviconInHTML")
	if err != nil {
		printWarn("Error getting User-Agent: %v", err)
	}
	req.Header.Set("User-Agent", userAgent)

	resp, err := client.Do(req)
	if err != nil {
		return ""
	}
	defer resp.Body.Close()

	// Check if this is an AMP page
	isAMP := false
	for _, attr := range resp.Header["Link"] {
		if strings.Contains(attr, "rel=\"amphtml\"") {
			isAMP = true
			break
		}
	}

	// Parse HTML
	doc, err := html.Parse(resp.Body)
	if err != nil {
		return ""
	}

	var faviconURL string
	var findLinks func(*html.Node)
	findLinks = func(n *html.Node) {
		if n.Type == html.ElementNode && n.Data == "link" {
			var rel, href string
			for _, attr := range n.Attr {
				switch attr.Key {
				case "rel":
					rel = attr.Val
				case "href":
					href = attr.Val
				}
			}

			// Prioritize different favicon types
			if href != "" {
				switch rel {
				case "icon", "shortcut icon", "apple-touch-icon", "apple-touch-icon-precomposed":
					// For AMP pages, prefer the non-versioned URL if possible
					if isAMP {
						if u, err := url.Parse(href); err == nil {
							u.RawQuery = "" // Remove query parameters
							href = u.String()
						}
					}
					if faviconURL == "" || // First found
						rel == "apple-touch-icon" || // Prefer apple-touch-icon
						rel == "icon" { // Then regular icon
						faviconURL = href
					}
				}
			}
		}
		for c := n.FirstChild; c != nil; c = c.NextSibling {
			findLinks(c)
		}
	}
	findLinks(doc)

	return faviconURL
}

func getFaviconProxyURL(rawFavicon, pageURL string) string {
	if pageURL == "" {
		return "/static/images/globe.svg"
	}

	cacheID := faviconIDFromURL(pageURL)
	filename := fmt.Sprintf("%s_icon.webp", cacheID)
	cachedPath := filepath.Join(config.DriveCache.Path, "images", filename)

	if _, err := os.Stat(cachedPath); err == nil {
		return fmt.Sprintf("/image/%s_icon.webp", cacheID)
	}

	// Resolve URL
	faviconURL, _ := resolveFaviconURL(rawFavicon, pageURL)
	if faviconURL == "" {
		recordInvalidImageID(cacheID)
		return "/static/images/globe.svg"
	}

	// Check if already downloading
	faviconCache.RLock()
	downloading := faviconCache.m[cacheID]
	faviconCache.RUnlock()

	if !downloading {
		faviconCache.Lock()
		faviconCache.m[cacheID] = true
		faviconCache.Unlock()

		// Send to download queue instead of starting goroutine
		faviconDownloadQueue <- faviconDownloadRequest{
			faviconURL: faviconURL,
			pageURL:    pageURL,
			cacheID:    cacheID,
		}
	}

	return fmt.Sprintf("/image/%s_icon.webp", cacheID)
}

// Caches favicon, always saving *_icon.webp
func cacheFavicon(imageURL, imageID string) (string, bool, error) {
	// if imageURL == "" {
	// 	recordInvalidImageID(imageID)
	// 	return "", false, fmt.Errorf("empty image URL for image ID %s", imageID)
	// }

	// Debug
	printDebug("Downloading favicon ID: %s\n", imageID)

	filename := fmt.Sprintf("%s_icon.webp", imageID)
	imageCacheDir := filepath.Join(config.DriveCache.Path, "images")
	if err := os.MkdirAll(imageCacheDir, 0755); err != nil {
		return "", false, fmt.Errorf("couldn't create images folder: %v", err)
	}
	cachedImagePath := filepath.Join(imageCacheDir, filename)
	tempImagePath := cachedImagePath + ".tmp"

	// Already cached?
	if _, err := os.Stat(cachedImagePath); err == nil {
		return cachedImagePath, true, nil
	}

	cachingImagesMu.Lock()
	if _, exists := cachingImages[imageURL]; !exists {
		cachingImages[imageURL] = &sync.Mutex{}
	}
	mu := cachingImages[imageURL]
	cachingImagesMu.Unlock()

	mu.Lock()
	defer mu.Unlock()

	// Recheck after lock
	if _, err := os.Stat(cachedImagePath); err == nil {
		return cachedImagePath, true, nil
	}

	cachingSemaphore <- struct{}{}
	defer func() { <-cachingSemaphore }()

	var data []byte
	var contentType string

	// Handle data URLs
	if strings.HasPrefix(imageURL, "data:") {
		commaIndex := strings.Index(imageURL, ",")
		if commaIndex == -1 {
			recordInvalidImageID(imageID)
			return "", false, fmt.Errorf("invalid data URL: no comma")
		}
		headerPart := imageURL[:commaIndex]
		dataPart := imageURL[commaIndex+1:]

		mediaType := "text/plain"
		base64Encoded := false
		if strings.HasPrefix(headerPart, "data:") {
			mediaTypePart := headerPart[5:]
			mediaTypeParts := strings.SplitN(mediaTypePart, ";", 2)
			mediaType = mediaTypeParts[0]
			if len(mediaTypeParts) > 1 {
				for _, param := range strings.Split(mediaTypeParts[1], ";") {
					param = strings.TrimSpace(param)
					if param == "base64" {
						base64Encoded = true
					}
				}
			}
		}

		if base64Encoded {
			data, _ = base64.StdEncoding.DecodeString(dataPart)
		} else {
			decodedStr, err := url.QueryUnescape(dataPart)
			if err != nil {
				data = []byte(dataPart)
			} else {
				data = []byte(decodedStr)
			}
		}

		contentType = mediaType
	} else {
		// Download from HTTP URL
		client := &http.Client{
			Timeout: 15 * time.Second,
			Transport: &http.Transport{
				TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
			},
		}

		req, err := http.NewRequest("GET", imageURL, nil)
		if err != nil {
			recordInvalidImageID(imageID)
			return "", false, err
		}

		// Add User-Agent
		userAgent, err := GetUserAgent("Text-Search-Brave")
		if err != nil {
			printWarn("Error getting User-Agent: %v", err)
		}
		req.Header.Set("User-Agent", userAgent)

		resp, err := client.Do(req)
		if err != nil {
			recordInvalidImageID(imageID)
			return "", false, err
		}
		defer resp.Body.Close()

		data, err = io.ReadAll(resp.Body)
		if err != nil {
			recordInvalidImageID(imageID)
			return "", false, err
		}

		contentType = http.DetectContentType(data)
	}

	if !strings.HasPrefix(contentType, "image/") {
		recordInvalidImageID(imageID)
		return "", false, fmt.Errorf("URL did not return an image: %s", imageURL)
	}

	// SVG special case
	if contentType == "image/svg+xml" {
		err := os.WriteFile(tempImagePath, data, 0644)
		if err != nil {
			recordInvalidImageID(imageID)
			return "", false, err
		}
		err = os.Rename(tempImagePath, cachedImagePath)
		if err != nil {
			recordInvalidImageID(imageID)
			return "", false, err
		}
		cachingImagesMu.Lock()
		delete(cachingImages, imageURL)
		cachingImagesMu.Unlock()
		return cachedImagePath, true, nil
	}

	// Decode image
	img, err := safeDecodeImage(contentType, data)
	if err != nil {
		printErr("Failed to decode favicon: %s [%s] (%v)", imageURL, imageID, err)
		recordInvalidImageID(imageID)
		return "", false, err
	}

	// Resize
	maxSize := 16
	width := img.Bounds().Dx()
	height := img.Bounds().Dy()

	if width > maxSize || height > maxSize {
		dst := image.NewRGBA(image.Rect(0, 0, maxSize, maxSize))
		draw.ApproxBiLinear.Scale(dst, dst.Bounds(), img, img.Bounds(), draw.Over, nil)
		img = dst
	}

	// Save as WebP
	outFile, err := os.Create(tempImagePath)
	if err != nil {
		recordInvalidImageID(imageID)
		return "", false, err
	}
	defer outFile.Close()

	options := &webp.Options{Lossless: false, Quality: 80}
	err = webp.Encode(outFile, img, options)
	if err != nil {
		recordInvalidImageID(imageID)
		return "", false, err
	}

	err = os.Rename(tempImagePath, cachedImagePath)
	if err != nil {
		recordInvalidImageID(imageID)
		return "", false, err
	}

	cachingImagesMu.Lock()
	delete(cachingImages, imageURL)
	cachingImagesMu.Unlock()

	return cachedImagePath, true, nil
}
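The md5-based cache ID above makes favicon filenames deterministic, so every lookup and download for the same page converges on one file. A brief sketch of the mapping:

// demoFaviconID: the same page URL always yields the same cached name.
func demoFaviconID() {
	id := faviconIDFromURL("https://example.com/") // 32-char hex md5 digest
	name := fmt.Sprintf("%s_icon.webp", id)        // served as /image/<id>_icon.webp
	_ = name
}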
107 files-nyaa.go
@@ -1,107 +0,0 @@
package main

import (
	"fmt"
	"net/http"
	"net/url"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

const NYAA_DOMAIN = "nyaa.si"

type Nyaa struct{}

func NewNyaa() *Nyaa {
	return &Nyaa{}
}

func (n *Nyaa) Name() string {
	return "nyaa"
}

func (n *Nyaa) getCategoryCode(cat string) string {
	switch cat {
	case "all":
		return ""
	case "anime":
		return "&c=1_0"
	case "music":
		return "&c=2_0"
	case "game":
		return "&c=6_2"
	case "software":
		return "&c=6_1"
	default:
		return "ignore"
	}
}

func (n *Nyaa) Search(query string, category string) ([]TorrentResult, error) {
	categoryCode := n.getCategoryCode(category)
	if categoryCode == "ignore" {
		return []TorrentResult{}, nil
	}

	searchURL := fmt.Sprintf("https://%s/?f=0&q=%s%s", NYAA_DOMAIN, url.QueryEscape(query), categoryCode)

	userAgent, err := GetUserAgent("files-nyaa")
	if err != nil {
		return nil, err
	}

	req, err := http.NewRequest("GET", searchURL, nil)
	if err != nil {
		return nil, err
	}
	req.Header.Set("User-Agent", userAgent)

	resp, err := DoMetaProxyRequest(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("unexpected status: %d", resp.StatusCode)
	}

	doc, err := goquery.NewDocumentFromReader(resp.Body)
	if err != nil {
		return nil, err
	}

	var results []TorrentResult
	doc.Find(".default, .success, .danger").Each(func(i int, s *goquery.Selection) {
		tds := s.Find("td")
		if tds.Length() < 7 {
			return
		}

		title := tds.Eq(1).Find("a").Last().Text()
		magnet, _ := tds.Eq(2).Find("a").Last().Attr("href")
		sizeStr := strings.TrimSpace(tds.Eq(3).Text())
		byteSize := parseSize(sizeStr)

		seeders := parseInt(tds.Eq(5).Text())
		leechers := parseInt(tds.Eq(6).Text())

		results = append(results, TorrentResult{
			URL:      "https://" + NYAA_DOMAIN,
			Title:    title,
			Magnet:   applyTrackers(magnet),
			Size:     formatSize(byteSize),
			Seeders:  seeders,
			Leechers: leechers,
			Views:    0,
		})
	})

	// Reverse the results slice so it runs newest to oldest; the source ordering is still somewhat arbitrary
	for i, j := 0, len(results)-1; i < j; i, j = i+1, j-1 {
		results[i], results[j] = results[j], results[i]
	}

	return results, nil
}
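The reversal at the end is the standard in-place swap. A generic helper, assuming Go 1.18+ generics, expresses the same idea reusably across result types:

// reverse flips a slice in place; equivalent to the loop above.
func reverse[T any](s []T) {
	for i, j := 0, len(s)-1; i < j; i, j = i+1, j-1 {
		s[i], s[j] = s[j], s[i]
	}
}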
@@ -57,34 +57,31 @@ func (t *ThePirateBay) Search(query string, category string) ([]TorrentResult, error)
		return []TorrentResult{}, nil
	}

-	searchURL := fmt.Sprintf("https://%s/q.php?q=%s&cat=%s", PIRATEBAY_DOMAIN, url.QueryEscape(query), categoryCode)
+	url := fmt.Sprintf("https://%s/q.php?q=%s&cat=%s", PIRATEBAY_DOMAIN, url.QueryEscape(query), categoryCode)

	// User Agent generation
	userAgent, err := GetUserAgent("files-tpb")
	if err != nil {
-		return nil, fmt.Errorf("error generating User-Agent: %w", err)
+		fmt.Println("Error:", err)
+		return nil, err
	}

-	req, err := http.NewRequest("GET", searchURL, nil)
+	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
-		return nil, fmt.Errorf("error creating request: %w", err)
+		return nil, err
	}
	req.Header.Set("User-Agent", userAgent)

-	// Perform the request using MetaProxy if enabled
-	resp, err := DoMetaProxyRequest(req)
+	client := &http.Client{}
+	response, err := client.Do(req)
	if err != nil {
-		return nil, fmt.Errorf("error making request to The Pirate Bay: %w", err)
-	}
-	defer resp.Body.Close()
-
-	if resp.StatusCode != http.StatusOK {
-		return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
+		return nil, err
	}
+	defer response.Body.Close()

	var torrentData []map[string]interface{}
-	if err := json.NewDecoder(resp.Body).Decode(&torrentData); err != nil {
-		return nil, fmt.Errorf("error decoding response JSON: %w", err)
+	if err := json.NewDecoder(response.Body).Decode(&torrentData); err != nil {
+		return nil, err
	}

	var results []TorrentResult
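The main side wraps failures with %w, which keeps the underlying cause inspectable by callers. A short sketch of why that matters:

package main

import (
	"errors"
	"fmt"
	"net"
)

// isTimeout: because Search wraps errors with %w, a caller can still
// detect the concrete cause, e.g. a network timeout, through the chain.
func isTimeout(err error) bool {
	var netErr net.Error
	return errors.As(err, &netErr) && netErr.Timeout()
}

func demoWrap(cause error) {
	wrapped := fmt.Errorf("error making request to The Pirate Bay: %w", cause)
	fmt.Println(errors.Unwrap(wrapped) == cause) // true
}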
@@ -62,17 +62,18 @@ func (tg *TorrentGalaxy) Search(query string, category string) ([]TorrentResult, error)
	// User Agent generation
	userAgent, err := GetUserAgent("files-torrentgalaxy")
	if err != nil {
-		return nil, fmt.Errorf("error generating User-Agent: %w", err)
+		fmt.Println("Error:", err)
+		return nil, err
	}

	req, err := http.NewRequest("GET", searchURL, nil)
	if err != nil {
-		return nil, fmt.Errorf("error creating request: %w", err)
+		return nil, err
	}
	req.Header.Set("User-Agent", userAgent)

-	// Perform the request using MetaProxy if enabled
-	resp, err := DoMetaProxyRequest(req)
+	client := &http.Client{}
+	resp, err := client.Do(req)
	if err != nil {
		return nil, fmt.Errorf("error making request to TorrentGalaxy: %w", err)
	}
79 files.go
@@ -28,26 +28,14 @@ var (
	rutor TorrentSite
)

-func initFileEngines() {
-
-	torrentGalaxy = nil
-	thePirateBay = nil
-	nyaa = nil
-	// rutor = nil
-
-	for _, engineName := range config.MetaSearch.Files {
-		switch engineName {
-		case "TorrentGalaxy":
-			torrentGalaxy = NewTorrentGalaxy()
-		case "ThePirateBay":
-			thePirateBay = NewThePirateBay()
-		case "Nyaa":
-			nyaa = NewNyaa()
-			// case "Rutor":
-			// 	rutor = NewRutor()
-		}
-	}
-}
+var fileResultsChan = make(chan []TorrentResult)
+
+func init() {
+	torrentGalaxy = NewTorrentGalaxy()
+	// nyaa = NewNyaa()
+	thePirateBay = NewThePirateBay()
+	// rutor = NewRutor()
+}

func handleFileSearch(w http.ResponseWriter, settings UserSettings, query string, page int) {
	startTime := time.Now()
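On the main side the engines are rebuilt from configuration rather than hard-coded in init(). A hedged sketch of the intended startup order; the config global and loadConfig follow the names used elsewhere in this diff:

// startupFileEngines: initFileEngines only instantiates the engines named
// in config.MetaSearch.Files, so config.ini edits apply after a restart.
func startupFileEngines() {
	config = loadConfig() // assumed global, as elsewhere in this diff
	initFileEngines()     // e.g. Files: ["TorrentGalaxy", "ThePirateBay", "Nyaa"]
}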
@@ -64,7 +52,7 @@ func handleFileSearch(w http.ResponseWriter, settings UserSettings, query string, page int) {
	data := map[string]interface{}{
		"Results":  combinedResults,
		"Query":    query,
-		"Fetched":  FormatElapsedTime(elapsedTime),
+		"Fetched":  fmt.Sprintf("%.2f %s", elapsedTime.Seconds(), Translate("seconds")), // Time for fetching results
		"Category": "all",
		"Sort":     "seed",
		"Page":     page,
@@ -100,7 +88,7 @@ func getFileResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string,
	case results := <-cacheChan:
		if results == nil {
			// Fetch only if the cache miss occurs and Crawler is enabled
-			if config.MetaSearchEnabled {
+			if config.CrawlerEnabled {
				combinedResults = fetchFileResults(query, safe, lang, page)
				if len(combinedResults) > 0 {
					resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
@@ -109,12 +97,12 @@ func getFileResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string,
				printDebug("Crawler disabled; skipping fetching.")
			}
		} else {
-			_, torrentResults, _, _, _ := convertToSpecificResults(results)
+			_, torrentResults, _, _ := convertToSpecificResults(results)
			combinedResults = torrentResults
		}
	case <-time.After(2 * time.Second):
		printDebug("Cache check timeout")
-		if config.MetaSearchEnabled {
+		if config.CrawlerEnabled {
			combinedResults = fetchFileResults(query, safe, lang, page)
			if len(combinedResults) > 0 {
				resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
@@ -129,13 +117,13 @@ func getFileResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string,

func fetchFileResults(query, safe, lang string, page int) []TorrentResult {
	// If Crawler is disabled, skip fetching from torrent sites
-	if !config.MetaSearchEnabled {
+	if !config.CrawlerEnabled {
		printInfo("Crawler is disabled; skipping torrent site fetching.")
		return []TorrentResult{}
	}

	sites := []TorrentSite{torrentGalaxy, nyaa, thePirateBay, rutor}
-	var results []TorrentResult
+	results := []TorrentResult{}

	for _, site := range sites {
		if site == nil {
@@ -152,6 +140,11 @@ func fetchFileResults(query, safe, lang string, page int) []TorrentResult {
		}
	}

+	if len(results) == 0 {
+		printWarn("No file results found for query: %s, trying other nodes", query)
+		results = tryOtherNodesForFileSearch(query, safe, lang, page, []string{hostID})
+	}
+
	return results
}
@@ -174,34 +167,33 @@ func parseSize(sizeStr string) int64 {
		return 0
	}

-	re := regexp.MustCompile(`(?i)([\d.]+)\s*(K?M?G?T?i?B)`)
+	// Use regex to extract numeric value and unit separately
+	re := regexp.MustCompile(`(?i)([\d.]+)\s*([KMGT]?B)`)
	matches := re.FindStringSubmatch(sizeStr)
	if len(matches) < 3 {
		printWarn("Error parsing size: invalid format %s", sizeStr)
		return 0
	}

-	numStr := matches[1]
+	sizeStr = matches[1]
	unit := strings.ToUpper(matches[2])

	var multiplier int64 = 1
	switch unit {
-	case "B":
-		multiplier = 1
-	case "KB", "KIB":
+	case "KB":
		multiplier = 1024
-	case "MB", "MIB":
+	case "MB":
		multiplier = 1024 * 1024
-	case "GB", "GIB":
+	case "GB":
		multiplier = 1024 * 1024 * 1024
-	case "TB", "TIB":
+	case "TB":
		multiplier = 1024 * 1024 * 1024 * 1024
	default:
		printWarn("Unknown unit: %s", unit)
		return 0
	}

-	size, err := strconv.ParseFloat(numStr, 64)
+	size, err := strconv.ParseFloat(sizeStr, 64)
	if err != nil {
		printWarn("Error parsing size: %v", err)
		return 0
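A worked example of the two size helpers on main's side (the tail of parseSize is not shown in this hunk, so the exact truncation behavior is assumed); formatSize appears in the next hunk:

func demoSizes() {
	b := parseSize("1.4 GiB") // 1.4 * 1024^3 = 1503238553.6, roughly 1503238553 bytes
	_ = b
	s := formatSize(1503238553) // main's loop lands on exp=2 ("G"): "1.4 GiB"
	_ = s
}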
@@ -227,17 +219,17 @@ func applyTrackers(magnetLink string) string {
	}

func formatSize(size int64) string {
-	const unit = 1024
-	if size < unit {
-		return fmt.Sprintf("%d B", size)
-	}
-	div, exp := unit, 0
-	for n := size / unit; n >= unit; n /= unit {
-		div *= unit
-		exp++
-	}
-	return fmt.Sprintf("%.1f %siB", float64(size)/float64(div), []string{"K", "M", "G", "T", "P", "E"}[exp])
-}
+	if size >= 1024*1024*1024*1024 {
+		return fmt.Sprintf("%.2f TB", float64(size)/(1024*1024*1024*1024))
+	} else if size >= 1024*1024*1024 {
+		return fmt.Sprintf("%.2f GB", float64(size)/(1024*1024*1024))
+	} else if size >= 1024*1024 {
+		return fmt.Sprintf("%.2f MB", float64(size)/(1024*1024))
+	} else if size >= 1024 {
+		return fmt.Sprintf("%.2f KB", float64(size)/1024)
+	}
+	return fmt.Sprintf("%d B", size)
+}

func sanitizeFileName(name string) string {
	// Replace spaces with dashes
@@ -246,3 +238,12 @@ func sanitizeFileName(name string) string {
	sanitized = regexp.MustCompile(`[^a-zA-Z0-9\-\(\)]`).ReplaceAllString(sanitized, "")
	return sanitized
}
+
+func contains(slice []string, item string) bool {
+	for _, v := range slice {
+		if v == item {
+			return true
+		}
+	}
+	return false
+}
forums.go (57 changes)
@@ -3,13 +3,14 @@ package main
 import (
 	"encoding/json"
 	"fmt"
+	"math"
 	"net/http"
 	"net/url"
 	"time"
 )

 func PerformRedditSearch(query string, safe string, page int) ([]ForumSearchResult, error) {
-	if !config.MetaSearchEnabled {
+	if !config.CrawlerEnabled {
 		printDebug("Crawler is disabled; skipping forum search.")
 		return []ForumSearchResult{}, nil
 	}
@@ -17,43 +18,39 @@ func PerformRedditSearch(query string, safe string, page int) ([]ForumSearchResu
 	const (
 		pageSize = 25
 		baseURL  = "https://www.reddit.com"
+		maxRetries     = 5
+		initialBackoff = 2 * time.Second
 	)

 	var results []ForumSearchResult
-	offset := page * pageSize
-	searchURL := fmt.Sprintf("%s/search.json?q=%s&limit=%d&start=%d",
-		baseURL,
-		url.QueryEscape(query),
-		pageSize,
-		offset,
-	)

-	// Create request
-	req, err := http.NewRequest("GET", searchURL, nil)
+	searchURL := fmt.Sprintf("%s/search.json?q=%s&limit=%d&start=%d", baseURL, url.QueryEscape(query), pageSize, page*pageSize)
+	var resp *http.Response
+	var err error

+	// Retry logic with exponential backoff
+	for i := 0; i <= maxRetries; i++ {
+		resp, err = http.Get(searchURL)
 		if err != nil {
-			return nil, fmt.Errorf("creating request: %v", err)
+			return nil, fmt.Errorf("making request: %v", err)
 		}
+		if resp.StatusCode != http.StatusTooManyRequests {
+			break
+		}

-	// Set User-Agent
-	userAgent, uaErr := GetUserAgent("Reddit-Forum-Search")
-	if uaErr != nil {
-		return nil, fmt.Errorf("getting user agent: %v", uaErr)
-	}
-	req.Header.Set("User-Agent", userAgent)
+		// Wait for some time before retrying
+		backoff := time.Duration(math.Pow(2, float64(i))) * initialBackoff
+		time.Sleep(backoff)
+	}

-	// Make request using MetaProxy logic
-	resp, err := DoMetaProxyRequest(req)
 	if err != nil {
 		return nil, fmt.Errorf("making request: %v", err)
 	}
 	defer resp.Body.Close()

-	// Validate response status
 	if resp.StatusCode != http.StatusOK {
 		return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
 	}

-	// Parse JSON response
 	var searchResults map[string]interface{}
 	if err := json.NewDecoder(resp.Body).Decode(&searchResults); err != nil {
 		return nil, fmt.Errorf("decoding response: %v", err)
@@ -69,9 +66,9 @@ func PerformRedditSearch(query string, safe string, page int) ([]ForumSearchResu
 		return nil, fmt.Errorf("no children field in data")
 	}

-	// Extract search results
 	for _, post := range posts {
 		postData := post.(map[string]interface{})["data"].(map[string]interface{})

 		if safe == "active" && postData["over_18"].(bool) {
 			continue
 		}
@@ -81,7 +78,6 @@ func PerformRedditSearch(query string, safe string, page int) ([]ForumSearchResu
 		if len(description) > 500 {
 			description = description[:500] + "..."
 		}

 		publishedDate := time.Unix(int64(postData["created_utc"].(float64)), 0)
 		permalink := postData["permalink"].(string)
 		resultURL := fmt.Sprintf("%s%s", baseURL, permalink)
@@ -120,7 +116,7 @@ func handleForumsSearch(w http.ResponseWriter, settings UserSettings, query stri
 		"Query":       query,
 		"Results":     results,
 		"Page":        page,
-		"Fetched":     FormatElapsedTime(elapsedTime),
+		"Fetched":     fmt.Sprintf("%.2f %s", elapsedTime.Seconds(), Translate("seconds")), // Time for fetching results
 		"HasPrevPage": page > 1,
 		"HasNextPage": len(results) >= 25,
 		"NoResults":   len(results) == 0,
@@ -135,15 +131,6 @@ func handleForumsSearch(w http.ResponseWriter, settings UserSettings, query stri
 	renderTemplate(w, "forums.html", data)
 }

-func fetchForumResults(query, safe, lang string, page int) []ForumSearchResult {
-	results, err := PerformRedditSearch(query, safe, page)
-	if err != nil {
-		printWarn("Failed to fetch forum results: %v", err)
-		return nil
-	}
-	return results
-}
-
 func getForumResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page int) []ForumSearchResult {
 	cacheChan := make(chan []SearchResult)
 	var combinedResults []ForumSearchResult
@@ -163,7 +150,7 @@ func getForumResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
 	case results := <-cacheChan:
 		if results == nil {
 			// Fetch only if the cache miss occurs and Crawler is enabled
-			if config.MetaSearchEnabled {
+			if config.CrawlerEnabled {
 				combinedResults = fetchForumResults(query, safe, lang, page)
 				if len(combinedResults) > 0 {
 					resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
@@ -177,7 +164,7 @@ func getForumResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
 		}
 	case <-time.After(2 * time.Second):
 		printDebug("Cache check timeout")
-		if config.MetaSearchEnabled {
+		if config.CrawlerEnabled {
 			combinedResults = fetchForumResults(query, safe, lang, page)
 			if len(combinedResults) > 0 {
 				resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
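Review note on the retry logic added in PerformRedditSearch: the schedule is 2^i * initialBackoff, i.e. 2s, 4s, 8s, 16s, 32s and 64s, a worst-case 126 seconds of sleeping inside one search request. As written, the loop also sleeps once more after the final attempt and does not close the bodies of the 429 responses it retries past; both look like cheap fixes if this branch lands. Standalone schedule check:

    package main

    import (
    	"fmt"
    	"math"
    	"time"
    )

    func main() {
    	const (
    		maxRetries     = 5
    		initialBackoff = 2 * time.Second
    	)
    	var total time.Duration
    	for i := 0; i <= maxRetries; i++ {
    		backoff := time.Duration(math.Pow(2, float64(i))) * initialBackoff
    		total += backoff
    		fmt.Printf("attempt %d: wait %v\n", i, backoff)
    	}
    	fmt.Println("worst-case total sleep:", total) // 2m6s
    }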
go.mod (3 changes)
@@ -17,7 +17,6 @@ require (
 	github.com/blevesearch/bleve/v2 v2.4.4
 	github.com/chromedp/cdproto v0.0.0-20241022234722-4d5d5faf59fb
 	github.com/chromedp/chromedp v0.11.2
-	github.com/fyne-io/image v0.1.1
 	github.com/go-shiori/go-readability v0.0.0-20241012063810-92284fa8a71f
 	golang.org/x/net v0.33.0
 )
@@ -56,11 +55,11 @@ require (
 	github.com/golang/snappy v0.0.4 // indirect
 	github.com/josharian/intern v1.0.0 // indirect
 	github.com/json-iterator/go v1.1.12 // indirect
-	github.com/jsummers/gobmp v0.0.0-20230614200233-a9de23ed2e25 // indirect
 	github.com/mailru/easyjson v0.7.7 // indirect
 	github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
 	github.com/modern-go/reflect2 v1.0.2 // indirect
 	github.com/mschoch/smat v0.2.0 // indirect
+	github.com/stretchr/testify v1.9.0 // indirect
 	github.com/yusufpapurcu/wmi v1.2.4 // indirect
 	go.etcd.io/bbolt v1.3.11 // indirect
 	golang.org/x/sys v0.28.0 // indirect
go.sum (8 changes)
@@ -56,8 +56,6 @@ github.com/chromedp/sysutil v1.1.0/go.mod h1:WiThHUdltqCNKGc4gaU50XgYjwjYIhKWoHG
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
-github.com/fyne-io/image v0.1.1 h1:WH0z4H7qfvNUw5l4p3bC1q70sa5+YWVt6HCj7y4VNyA=
-github.com/fyne-io/image v0.1.1/go.mod h1:xrfYBh6yspc+KjkgdZU/ifUC9sPA5Iv7WYUBzQKK7JM=
 github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0=
 github.com/go-ole/go-ole v1.3.0 h1:Dt6ye7+vXGIKZ7Xtk4s6/xVdGDQynvom7xCFEdWr6uE=
 github.com/go-ole/go-ole v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78=
@@ -86,8 +84,6 @@ github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8Hm
 github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
 github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
 github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
-github.com/jsummers/gobmp v0.0.0-20230614200233-a9de23ed2e25 h1:YLvr1eE6cdCqjOe972w/cYF+FjW34v27+9Vo5106B4M=
-github.com/jsummers/gobmp v0.0.0-20230614200233-a9de23ed2e25/go.mod h1:kLgvv7o6UM+0QSf0QjAse3wReFDsb9qbZJdfexWlrQw=
 github.com/ledongthuc/pdf v0.0.0-20220302134840-0c2507a12d80 h1:6Yzfa6GP0rIo/kULo2bwGEkFvCePZ3qHDDTC3/J9Swo=
 github.com/ledongthuc/pdf v0.0.0-20220302134840-0c2507a12d80/go.mod h1:imJHygn/1yfhB7XSJJKlFZKl/J+dCPAknuiaGOshXAs=
 github.com/leonelquinteros/gotext v1.7.0 h1:jcJmF4AXqyamP7vuw2MMIKs+O3jAEmvrc5JQiI8Ht/8=
@@ -115,8 +111,8 @@ github.com/shirou/gopsutil v3.21.11+incompatible/go.mod h1:5b4v6he4MtMOwMlS0TUMT
 github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
 github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
 github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
-github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
-github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
+github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
+github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
 github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
 github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0=
 github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0=
ia-calc.go (82 lines, deleted)
@@ -1,82 +0,0 @@
-package main
-
-import (
-	"fmt"
-	"regexp"
-	"strconv"
-	"strings"
-)
-
-// Enhanced math expression parser
-func parseMathExpression(query string) (string, bool) {
-	// Clean and normalize the expression
-	query = strings.ReplaceAll(query, " ", "")
-	query = strings.ReplaceAll(query, ",", "")
-
-	// Regex to match valid math expressions
-	mathRegex := regexp.MustCompile(`^\d+(\.\d+)?([\+\-\*/\^]\d+(\.\d+)?)+$`)
-	if !mathRegex.MatchString(query) {
-		return "", false
-	}
-
-	// Operator precedence handling
-	operators := []struct {
-		symbol string
-		apply  func(float64, float64) float64
-	}{
-		{"^", func(a, b float64) float64 {
-			result := 1.0
-			for i := 0; i < int(b); i++ {
-				result *= a
-			}
-			return result
-		}},
-		{"*", func(a, b float64) float64 { return a * b }},
-		{"/", func(a, b float64) float64 { return a / b }},
-		{"+", func(a, b float64) float64 { return a + b }},
-		{"-", func(a, b float64) float64 { return a - b }},
-	}
-
-	// Parse numbers and operators
-	var tokens []interface{}
-	current := ""
-	for _, char := range query {
-		if char >= '0' && char <= '9' || char == '.' {
-			current += string(char)
-		} else {
-			if current != "" {
-				num, _ := strconv.ParseFloat(current, 64)
-				tokens = append(tokens, num)
-				current = ""
-			}
-			tokens = append(tokens, string(char))
-		}
-	}
-	if current != "" {
-		num, _ := strconv.ParseFloat(current, 64)
-		tokens = append(tokens, num)
-	}
-
-	// Evaluate expression with operator precedence
-	for _, op := range operators {
-		for i := 1; i < len(tokens)-1; i += 2 {
-			if operator, ok := tokens[i].(string); ok && operator == op.symbol {
-				left := tokens[i-1].(float64)
-				right := tokens[i+1].(float64)
-				result := op.apply(left, right)
-
-				// Update tokens
-				tokens = append(tokens[:i-1], tokens[i+2:]...)
-				tokens = append(tokens[:i-1], append([]interface{}{result}, tokens[i-1:]...)...)
-				i -= 2 // Adjust index after modification
-			}
-		}
-	}
-
-	// Format result
-	result := tokens[0].(float64)
-	if result == float64(int(result)) {
-		return fmt.Sprintf("%d", int(result)), true
-	}
-	return fmt.Sprintf("%.2f", result), true
-}
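For the record, the deleted parser evaluates one precedence level at a time over a flat token slice rather than building an AST: for `2+3*4` the tokens `[2 + 3 * 4]` collapse to `[2 + 12]` on the `*` pass and to `[14]` on the `+` pass. Two caveats were baked into the code above: `^` is implemented as repeated multiplication, so only non-negative integer exponents behave, and unary minus never matches the entry regex, so an expression like `-2+3` is rejected up front.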
ia-currency.go (418 lines, deleted)
@@ -1,418 +0,0 @@
-package main
-
-import (
-	"encoding/json"
-	"fmt"
-	"io"
-	"net/http"
-	"regexp"
-	"strconv"
-	"strings"
-	"sync"
-	"time"
-)
-
-// ExchangeRateCache holds currency rates with automatic refresh
-var (
-	exchangeRates      = make(map[string]float64)
-	nextUpdateTime     time.Time
-	lastUpdateTime     time.Time
-	exchangeCacheMutex sync.RWMutex
-	allCurrencies      []string
-)
-
-// CurrencyAPIResponse structure for exchange rate API
-type CurrencyAPIResponse struct {
-	Rates map[string]float64 `json:"rates"`
-}
-
-var primaryURL = "https://open.er-api.com/v6/latest/USD"
-var backupURL = "https://api.frankfurter.app/latest?base=USD"
-
-func fetchRates(url string) ([]byte, error) {
-	resp, err := http.Get(url)
-	if err != nil {
-		return nil, err
-	}
-	defer resp.Body.Close()
-	return io.ReadAll(resp.Body)
-}
-
-func UpdateExchangeRates() error {
-	exchangeCacheMutex.Lock()
-	defer exchangeCacheMutex.Unlock()
-
-	var (
-		rates    map[string]float64
-		nextTime time.Time
-		fallback bool
-	)
-
-	// Try primary API
-	body, err := fetchRates(primaryURL)
-	if err == nil {
-		var res struct {
-			Result             string             `json:"result"`
-			Rates              map[string]float64 `json:"rates"`
-			TimeNextUpdateUnix int64              `json:"time_next_update_unix"`
-		}
-		if err := json.Unmarshal(body, &res); err == nil && res.Result == "success" && len(res.Rates) > 0 {
-			rates = res.Rates
-			nextTime = time.Unix(res.TimeNextUpdateUnix, 0)
-		} else {
-			printWarn("Primary API response invalid or empty, falling back to backup")
-			fallback = true
-		}
-	} else {
-		printWarn("Primary API fetch failed: %v", err)
-		fallback = true
-	}
-
-	// Try backup API if needed
-	if fallback {
-		body, err := fetchRates(backupURL)
-		if err != nil {
-			return fmt.Errorf("both rate fetches failed: %v", err)
-		}
-
-		var res struct {
-			Base  string             `json:"base"`
-			Date  string             `json:"date"`
-			Rates map[string]float64 `json:"rates"`
-		}
-		if err := json.Unmarshal(body, &res); err != nil {
-			return fmt.Errorf("backup API response unmarshal failed: %v", err)
-		}
-		if len(res.Rates) == 0 {
-			return fmt.Errorf("backup API returned empty rates")
-		}
-
-		rates = res.Rates
-		nextTime = time.Now().Add(6 * time.Hour)
-	}
-
-	// Finalize
-	exchangeRates = rates
-	nextUpdateTime = nextTime
-	lastUpdateTime = time.Now()
-
-	allCurrencies = make([]string, 0, len(exchangeRates))
-	for c := range exchangeRates {
-		allCurrencies = append(allCurrencies, c)
-	}
-
-	printDebug("Updated currency rates: %d currencies cached", len(allCurrencies))
-	printDebug("Next currency update at: %s", nextUpdateTime.Format(time.RFC1123))
-	return nil
-}
-
-// PrecacheAllCurrencyPairs pre-caches conversion rates for all currency pairs
-func PrecacheAllCurrencyPairs() {
-	exchangeCacheMutex.RLock()
-	defer exchangeCacheMutex.RUnlock()
-
-	if len(exchangeRates) == 0 {
-		printWarn("Skipping precache: no currency rates available")
-		return
-	}
-
-	printDebug("Precaching all currency pairs (%d total)", len(exchangeRates))
-
-	for from := range exchangeRates {
-		for to := range exchangeRates {
-			if from == to {
-				continue
-			}
-			// Cache the cross-rate
-			GetExchangeRate(from, to)
-		}
-	}
-
-	printDebug("All currency pairs precached")
-}
-
-// GetExchangeRate gets the current exchange rate with caching
-func GetExchangeRate(from, to string) (float64, bool) {
-	// Auto-update cache if expired
-	if time.Now().After(nextUpdateTime) {
-		// Avoid excessive updates within 1 min
-		if time.Since(lastUpdateTime) > time.Minute {
-			err := UpdateExchangeRates()
-			if err != nil {
-				printWarn("Currency update failed: %v", err)
-				nextUpdateTime = time.Now().Add(5 * time.Minute)
-			}
-		}
-	}
-
-	exchangeCacheMutex.RLock()
-	defer exchangeCacheMutex.RUnlock()
-
-	// Handle same currency
-	if from == to {
-		return 1, true
-	}
-
-	// Convert via USD if direct rate not available
-	fromRate, fromExists := exchangeRates[from]
-	toRate, toExists := exchangeRates[to]
-
-	if !fromExists || !toExists {
-		return 0, false
-	}
-
-	// Calculate cross rate: (1 USD / fromRate) * toRate
-	return toRate / fromRate, true
-}
-
-// ParseCurrencyConversion detects and processes currency conversion queries
-func ParseCurrencyConversion(query string) (float64, string, string, bool) {
-	// Main conversion phrases
-	conversionPhrases := []string{
-		// Universal/math
-		"➞", "→", "⇒", ">", "->", "=", "≈", "~", ":", "≡",
-		// English
-		"to", "in", "into", "as", "equals", "equal to", "equals to", "is", "becomes", "be", "makes", "converted to", "convert to", "convert into", "converted into",
-		"exchange for", "exchanged for", "value in", "as currency", "convert", "equivalent to", "same as", "is equal to", ">", "gives", "makes", "result is", "returns", "will be", "equals:", "is equivalent to", "≈", "~", ":",
-		// German (DE)
-		"auf", "in", "zu", "umrechnen in", "umrechnen zu", "als", "gleich", "ist", "ist gleich", "umwandeln in", "wird zu", "ergibt", "macht", "ist", "resultiert in", "gleichwertig mit",
-		// Spanish (ES)
-		"en", "a", "como", "igual a", "es", "es igual a", "es igual", "convertir a", "cambiar a", "valor en", "convierte en", "devuelve", "será", "equivale a", "es equivalente a",
-		// French (FR)
-		"vers", "en", "comme", "égal à", "est", "c'est", "convertir en", "changer en", "valeur en", "équivaut à", "sera", "fait", "rend", "est égal à", "équivalent à",
-		// Italian (IT)
-		"a", "in", "come", "uguale a", "è", "convertire in", "cambiare in", "valore in", "sarà", "fa", "equivale a", "è uguale a",
-		// Portuguese (PT/BR)
-		"para", "em", "como", "igual a", "é", "converter para", "trocar por", "valor em", "converte em", "vai ser", "faz", "equivale a", "é igual a", "é equivalente a",
-		// Dutch (NL)
-		"naar", "in", "als", "is gelijk aan", "is", "wordt", "omzetten naar", "waarde in", "gelijk aan", "is hetzelfde als",
-		// Czech (CS)
-		"na", "do", "jako", "rovná se", "je", "převést na", "výměna za", "hodnota v", "přepočet", "bude", "rovná", "je to", "je rovno", "je stejné jako",
-		// Slovak (SK)
-		"na", "do", "ako", "rovná sa", "je", "previesť na", "výměna za", "hodnota v", "prerátať", "bude", "rovná", "je to", "je rovné", "je rovnaké ako",
-		// Polish (PL)
-		"na", "w", "jako", "równa się", "jest", "przelicz na", "wymień na", "wartość w", "przelicza się na", "będzie", "to jest", "jest równy", "jest taki sam jak",
-		// Russian (RU)
-		"на", "в", "как", "равно", "есть", "конвертировать в", "обменять на", "значение в", "равняется", "будет", "это", "такое же как",
-		// Ukrainian (UA)
-		"на", "у", "як", "дорівнює", "є", "конвертувати у", "обміняти на", "значення в", "буде", "це", "таке саме як",
-		// Croatian / Serbian / Bosnian / Slovenian (HR/SR/BS/SL)
-		"na", "u", "za", "kao", "jednako", "je", "pretvori u", "zamijeniti za", "vrijednost u", "preračunaj u", "biti", "to je", "jednako kao", "je isto kao",
-		"v", "kot", "je enako", "pretvoriti v", "zamenjati za", "vrednost v", "je isto kao", "je enakovredno",
-		// Bulgarian (BG)
-		"на", "в", "като", "равно на", "е", "преобразувай в", "обмени на", "стойност в", "ще бъде", "това е", "равностойно на",
-		// Turkish (TR)
-		"için", "olarak", "eşittir", "bu", "dönüştür to", "değiştir to", "değer olarak", "olur", "eşit", "bu olur", "aynı olarak",
-		// Greek (EL)
-		"σε", "ως", "ίσον", "είναι", "μετατροπή σε", "ανταλλαγή με", "τιμή σε", "θα είναι", "αυτό είναι", "ισοδυναμεί με", "ίσο με",
-		// Chinese (Simplified and Traditional, ZH)
-		"到", "变为", "換成", "转换为", "等于", "等於", "是", "为", "結果是", "相等於", "等同於", "一樣",
-		// Japanese (JA)
-		"に", "として", "等しい", "は", "に変換", "に交換", "の値", "は", "結果は", "となる", "同じ", "等価", "等しく",
-		// Korean (KO)
-		"으로", "같이", "같다", "이다", "로 변환", "교환하다", "값", "이 된다", "와 같다", "같음", "동일하다",
-		// Arabic (AR)
-		"إلى", "الى", "في", "كـ", "يساوي", "هو", "تحويل إلى", "قيمة في", "يصبح", "يساوي نفس", "تعادل", "تساوي",
-		// Hebrew (HE)
-		"ל", "ב", "בתור", "שווה ל", "הוא", "המר ל", "ערך ב", "יהיה", "אותו הדבר כמו", "זהה ל",
-		// Romanian (RO)
-		"la", "în", "ca", "egal cu", "este", "converti la", "schimbă în", "valoare în", "va fi", "este egal cu",
-		// Hungarian (HU)
-		"ra", "re", "ba", "be", "mint", "egyenlő", "az", "átvált", "értéke", "lesz", "ugyanaz mint",
-		// Swedish (SE)
-		"till", "i", "som", "är", "är lika med", "omvandla till", "värde i", "blir", "är samma som",
-		// Danish (DK)
-		"til", "i", "som", "er", "er lig med", "konverter til", "værdi i", "bliver", "er det samme som",
-		// Norwegian (NO)
-		"til", "i", "som", "er", "er lik", "konverter til", "verdi i", "blir", "er det samme som",
-		// Finnish (FI)
-		"ksi", "in", "kuin", "on", "on yhtä kuin", "muunna", "arvo", "tulee olemaan", "sama kuin",
-		// Estonian (EE)
-		"ks", "sisse", "nagu", "on", "on võrdne", "teisendada", "väärtus", "saab olema", "sama mis",
-		// Latvian (LV)
-		"uz", "iekš", "kā", "ir", "ir vienāds ar", "konvertēt uz", "vērtība", "būs", "tāpat kā",
-		// Lithuanian (LT)
-		"į", "kaip", "yra", "yra lygus", "konvertuoti į", "vertė", "bus", "tas pats kaip",
-		// Persian (FA)
-		"به", "در", "مثل", "برابر با", "است", "تبدیل به", "ارزش در", "خواهد بود", "همانند",
-		// Hindi (HI)
-		"को", "में", "के रूप में", "बराबर", "है", "में बदलें", "मूल्य में", "होगा", "के समान",
-		// Thai (TH)
-		"ไปที่", "ใน", "เป็น", "เท่ากับ", "คือ", "แปลงเป็น", "ค่าใน", "จะเป็น", "เท่ากัน",
-		// Indonesian (ID)
-		"ke", "dalam", "sebagai", "sama dengan", "adalah", "konversi ke", "nilai dalam", "akan menjadi", "sama dengan",
-		// Vietnamese (VI)
-		"thành", "trong", "là", "bằng", "là", "chuyển đổi thành", "giá trị trong", "sẽ là", "tương đương với",
-		// Malay (MS)
-		"kepada", "dalam", "sebagai", "sama dengan", "ialah", "tukar ke", "nilai dalam", "akan jadi", "setara dengan",
-		// Filipino/Tagalog (TL)
-		"sa", "sa loob ng", "bilang", "katumbas ng", "ay", "i-convert sa", "halaga sa", "magiging", "pareho sa",
-	}
-
-	// Build the OR group for all currency conversion phrases to use in the regex pattern
-	var orGroup strings.Builder
-	for i, phrase := range conversionPhrases {
-		if i > 0 {
-			orGroup.WriteString("|")
-		}
-		// escape for regex with special symbols:
-		orGroup.WriteString(regexp.QuoteMeta(phrase))
-	}
-	regexPattern := fmt.Sprintf(`(?i)([\d,]+(?:\.\d+)?)\s*([^\d,]+?)\s+(?:%s)\s+([^\d,]+)`, orGroup.String())
-	re := regexp.MustCompile(regexPattern)
-	matches := re.FindStringSubmatch(query)
-	if len(matches) < 4 {
-		return 0, "", "", false
-	}
-
-	// Clean and parse amount
-	amountStr := strings.ReplaceAll(matches[1], ",", "")
-	amount, err := strconv.ParseFloat(amountStr, 64)
-	if err != nil {
-		return 0, "", "", false
-	}
-
-	// Normalize currency symbols
-	currencyMap := map[string]string{
-		// Major Global Currencies
-		"$": "USD", "usd": "USD", "dollar": "USD", "dollars": "USD", "buck": "USD", "bucks": "USD", "us dollar": "USD", "american dollar": "USD", "freedom units": "USD",
-		"€": "EUR", "eur": "EUR", "euro": "EUR", "euros": "EUR",
-		"£": "GBP", "gbp": "GBP", "pound": "GBP", "pounds": "GBP", "sterling": "GBP", "quid": "GBP", "pound sterling": "GBP",
-		"¥": "JPY", "jpy": "JPY", "yen": "JPY", "cn¥": "CNY", // Handle ¥ ambiguity with CN¥ for Chinese Yuan
-		"₩": "KRW", "krw": "KRW", "won": "KRW", "korean won": "KRW",
-		"₹": "INR", "inr": "INR", "rupee": "INR", "rupees": "INR", "indian rupee": "INR",
-		"₽": "RUB", "rub": "RUB", "ruble": "RUB", "rubles": "RUB", "russian ruble": "RUB",
-
-		// Americas
-		"c$": "CAD", "cad": "CAD", "canadian dollar": "CAD", "loonie": "CAD",
-		"a$": "AUD", "aud": "AUD", "australian dollar": "AUD", "aussie dollar": "AUD",
-		"nz$": "NZD", "nzd": "NZD", "new zealand dollar": "NZD", "kiwi": "NZD", "kiwi dollar": "NZD",
-		"r$": "BRL", "brl": "BRL", "real": "BRL", "reais": "BRL", "brazilian real": "BRL",
-		"mx$": "MXN", "mxn": "MXN", "mexican peso": "MXN", "mexican pesos": "MXN",
-		"col$": "COP", "cop": "COP", "colombian peso": "COP",
-		"s/": "PEN", "pen": "PEN", "sol": "PEN", "soles": "PEN", "peruvian sol": "PEN",
-		"clp$": "CLP", "clp": "CLP", "chilean peso": "CLP",
-		"arg$": "ARS", "ars": "ARS", "argentine peso": "ARS",
-
-		// Europe & CIS
-		"chf": "CHF", "fr": "CHF", "swiss franc": "CHF", "franc suisse": "CHF",
-		"sek": "SEK", "kr": "SEK", "swedish krona": "SEK", "swedish kronor": "SEK",
-		"nok": "NOK", "norwegian krone": "NOK", "norwegian kroner": "NOK",
-		"dkk": "DKK", "danish krone": "DKK", "danish kroner": "DKK",
-		"zł": "PLN", "pln": "PLN", "zloty": "PLN", "polish zloty": "PLN",
-		"tl": "TRY", "try": "TRY", "turkish lira": "TRY", "türk lirası": "TRY", "₺": "TRY",
-		"huf": "HUF", "ft": "HUF", "forint": "HUF", "hungarian forint": "HUF",
-		"czk": "CZK", "kč": "CZK", "czech koruna": "CZK",
-		"ron": "RON", "lei": "RON", "romanian leu": "RON",
-		"bgn": "BGN", "лв": "BGN", "bulgarian lev": "BGN",
-		"uah": "UAH", "₴": "UAH", "hryvnia": "UAH", "ukrainian hryvnia": "UAH",
-		"kzt": "KZT", "₸": "KZT", "tenge": "KZT", "kazakhstani tenge": "KZT",
-
-		// Asia/Pacific
-		"cny": "CNY", "rmb": "CNY", "yuan": "CNY", "renminbi": "CNY", "chinese yuan": "CNY",
-		"hk$": "HKD", "hkd": "HKD", "hong kong dollar": "HKD",
-		"s$": "SGD", "sgd": "SGD", "singapore dollar": "SGD",
-		"nt$": "TWD", "twd": "TWD", "taiwan dollar": "TWD", "new taiwan dollar": "TWD",
-		"฿": "THB", "thb": "THB", "baht": "THB", "thai baht": "THB",
-		"rp": "IDR", "idr": "IDR", "rupiah": "IDR", "indonesian rupiah": "IDR",
-		"₱": "PHP", "php": "PHP", "philippine peso": "PHP",
-		"rm": "MYR", "myr": "MYR", "ringgit": "MYR", "malaysian ringgit": "MYR",
-		"₫": "VND", "vnd": "VND", "dong": "VND", "vietnamese dong": "VND",
-		"₭": "LAK", "lak": "LAK", "kip": "LAK", "lao kip": "LAK",
-		"៛": "KHR", "khr": "KHR", "riel": "KHR", "cambodian riel": "KHR",
-
-		// Middle East & Africa
-		"₪": "ILS", "ils": "ILS", "shekel": "ILS", "new israeli shekel": "ILS",
-		"﷼": "SAR", "sr": "SAR", "sar": "SAR", "riyal": "SAR", "saudi riyal": "SAR",
-		"د.إ": "AED", "dh": "AED", "aed": "AED", "dirham": "AED", "uae dirham": "AED",
-		"egp": "EGP", "e£": "EGP", "egyptian pound": "EGP",
-		"zar": "ZAR", "r": "ZAR", "rand": "ZAR", "south african rand": "ZAR",
-		"₦": "NGN", "ngn": "NGN", "naira": "NGN", "nigerian naira": "NGN",
-	}
-
-	// Improved normalization function
-	normalizeCurrency := func(input string) string {
-		clean := strings.TrimSpace(strings.ToLower(input))
-		clean = strings.Join(strings.Fields(clean), " ")
-		// Direct map
-		if mapped, ok := currencyMap[clean]; ok {
-			return mapped
-		}
-		// Fuzzy match: for last word
-		words := strings.Fields(clean)
-		for i := 0; i < len(words); i++ {
-			sub := strings.Join(words[i:], " ")
-			if mapped, ok := currencyMap[sub]; ok {
-				return mapped
-			}
-		}
-		// Fuzzy match: try reducing phrase from the end
-		for i := len(words) - 1; i >= 0; i-- {
-			sub := strings.Join(words[:i], " ")
-			if mapped, ok := currencyMap[sub]; ok {
-				return mapped
-			}
-		}
-		// Handle currency symbols at the end (e.g. "100usd")
-		if len(clean) > 1 {
-			if symbol, ok := currencyMap[string(clean[len(clean)-1])]; ok {
-				return symbol
-			}
-		}
-		// Currency code fallback
-		if len(clean) == 3 {
-			upper := strings.ToUpper(clean)
-			exchangeCacheMutex.RLock()
-			defer exchangeCacheMutex.RUnlock()
-			if _, exists := exchangeRates[upper]; exists {
-				return upper
-			}
-		}
-		return strings.ToUpper(input)
-	}
-
-	fromCurr := normalizeCurrency(matches[2])
-	toCurr := normalizeCurrency(matches[3])
-
-	// Validate currencies exist in exchange rates
-	exchangeCacheMutex.RLock()
-	defer exchangeCacheMutex.RUnlock()
-	if _, fromExists := exchangeRates[fromCurr]; !fromExists {
-		return 0, "", "", false
-	}
-	if _, toExists := exchangeRates[toCurr]; !toExists {
-		return 0, "", "", false
-	}
-
-	return amount, fromCurr, toCurr, true
-}
-
-// ConvertCurrency handles the actual conversion
-func ConvertCurrency(amount float64, from, to string) (float64, bool) {
-	if from == to {
-		return amount, true
-	}
-
-	rate, ok := GetExchangeRate(from, to)
-	if !ok {
-		// Try to find similar currencies
-		from = strings.ToUpper(from)
-		to = strings.ToUpper(to)
-
-		// Check if we have the currency in our list
-		exchangeCacheMutex.RLock()
-		defer exchangeCacheMutex.RUnlock()
-
-		_, fromExists := exchangeRates[from]
-		_, toExists := exchangeRates[to]
-
-		if !fromExists || !toExists {
-			return 0, false
-		}
-
-		// Shouldn't happen due to the check above, but just in case
-		return 0, false
-	}
-
-	return amount * rate, true
-}
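Since both upstream feeds are USD-based, every pair in the deleted GetExchangeRate is a cross rate through USD: rate(from, to) = toRate / fromRate. A quick sanity check with made-up numbers (illustrative only, not the live feed):

    package main

    import "fmt"

    func main() {
    	// Hypothetical USD-based table, for illustration only.
    	rates := map[string]float64{"USD": 1.0, "EUR": 0.9, "CZK": 23.0}

    	// EUR -> CZK: (1 USD / rates["EUR"]) * rates["CZK"]
    	rate := rates["CZK"] / rates["EUR"]
    	fmt.Printf("1 EUR = %.2f CZK\n", rate)       // 25.56
    	fmt.Printf("100 EUR = %.2f CZK\n", 100*rate) // 2555.56
    }

Also worth flagging before the file goes: PrecacheAllCurrencyPairs holds the read lock while calling GetExchangeRate, which can call UpdateExchangeRates and ask for the write lock, so it looks like it could deadlock if the refresh window lapses mid-loop; and with a table of ~160 currencies the precache makes ~25k calls to compute a division that is already O(1).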
ia-main.go (100 lines, deleted)
@@ -1,100 +0,0 @@
-package main
-
-import (
-	"fmt"
-	"time"
-)
-
-type InstantAnswer struct {
-	Type    string // "calc", "unit_convert", "wiki", ...
-	Title   string
-	Content interface{}
-}
-
-func detectInstantAnswer(query string) *InstantAnswer {
-	// Try currency conversion first (more specific)
-	if amount, from, to, ok := ParseCurrencyConversion(query); ok {
-		if result, ok := ConvertCurrency(amount, from, to); ok {
-			return &InstantAnswer{
-				Type:  "currency",
-				Title: "Currency Conversion",
-				Content: map[string]interface{}{
-					"from":    from,
-					"to":      to,
-					"amount":  amount,
-					"result":  result,
-					"display": fmt.Sprintf("%.2f %s = %.2f %s", amount, from, result, to),
-				},
-			}
-		}
-	}
-
-	// Try math expression
-	if result, ok := parseMathExpression(query); ok {
-		return &InstantAnswer{
-			Type:    "calc",
-			Title:   "Calculation Result",
-			Content: result,
-		}
-	}
-
-	// Try weather instant answer
-	if city, forecast, ok := getWeatherForQuery(query); ok {
-		return &InstantAnswer{
-			Type:  "weather",
-			Title: fmt.Sprintf("Weather in %s", city.Name),
-			Content: map[string]interface{}{
-				"city":     city.Name,
-				"country":  city.Country,
-				"lat":      city.Lat,
-				"lon":      city.Lon,
-				"current":  forecast.Current,
-				"forecast": forecast.Forecast,
-				"display":  fmt.Sprintf("%.1f°C, %s", forecast.Current.Temperature, forecast.Current.Condition),
-			},
-		}
-	}
-
-	// Try Wikipedia search
-	if title, text, link, ok := getWikipediaSummary(query); ok {
-		return &InstantAnswer{
-			Type:  "wiki",
-			Title: title,
-			Content: map[string]string{
-				"text": text,
-				"link": link,
-			},
-		}
-	}
-
-	return nil
-}
-
-func initExchangeRates() {
-	// Initial synchronous load
-	if err := UpdateExchangeRates(); err != nil {
-		printErr("Initial exchange rate update failed: %v", err)
-	} else {
-		PrecacheAllCurrencyPairs()
-	}
-
-	// Pre-cache common wiki terms in background
-	go func() {
-		commonTerms := []string{"United States", "Europe", "Technology", "Science", "Mathematics"}
-		for _, term := range commonTerms {
-			getWikipediaSummary(term)
-		}
-	}()
-
-	// Periodically update cache
-	ticker := time.NewTicker(30 * time.Minute)
-	go func() {
-		for range ticker.C {
-			if err := UpdateExchangeRates(); err != nil {
-				printWarn("Periodic exchange rate update failed: %v", err)
-			} else {
-				PrecacheAllCurrencyPairs()
-			}
-		}
-	}()
-}
ia-weather.go (367 lines, deleted)
@@ -1,367 +0,0 @@
-package main
-
-import (
-	"encoding/json"
-	"fmt"
-	"io"
-	"net/http"
-	"regexp"
-	"strings"
-	"unicode"
-
-	"golang.org/x/text/unicode/norm"
-)
-
-type WeatherCity struct {
-	Name    string
-	Country string
-	Lat     float64
-	Lon     float64
-}
-type WeatherCurrent struct {
-	Temperature float64
-	Wind        float64
-	Humidity    int
-	Condition   string
-}
-type WeatherDay struct {
-	Date      string
-	MinTemp   float64
-	MaxTemp   float64
-	Condition string
-}
-type WeatherForecast struct {
-	Current  WeatherCurrent
-	Forecast []WeatherDay
-}
-
-func getWeatherForQuery(query string) (city WeatherCity, forecast WeatherForecast, ok bool) {
-	// Expanded multi-language weather keywords (40+ languages/dialects)
-	weatherWords := []string{
-		// English
-		"weather", "forecast", "temperature", "conditions", "meteorology", "outlook",
-		// Czech/Slovak
-		"počasí", "předpověď", "teplota", "vlhkost", "srážky", "vítr", "meteo",
-		// German
-		"wetter", "vorhersage", "temperatur", "wettervorhersage", "wetterbericht",
-		// French
-		"météo", "prévisions", "température", "conditions météo", "prévision météo",
-		// Spanish
-		"tiempo", "clima", "pronóstico", "temperatura", "meteorología", "previsión",
-		// Italian
-		"tempo", "meteo", "previsioni", "temperatura", "condizioni atmosferiche",
-		// Portuguese
-		"tempo", "clima", "previsão", "temperatura", "meteorologia",
-		// Polish
-		"pogoda", "prognoza", "temperatura", "warunki atmosferyczne",
-		// Russian
-		"погода", "прогноз", "температура", "метео", "метеопрогноз",
-		// Ukrainian
-		"погода", "прогноз", "температура", "метео",
-		// Dutch
-		"weer", "voorspelling", "temperatuur", "weersverwachting",
-		// Scandinavian
-		"väder", "prognos", "temperatur", // Swedish
-		"vær", "prognose", "temperatur", // Norwegian/Danish
-		"veður", "spá", "hitastig", // Icelandic
-		// East Asian
-		"天気", "予報", "気温", // Japanese (tenki, yohō, kion)
-		"날씨", "예보", "기온", // Korean (nalssi, yebo, gion)
-		"天气", "预报", "气温", // Chinese (tiānqì, yùbào, qìwēn)
-		// South Asian
-		"मौसम", "पूर्वानुमान", "तापमान", // Hindi (mausam, purvanumaan, taapmaan)
-		"আবহাওয়া", "পূর্বাভাস", "তাপমাত্রা", // Bengali (ābhawāẏā, pūrbābhāsa, tāpamātrā)
-		// Middle Eastern
-		"طقس", "توقعات", "درجة الحرارة", // Arabic (ṭaqs, tawaqquʿāt, darajat al-ḥarāra)
-		"آب و ہوا", "پیش گوئی", "درجہ حرارت", // Urdu (āb-o-hawā, peshgoī, daraja ḥarārat)
-		// Turkish
-		"hava", "tahmin", "sıcaklık", "hava durumu",
-		// Greek
-		"καιρός", "πρόβλεψη", "θερμοκρασία",
-		// Hebrew
-		"מזג אוויר", "תחזית", "טמפרטורה",
-		// Other European
-		"időkép", "előrejelzés", "hőmérséklet", // Hungarian
-		"vreme", "prognoză", "temperatură", // Romanian
-		"vrijeme", "prognoza", "temperatura", // Croatian/Serbian
-		// Global/Internet slang
-		"temp", "wx", "meteo", "wea", "forec",
-	}
-
-	// Enhanced multi-language prepositions
-	prepositions := []string{
-		// English
-		"in", "at", "for", "around", "near",
-		// Czech/Slovak
-		"v", "ve", "na", "do", "u", "při", "blízko", "okolí",
-		// German
-		"in", "bei", "an", "für", "um", "nahe",
-		// Romance
-		"en", "a", "au", "aux", "dans", // French
-		"en", "a", "de", // Spanish
-		"a", "in", "da", // Italian
-		"em", "no", "na", // Portuguese
-		// Slavic
-		"w", "we", "na", "dla", "pod", // Polish
-		"в", "на", "у", "к", "под", // Russian/Ukrainian
-		// Nordic
-		"i", "på", "hos", // Swedish/Danish/Norwegian
-		// Others
-		"في", "عند", "قرب", // Arabic (fī, ʿind, qurb)
-		"में", "पर", "के पास", // Hindi (mẽ, par, ke pās)
-		"で", "に", "の近く", // Japanese (de, ni, no chikaku)
-		"에서", "에", "근처", // Korean (eseo, e, geuncheo)
-		"在", "于", "附近", // Chinese (zài, yú, fùjìn)
-	}
-
-	// Always normalize query (lowercase + remove diacritics)
-	normalized := removeDiacritics(strings.ToLower(query))
-	hasWeather := false
-	for _, word := range weatherWords {
-		if strings.Contains(normalized, removeDiacritics(word)) {
-			hasWeather = true
-			break
-		}
-	}
-	if !hasWeather {
-		return city, forecast, false
-	}
-
-	// Improved location extraction with diacritic handling
-	loc := extractWeatherLocation(normalized, weatherWords, prepositions)
-	if loc == "" {
-		return city, forecast, false
-	}
-
-	// Geocode and get weather
-	return geocodeAndGetWeather(loc)
-}
-
-func extractWeatherLocation(query string, weatherWords, prepositions []string) string {
-	// Create normalized versions for matching
-	normWeatherWords := make([]string, len(weatherWords))
-	for i, w := range weatherWords {
-		normWeatherWords[i] = removeDiacritics(w)
-	}
-
-	normPrepositions := make([]string, len(prepositions))
-	for i, p := range prepositions {
-		normPrepositions[i] = removeDiacritics(p)
-	}
-
-	// Pattern 1: [weather_word] [preposition]? [location]
-	pattern1 := `(?:` + strings.Join(normWeatherWords, "|") + `)\s*(?:` + strings.Join(normPrepositions, "|") + `)?\s*(.+)`
-	re1 := regexp.MustCompile(pattern1)
-	if matches := re1.FindStringSubmatch(query); len(matches) > 1 {
-		loc := cleanLocation(matches[1], normPrepositions)
-		if loc != "" {
-			return loc
-		}
-	}
-
-	// Pattern 2: [location] [weather_word]
-	pattern2 := `(.+?)\s+(?:` + strings.Join(normWeatherWords, "|") + `)`
-	re2 := regexp.MustCompile(pattern2)
-	if matches := re2.FindStringSubmatch(query); len(matches) > 1 {
-		loc := cleanLocation(matches[1], normPrepositions)
-		if loc != "" {
-			return loc
-		}
-	}
-
-	// Pattern 3: Question format
-	questionPattern := `(?:how is|what is|what's|jak[ée]\s+je|wie ist|quel est|qu[eé]\s+tal|com'[èe])\s+(?:the )?(?:` +
-		strings.Join(normWeatherWords, "|") + `)\s*(?:` + strings.Join(normPrepositions, "|") + `)?\s*(.+)`
-	re3 := regexp.MustCompile(questionPattern)
-	if matches := re3.FindStringSubmatch(query); len(matches) > 1 {
-		loc := cleanLocation(matches[1], normPrepositions)
-		if loc != "" {
-			return loc
-		}
-	}
-
-	// Fallback with smarter exclusion
-	return extractByExclusion(query, normWeatherWords, normPrepositions)
-}
-
-func cleanLocation(loc string, prepositions []string) string {
-	// Create preposition set
-	prepSet := make(map[string]bool)
-	for _, p := range prepositions {
-		prepSet[p] = true
-	}
-
-	words := strings.Fields(loc)
-
-	// Remove leading prepositions
-	for len(words) > 0 && prepSet[words[0]] {
-		words = words[1:]
-	}
-
-	// Remove trailing prepositions
-	for len(words) > 0 && prepSet[words[len(words)-1]] {
-		words = words[:len(words)-1]
-	}
-
-	// Rejoin and clean
-	cleaned := strings.Join(words, " ")
-	return strings.Trim(cleaned, ",.?!:;()[]{}'\"")
-}
-
-// Remove diacritics implementation
-func removeDiacritics(s string) string {
-	var result []rune
-	for _, r := range norm.NFD.String(s) {
-		if unicode.Is(unicode.Mn, r) { // Mn: nonspacing marks
-			continue
-		}
-		result = append(result, r)
-	}
-	return string(result)
-}
-
-// Extract location by removing weather-related words
-func extractByExclusion(query string, weatherWords, prepositions []string) string {
-	// Create removal set
-	removeSet := make(map[string]bool)
-	for _, w := range weatherWords {
-		removeSet[w] = true
-	}
-	for _, p := range prepositions {
-		removeSet[p] = true
-	}
-
-	// Process query words
-	words := strings.Fields(query)
-	var locWords []string
-	for _, word := range words {
-		if !removeSet[word] {
-			locWords = append(locWords, word)
-		}
-	}
-
-	loc := strings.Join(locWords, " ")
-	return cleanLocation(loc, prepositions)
-}
-
-// // Improved location cleaning
-// func cleanLocation(loc string) string {
-// 	loc = strings.Trim(loc, ",.?!:;()[]{}'\"")
-
-// 	// Remove trailing verbs
-// 	verbs := []string{"is", "at", "for", "in", "v", "ve", "na", "do", "w", "en", "a"}
-// 	for _, v := range verbs {
-// 		loc = strings.TrimSuffix(loc, " "+v)
-// 	}
-
-// 	return loc
-// }
-
-// // Remove diacritics implementation
-// func removeDiacritics(s string) string {
-// 	var result []rune
-// 	for _, r := range norm.NFD.String(s) {
-// 		if unicode.Is(unicode.Mn, r) { // Mn: nonspacing marks
-// 			continue
-// 		}
-// 		result = append(result, r)
-// 	}
-// 	return string(result)
-// }
-
-func geocodeAndGetWeather(loc string) (WeatherCity, WeatherForecast, bool) {
-	var city WeatherCity
-	var forecast WeatherForecast
-	// 1. Geocode
-	geoURL := fmt.Sprintf("https://geocoding-api.open-meteo.com/v1/search?name=%s&count=1", urlQueryEscape(loc))
-	resp, err := http.Get(geoURL)
-	if err != nil {
-		return city, forecast, false
-	}
-	defer resp.Body.Close()
-	var geo struct {
-		Results []struct {
-			Name    string  `json:"name"`
-			Country string  `json:"country"`
-			Lat     float64 `json:"latitude"`
-			Lon     float64 `json:"longitude"`
-		} `json:"results"`
-	}
-	if err := json.NewDecoder(resp.Body).Decode(&geo); err != nil || len(geo.Results) == 0 {
-		return city, forecast, false
-	}
-	g := geo.Results[0]
-	city = WeatherCity{
-		Name:    g.Name,
-		Country: g.Country,
-		Lat:     g.Lat,
-		Lon:     g.Lon,
-	}
-	// 2. Weather (current + forecast)
-	weatherURL := fmt.Sprintf("https://api.open-meteo.com/v1/forecast?latitude=%f&longitude=%f&current=temperature_2m,weather_code,wind_speed_10m,relative_humidity_2m&daily=temperature_2m_min,temperature_2m_max,weather_code&forecast_days=3&timezone=auto", g.Lat, g.Lon)
-	resp2, err := http.Get(weatherURL)
-	if err != nil {
-		return city, forecast, false
-	}
-	defer resp2.Body.Close()
-	var data struct {
-		Current struct {
-			Temp float64 `json:"temperature_2m"`
-			Wind float64 `json:"wind_speed_10m"`
-			Hum  int     `json:"relative_humidity_2m"`
-			Code int     `json:"weather_code"`
-		} `json:"current"`
-		Daily struct {
-			Dates   []string  `json:"time"`
-			MinTemp []float64 `json:"temperature_2m_min"`
-			MaxTemp []float64 `json:"temperature_2m_max"`
-			Weather []int     `json:"weather_code"`
-		} `json:"daily"`
-	}
-	body, _ := io.ReadAll(resp2.Body)
-	if err := json.Unmarshal(body, &data); err != nil {
-		return city, forecast, false
-	}
-	forecast.Current = WeatherCurrent{
-		Temperature: data.Current.Temp,
-		Wind:        data.Current.Wind,
-		Humidity:    data.Current.Hum,
-		Condition:   weatherDescription(data.Current.Code),
-	}
-	for i := range data.Daily.Dates {
-		forecast.Forecast = append(forecast.Forecast, WeatherDay{
-			Date:      data.Daily.Dates[i],
-			MinTemp:   data.Daily.MinTemp[i],
-			MaxTemp:   data.Daily.MaxTemp[i],
-			Condition: weatherDescription(data.Daily.Weather[i]),
-		})
-	}
-	return city, forecast, true
-}
-
-func weatherDescription(code int) string {
-	// Minimal mapping, can be expanded
-	switch code {
-	case 0:
-		return "Clear"
-	case 1, 2, 3:
-		return "Partly cloudy"
-	case 45, 48:
-		return "Fog"
-	case 51, 53, 55, 56, 57:
-		return "Drizzle"
-	case 61, 63, 65, 66, 67, 80, 81, 82:
-		return "Rain"
-	case 71, 73, 75, 77, 85, 86:
-		return "Snow"
-	case 95, 96, 99:
-		return "Thunderstorm"
-	default:
-		return "Unknown"
-	}
-}
-
-// Helper for safe query escaping
-func urlQueryEscape(s string) string {
-	return strings.ReplaceAll(strings.ReplaceAll(s, " ", "+"), "%", "")
-}
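The diacritics helper above is the piece most likely to be reused elsewhere; it is the standard NFD-decompose-then-drop-nonspacing-marks trick. Standalone check (same code as the deleted helper, plus a main):

    package main

    import (
    	"fmt"
    	"unicode"

    	"golang.org/x/text/unicode/norm"
    )

    // Decompose to NFD, then drop combining (nonspacing) marks.
    func removeDiacritics(s string) string {
    	var result []rune
    	for _, r := range norm.NFD.String(s) {
    		if unicode.Is(unicode.Mn, r) { // Mn: nonspacing marks
    			continue
    		}
    		result = append(result, r)
    	}
    	return string(result)
    }

    func main() {
    	fmt.Println(removeDiacritics("počasí v Brně")) // pocasi v Brne
    }

Note also that the deleted urlQueryEscape only swaps spaces for "+" and strips "%"; url.QueryEscape would be the safer building block if the geocoding call is ever restored.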
ia-wiki.go (74 lines, deleted)
@ -1,74 +0,0 @@
|
||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
|
||||||
"io"
|
|
||||||
"net/http"
|
|
||||||
"net/url"
|
|
||||||
"strings"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Wikipedia API response structure
|
|
||||||
type WikipediaResponse struct {
|
|
||||||
Query struct {
|
|
||||||
Pages map[string]struct {
|
|
||||||
PageID int `json:"pageid"`
|
|
||||||
Title string `json:"title"`
|
|
||||||
Extract string `json:"extract"`
|
|
||||||
} `json:"pages"`
|
|
||||||
} `json:"query"`
|
|
||||||
}
|
|
||||||
|
|
||||||
// Get Wikipedia summary
|
|
||||||
func getWikipediaSummary(query string) (title, text, link string, ok bool) {
|
|
||||||
// Clean and prepare query
|
|
||||||
query = strings.TrimSpace(query)
|
|
||||||
if query == "" {
|
|
||||||
return "", "", "", false
|
|
||||||
}
|
|
||||||
|
|
||||||
// API request
|
|
||||||
apiURL := fmt.Sprintf(
|
|
||||||
"https://en.wikipedia.org/w/api.php?action=query&format=json&prop=extracts&exintro&explaintext&redirects=1&titles=%s",
|
|
||||||
url.QueryEscape(query),
|
|
||||||
)
|
|
||||||
|
|
||||||
resp, err := http.Get(apiURL)
|
|
||||||
if err != nil {
|
|
||||||
return "", "", "", false
|
|
||||||
}
|
|
||||||
defer resp.Body.Close()
|
|
||||||
|
|
||||||
body, err := io.ReadAll(resp.Body)
|
|
||||||
if err != nil {
|
|
||||||
return "", "", "", false
|
|
||||||
}
|
|
||||||
|
|
||||||
// Parse JSON response
|
|
||||||
var result WikipediaResponse
|
|
||||||
if err := json.Unmarshal(body, &result); err != nil {
|
|
||||||
return "", "", "", false
|
|
||||||
}
|
|
||||||
|
|
||||||
// Extract first valid page
|
|
||||||
for _, page := range result.Query.Pages {
|
|
||||||
if page.PageID == 0 || page.Extract == "" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
// Format text
|
|
||||||
text = page.Extract
|
|
||||||
if len(text) > 500 {
|
|
||||||
text = text[:500] + "..."
|
|
||||||
}
|
|
||||||
|
|
||||||
// Create link
|
|
||||||
titleForURL := strings.ReplaceAll(page.Title, " ", "_")
|
|
||||||
link = fmt.Sprintf("https://en.wikipedia.org/wiki/%s", url.PathEscape(titleForURL))
|
|
||||||
|
|
||||||
return page.Title, text, link, true
|
|
||||||
}
|
|
||||||
|
|
||||||
return "", "", "", false
|
|
||||||
}
|
|
|
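
A short usage sketch for the deleted getWikipediaSummary helper above; the query string and print format are illustrative only, and the call assumes the same package:

// Hypothetical call site, same package as ia-wiki.go:
title, text, link, ok := getWikipediaSummary("Go (programming language)")
if ok {
	fmt.Printf("%s\n%s\nRead more: %s\n", title, text, link)
}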
@ -18,21 +18,8 @@ func PerformBingImageSearch(query, safe, lang string, page int) ([]ImageSearchRe
 	// Build the search URL
 	searchURL := buildBingSearchURL(query, page)
 
-	// Create the HTTP request
-	req, err := http.NewRequest("GET", searchURL, nil)
-	if err != nil {
-		return nil, 0, fmt.Errorf("creating request: %v", err)
-	}
-
-	// Set User-Agent
-	ImageUserAgent, err := GetUserAgent("Image-Search-Bing")
-	if err != nil {
-		return nil, 0, fmt.Errorf("generating User-Agent: %v", err)
-	}
-	req.Header.Set("User-Agent", ImageUserAgent)
-
-	// Use MetaProxy if enabled
-	resp, err := DoMetaProxyRequest(req)
+	// Make the HTTP request
+	resp, err := http.Get(searchURL)
 	if err != nil {
 		return nil, 0, fmt.Errorf("making request: %v", err)
 	}
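
The main-branch side of this hunk routes the request through DoMetaProxyRequest, whose definition is not part of this diff. A plausible sketch, inferred from the inline proxy branch visible in scrapeImageFromImgurPage further down (config.MetaProxyEnabled and metaProxyClient are taken from that hunk, not invented here):

// Sketch only: mirrors the MetaProxy branching seen in scrapeImageFromImgurPage.
func DoMetaProxyRequest(req *http.Request) (*http.Response, error) {
	if config.MetaProxyEnabled && metaProxyClient != nil {
		return metaProxyClient.Do(req) // route through the configured proxy client
	}
	return http.DefaultClient.Do(req) // fall back to a direct request
}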
@ -87,15 +87,15 @@ func PerformDeviantArtImageSearch(query, safe, lang string, page int) ([]ImageSe
 		return nil, 0, err
 	}
 
-	// Create the HTTP request
+	// Make the HTTP request with User-Agent header
+	client := &http.Client{}
 	req, err := http.NewRequest("GET", searchURL, nil)
 	if err != nil {
 		return nil, 0, fmt.Errorf("creating request: %v", err)
 	}
 	req.Header.Set("User-Agent", DeviantArtImageUserAgent)
 
-	// Perform the request using MetaProxy if enabled
-	resp, err := DoMetaProxyRequest(req)
+	resp, err := client.Do(req)
 	if err != nil {
 		return nil, 0, fmt.Errorf("making request: %v", err)
 	}
@ -182,7 +182,7 @@ func PerformDeviantArtImageSearch(query, safe, lang string, page int) ([]ImageSe
 
 	duration := time.Since(startTime)
 
-	// Check if the number of results is zero
+	// Check if the number of results is one or less
 	if len(results) == 0 {
 		return nil, duration, fmt.Errorf("no images found")
 	}
@ -18,21 +18,7 @@ func PerformImgurImageSearch(query, safe, lang string, page int) ([]ImageSearchR
 	var results []ImageSearchResult
 	searchURL := buildImgurSearchURL(query, page)
 
-	// Create the HTTP request
-	req, err := http.NewRequest("GET", searchURL, nil)
-	if err != nil {
-		return nil, 0, fmt.Errorf("creating request: %v", err)
-	}
-
-	// Get the User-Agent string
-	imgurUserAgent, err := GetUserAgent("Image-Search-Imgur")
-	if err != nil {
-		return nil, 0, fmt.Errorf("getting user-agent: %v", err)
-	}
-	req.Header.Set("User-Agent", imgurUserAgent)
-
-	// Perform the HTTP request with MetaProxy if enabled
-	resp, err := DoMetaProxyRequest(req)
+	resp, err := http.Get(searchURL)
 	if err != nil {
 		return nil, 0, fmt.Errorf("making request: %v", err)
 	}
@ -42,7 +28,6 @@ func PerformImgurImageSearch(query, safe, lang string, page int) ([]ImageSearchR
 		return nil, 0, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
 	}
 
-	// Parse the HTML document
 	doc, err := goquery.NewDocumentFromReader(resp.Body)
 	if err != nil {
 		return nil, 0, fmt.Errorf("loading HTML document: %v", err)
@ -91,35 +76,12 @@ func PerformImgurImageSearch(query, safe, lang string, page int) ([]ImageSearchR
 
 	duration := time.Since(startTime) // Calculate the duration
 
-	if len(results) == 0 {
-		return nil, duration, fmt.Errorf("no images found")
-	}
-
 	return results, duration, nil
 }
 
 // scrapeImageFromImgurPage scrapes the image source from the Imgur page
 func scrapeImageFromImgurPage(pageURL string) string {
-	req, err := http.NewRequest("GET", pageURL, nil)
-	if err != nil {
-		fmt.Printf("Error creating request for page: %v\n", err)
-		return ""
-	}
-
-	// Get the User-Agent string
-	imgurUserAgent, err := GetUserAgent("Image-Search-Imgur")
-	if err == nil {
-		req.Header.Set("User-Agent", imgurUserAgent)
-	}
-
-	// Perform the request using MetaProxy if enabled
-	var resp *http.Response
-	if config.MetaProxyEnabled && metaProxyClient != nil {
-		resp, err = metaProxyClient.Do(req)
-	} else {
-		client := &http.Client{}
-		resp, err = client.Do(req)
-	}
+	resp, err := http.Get(pageURL)
 	if err != nil {
 		fmt.Printf("Error fetching page: %v\n", err)
 		return ""
@ -97,7 +97,7 @@ func PerformQwantImageSearch(query, safe, lang string, page int) ([]ImageSearchR
 
 	// Ensure count + offset is within acceptable limits
 	if offset+resultsPerPage > 250 {
-		return nil, 0, fmt.Errorf("count + offset must be lower than 250 for Qwant")
+		return nil, 0, fmt.Errorf("count + offset must be lower than 250 for quant")
 	}
 
 	if safe == "" {
@ -113,21 +113,21 @@ func PerformQwantImageSearch(query, safe, lang string, page int) ([]ImageSearchR
 		offset,
 		safe)
 
-	// Create the HTTP request
+	client := &http.Client{Timeout: 10 * time.Second}
+
 	req, err := http.NewRequest("GET", apiURL, nil)
 	if err != nil {
 		return nil, 0, fmt.Errorf("creating request: %v", err)
 	}
 
-	// Get the User-Agent string
 	ImageUserAgent, err := GetUserAgent("Image-Search-Quant")
 	if err != nil {
-		return nil, 0, fmt.Errorf("getting user-agent: %v", err)
+		return nil, 0, err
 	}
-	req.Header.Set("User-Agent", ImageUserAgent)
 
-	// Perform the request with MetaProxy if enabled
-	resp, err := DoMetaProxyRequest(req)
+	req.Header.Set("User-Agent", ImageUserAgent) // Quant seems to not like some specific User-Agent strings
+
+	resp, err := client.Do(req)
 	if err != nil {
 		return nil, 0, fmt.Errorf("making request: %v", err)
 	}
@ -137,13 +137,11 @@ func PerformQwantImageSearch(query, safe, lang string, page int) ([]ImageSearchR
 		return nil, 0, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
 	}
 
-	// Parse the API response
 	var apiResp QwantAPIResponse
 	if err := json.NewDecoder(resp.Body).Decode(&apiResp); err != nil {
 		return nil, 0, fmt.Errorf("decoding response: %v", err)
 	}
 
-	// Process the results
 	var wg sync.WaitGroup
 	results := make([]ImageSearchResult, len(apiResp.Data.Result.Items))
@ -176,9 +174,5 @@ func PerformQwantImageSearch(query, safe, lang string, page int) ([]ImageSearchR
 
 	duration := time.Since(startTime) // Calculate the duration
 
-	if len(results) == 0 {
-		return nil, duration, fmt.Errorf("no images found")
-	}
-
 	return results, duration, nil
 }

59 images.go
@ -10,23 +10,12 @@ import (
 
 var imageSearchEngines []SearchEngine
 
-var allImageSearchEngines = []SearchEngine{
+func init() {
+	imageSearchEngines = []SearchEngine{
 		{Name: "Qwant", Func: wrapImageSearchFunc(PerformQwantImageSearch)},
 		{Name: "Bing", Func: wrapImageSearchFunc(PerformBingImageSearch)},
 		{Name: "DeviantArt", Func: wrapImageSearchFunc(PerformDeviantArtImageSearch)},
-	// {Name: "Imgur", Func: wrapImageSearchFunc(PerformImgurImageSearch), Weight: 4}, // example
-}
-
-func initImageEngines() {
-	imageSearchEngines = nil
-
-	for _, engineName := range config.MetaSearch.Image {
-		for _, candidate := range allImageSearchEngines {
-			if candidate.Name == engineName {
-				imageSearchEngines = append(imageSearchEngines, candidate)
-				break
-			}
+		//{Name: "Imgur", Func: wrapImageSearchFunc(PerformImgurImageSearch), Weight: 4}, // Image proxy not working
 	}
 }
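
The main-branch initImageEngines above narrows the full engine list to the names listed in config.MetaSearch.Image, preserving config order. A self-contained sketch of that filtering pattern, with the types trimmed down for illustration:

package main

import "fmt"

type SearchEngine struct{ Name string }

var allEngines = []SearchEngine{{"Qwant"}, {"Bing"}, {"DeviantArt"}}

// filterEngines mirrors the nested loop in initImageEngines:
// keep only configured names, in the order they are configured.
func filterEngines(wanted []string) []SearchEngine {
	var out []SearchEngine
	for _, name := range wanted {
		for _, candidate := range allEngines {
			if candidate.Name == name {
				out = append(out, candidate)
				break
			}
		}
	}
	return out
}

func main() {
	fmt.Println(filterEngines([]string{"Bing", "Qwant"})) // [{Bing} {Qwant}]
}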
@ -55,7 +44,7 @@ func handleImageSearch(w http.ResponseWriter, r *http.Request, settings UserSett
 	data := map[string]interface{}{
 		"Results":     combinedResults,
 		"Query":       query,
-		"Fetched":     FormatElapsedTime(elapsedTime),
+		"Fetched":     fmt.Sprintf("%.2f %s", elapsedTime.Seconds(), Translate("seconds")),
 		"Page":        page,
 		"HasPrevPage": page > 1,
 		"HasNextPage": len(combinedResults) >= 50,
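
FormatElapsedTime, used on the main-branch side above, is defined outside this diff. A plausible sketch consistent with the seconds and milliseconds msgids that the main branch adds to the locale files later in this comparison (Translate is the project's lookup helper seen on the indexing side of the same line):

// Sketch only: choose a unit so sub-second fetches don't render as "0.00 seconds".
func FormatElapsedTime(elapsed time.Duration) string {
	if elapsed < time.Second {
		return fmt.Sprintf("%d %s", elapsed.Milliseconds(), Translate("milliseconds"))
	}
	return fmt.Sprintf("%.2f %s", elapsed.Seconds(), Translate("seconds"))
}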
@ -97,8 +86,8 @@ func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
 	select {
 	case results := <-cacheChan:
 		if results == nil {
-			if config.MetaSearchEnabled {
-				combinedResults = fetchImageResults(query, safe, lang, page, synchronous, true)
+			if config.CrawlerEnabled {
+				combinedResults = fetchImageResults(query, safe, lang, page, synchronous)
 				if len(combinedResults) > 0 {
 					combinedResults = filterValidImages(combinedResults)
 					resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
@ -107,13 +96,13 @@ func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
 				printDebug("Crawler disabled; skipping fetching from image search engines.")
 			}
 		} else {
-			_, _, imageResults, _, _ := convertToSpecificResults(results)
+			_, _, imageResults, _ := convertToSpecificResults(results)
 			combinedResults = filterValidImages(imageResults)
 		}
 	case <-time.After(2 * time.Second):
 		printDebug("Cache check timeout")
-		if config.MetaSearchEnabled {
-			combinedResults = fetchImageResults(query, safe, lang, page, synchronous, true)
+		if config.CrawlerEnabled {
+			combinedResults = fetchImageResults(query, safe, lang, page, synchronous)
 			if len(combinedResults) > 0 {
 				combinedResults = filterValidImages(combinedResults)
 				resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
@ -126,21 +115,15 @@ func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
 	return combinedResults
 }
 
-func fetchImageResults(query, safe, lang string, page int, synchronous bool, thumbsNeeded bool) []ImageSearchResult {
+func fetchImageResults(query, safe, lang string, page int, synchronous bool) []ImageSearchResult {
 	var results []ImageSearchResult
 
-	// Check if MetaSearchEnabled is false
-	if !config.MetaSearchEnabled {
+	// Check if CrawlerEnabled is false
+	if !config.CrawlerEnabled {
 		printDebug("Crawler is disabled; skipping image search engine fetching.")
 		return results
 	}
 
-	// This will not happen as during config load there is check to have at least something in search engine list
-	// if len(imageSearchEngines) == 0 {
-	// 	printWarn("No image search engines configured in imageSearchEngines")
-	// 	return nil
-	// }
-
 	engineCount := len(imageSearchEngines)
 
 	// Determine the engine to use based on the page number
@ -180,7 +163,7 @@ func fetchImageResults(query, safe, lang string, page int, synchronous bool, thu
 			if config.DriveCacheEnabled {
 				// Cache the thumbnail image asynchronously
 				go func(imgResult ImageSearchResult) {
-					_, success, err := cacheImage(imgResult.Thumb, imgResult.ID, "thumb")
+					_, success, err := cacheImage(imgResult.Thumb, imgResult.ID, true)
 					if err != nil || !success {
 						printWarn("Failed to cache thumbnail image %s: %v", imgResult.Thumb, err)
 						removeImageResultFromCache(query, page, safe == "active", lang, imgResult.ID)
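
The engine-selection code referenced by the trailing context line sits outside this hunk. A common scheme, offered purely as an assumption about what that code might do, is to rotate through the configured engines by page number:

// Assumed sketch: page 1 uses engine 0, page 2 engine 1, wrapping around.
index := (page - 1) % engineCount
engine := imageSearchEngines[index]
_ = engine // the real code would dispatch engine.Func from here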
@ -237,25 +220,23 @@ func fetchImageResults(query, safe, lang string, page int, synchronous bool, thu
 			imageURLMapMu.Unlock()
 
 			if config.DriveCacheEnabled {
-				if thumbsNeeded {
-					go func(imgResult ImageSearchResult) {
-						_, success, err := cacheImage(imgResult.Thumb, imgResult.ID, "thumb")
-						if err != nil || !success {
-							printWarn("Failed to cache thumbnail image %s: %v", imgResult.Thumb, err)
-							removeImageResultFromCache(query, page, safe == "active", lang, imgResult.ID)
-						}
-					}(imageResult)
-
-					imageResult.ProxyThumb = fmt.Sprintf("/image/%s_thumb.webp", hash)
-				} else {
-					imageResult.ProxyThumb = "" // fallback ?
-				}
+				// Cache the thumbnail image asynchronously
+				go func(imgResult ImageSearchResult) {
+					_, success, err := cacheImage(imgResult.Thumb, imgResult.ID, true)
+					if err != nil || !success {
+						printWarn("Failed to cache thumbnail image %s: %v", imgResult.Thumb, err)
+						removeImageResultFromCache(query, page, safe == "active", lang, imgResult.ID)
+					}
+				}(imageResult)
+
+				// Set ProxyThumb to the proxy URL (initially placeholder)
+				imageResult.ProxyThumb = fmt.Sprintf("/image/%s_thumb.webp", hash)
+
+				// Set ProxyFull to the proxy URL
 				imageResult.ProxyFull = fmt.Sprintf("/image/%s_full", hash)
 			} else {
-				if thumbsNeeded {
-					imageResult.ProxyThumb = fmt.Sprintf("/image/%s_thumb", hash)
-				} else {
-					imageResult.ProxyThumb = ""
-				}
+				// Hard cache disabled, proxy both thumb and full images
+				imageResult.ProxyThumb = fmt.Sprintf("/image/%s_thumb", hash)
 				imageResult.ProxyFull = fmt.Sprintf("/image/%s_full", hash)
 			}
@ -1,6 +1,3 @@
-//go:build experimental
-// +build experimental
-
 package main
 
 import (

148 init-extra.go
@ -1,148 +0,0 @@
//go:build experimental
// +build experimental

package main

import (
	"flag"
	"os"
	"path/filepath"
)

var config Config

func main() {
	// Command-line flags
	portFlag := flag.Int("port", 0, "Port number to run the application (overrides config)")
	domainFlag := flag.String("domain", "", "Domain address for the application (overrides config)")
	skipConfigFlag := flag.Bool("skip-config-check", false, "Skip interactive prompts and load config.ini")
	configFlag := flag.String("config", "", "Path to configuration file (overrides default)")

	// Parse command-line flags
	flag.Parse()

	// Override global configFilePath if --config flag is provided
	if *configFlag != "" {
		configFilePath = *configFlag
	}

	if *skipConfigFlag {
		// Skip interactive configuration
		if _, err := os.Stat(configFilePath); err == nil {
			// Load from config file if it exists
			config = loadConfig()
		} else {
			// Use defaults if config file does not exist
			config = defaultConfig
			saveConfig(config) // Save the defaults to config.ini
			printInfo("Configuration saved to %s", configFilePath)
		}
	} else {
		// Initialize configuration interactively or from config file
		err := initConfig()
		if err != nil {
			printErr("Error during initialization: %v", err)
			return
		}
	}

	// Override with command-line arguments if provided
	if *portFlag != 0 {
		config.Port = *portFlag
	}
	if *domainFlag != "" {
		config.Domain = *domainFlag
	}

	loadNodeConfig()

	if config.CrawlerProxyEnabled || config.MetaProxyEnabled {
		InitProxies()
	}

	// Initiate Browser Agent updater
	if config.MetaSearchEnabled || config.IndexerEnabled {
		go periodicAgentUpdate()
	}

	// Load List of Meta Search Engines
	if config.MetaSearchEnabled {
		initTextEngines()
		initImageEngines()
		initFileEngines()
		initPipedInstances()
		initMusicEngines()
		initExchangeRates()
	}

	InitializeLanguage("en") // Initialize language before generating OpenSearch
	generateOpenSearchXML(config)

	// Start the node client only if NodesEnabled is true
	if config.NodesEnabled {
		go startUnixSocketServer(config.NodeID)
		printInfo("Node client started.")
	} else {
		printInfo("Node client is disabled.")
	}

	// Check if the cache directory exists when caching is enabled
	if config.DriveCacheEnabled {
		cacheDir := config.DriveCache.Path
		imagesDir := filepath.Join(cacheDir, "images")

		// Check if the directory already exists
		if _, err := os.Stat(imagesDir); os.IsNotExist(err) {
			// Try to create the directory since it doesn't exist
			if err := os.MkdirAll(imagesDir, os.ModePerm); err != nil {
				printErr("Error: Failed to create cache or images directory '%s': %v", imagesDir, err)
				os.Exit(1) // Exit with a non-zero status to indicate an error
			}
			// Print a warning if the directory had to be created
			printWarn("Warning: Created missing directory '%s'.", imagesDir)
		}
	}

	// Start periodic cleanup of expired cache files
	if config.DriveCacheEnabled {
		go cleanExpiredCachedImages()
		printInfo("Drive cache started.")
	} else {
		printInfo("Drive cache is disabled.")
	}

	// Start periodic cleanup of expired cache files
	if config.RamCacheEnabled {
		resultsCache = NewResultsCache()
		geocodeCache = NewGeocodeCache()
		printInfo("RAM cache started.")
	} else {
		printInfo("RAM cache is disabled.")
	}

	// Init indexer
	if config.IndexerEnabled {
		if err := downloadAndSetupDomainsCSV(); err != nil {
			printErr("Failed to set up domains.csv: %v", err)
			return
		}

		err := InitIndex()
		if err != nil {
			printErr("Failed to initialize index: %v", err)
		}

		webCrawlerInit()

		printInfo("Indexer is enabled.")
	} else {
		printInfo("Indexer is disabled.")
	}

	// if len(config.MetaSearch.Text) == 0 {
	// 	log.Fatal("No text search engines are enabled in config (MetaSearch.Text)")
	// }
	// fmt.Printf("Loaded config.MetaSearch.Text: %#v\n", config.MetaSearch.Text)

	runServer()
}
72 init.go
@ -1,6 +1,3 @@
-//go:build !experimental
-// +build !experimental
-
 package main
 
 import (
@ -12,21 +9,14 @@ import (
 var config Config
 
 func main() {
 	// Command-line flags
 	portFlag := flag.Int("port", 0, "Port number to run the application (overrides config)")
 	domainFlag := flag.String("domain", "", "Domain address for the application (overrides config)")
 	skipConfigFlag := flag.Bool("skip-config-check", false, "Skip interactive prompts and load config.ini")
-	configFlag := flag.String("config", "", "Path to configuration file (overrides default)")
 
 	// Parse command-line flags
 	flag.Parse()
 
-	// Override global configFilePath if --config flag is provided
-	if *configFlag != "" {
-		configFilePath = *configFlag
-	}
-
 	if *skipConfigFlag {
 		// Skip interactive configuration
 		if _, err := os.Stat(configFilePath); err == nil {
@ -55,28 +45,37 @@ func main() {
 		config.Domain = *domainFlag
 	}
 
-	if config.MetaProxyEnabled {
-		InitProxies()
+	loadNodeConfig()
+
+	if config.AuthCode == "" {
+		config.AuthCode = generateStrongRandomString(64)
+		printInfo("Generated connection code: %s\n", config.AuthCode)
+		saveConfig(config)
 	}
 
+	// Generate Host ID
+	hostID, nodeErr := generateHostID()
+	if nodeErr != nil {
+		printErr("Failed to generate host ID: %v", nodeErr)
+	}
+	config.PeerID = hostID
+
 	// Initiate Browser Agent updater
-	if config.MetaSearchEnabled || config.IndexerEnabled {
+	if config.CrawlerEnabled || config.IndexerEnabled {
 		go periodicAgentUpdate()
 	}
 
-	// Load List of Meta Search Engines
-	if config.MetaSearchEnabled {
-		initTextEngines()
-		initImageEngines()
-		initFileEngines()
-		initPipedInstances()
-		initMusicEngines()
-		initExchangeRates()
-	}
-
 	InitializeLanguage("en") // Initialize language before generating OpenSearch
 	generateOpenSearchXML(config)
 
+	// Start the node client only if NodesEnabled is true
+	if config.NodesEnabled {
+		go startNodeClient()
+		printInfo("Node client started.")
+	} else {
+		printInfo("Node client is disabled.")
+	}
+
 	// Check if the cache directory exists when caching is enabled
 	if config.DriveCacheEnabled {
 		cacheDir := config.DriveCache.Path
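
generateStrongRandomString and generateHostID, introduced on the indexing side above, are not defined anywhere in this diff. A minimal, runnable sketch of what the former could look like using crypto/rand; this is an assumption, not the repository's implementation:

package main

import (
	"crypto/rand"
	"encoding/base64"
	"fmt"
)

// generateStrongRandomString returns a URL-safe random string of the given
// length, backed by the OS entropy source via crypto/rand.
func generateStrongRandomString(length int) string {
	b := make([]byte, length)
	if _, err := rand.Read(b); err != nil {
		panic(err) // entropy failure is unrecoverable here
	}
	// Base64 expands 3 bytes to 4 chars, so the encoding is always long enough.
	return base64.RawURLEncoding.EncodeToString(b)[:length]
}

func main() {
	fmt.Println(generateStrongRandomString(64))
}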
@ -111,10 +110,29 @@ func main() {
 		printInfo("RAM cache is disabled.")
 	}
 
-	// if len(config.MetaSearch.Text) == 0 {
-	// 	log.Fatal("No text search engines are enabled in config (MetaSearch.Text)")
-	// }
-	// fmt.Printf("Loaded config.MetaSearch.Text: %#v\n", config.MetaSearch.Text)
+	// Init indexer
+	if config.IndexerEnabled {
+		if err := downloadAndSetupDomainsCSV(); err != nil {
+			printErr("Failed to set up domains.csv: %v", err)
+			return
+		}
+
+		err := InitIndex()
+		if err != nil {
+			printErr("Failed to initialize index: %v", err)
+		}
+
+		webCrawlerInit()
+
+		// No longer needed as crawled data are indexed immediately
+		// // Start periodic indexing (every 2 minutes)
+		// dataFilePath := filepath.Join(config.DriveCache.Path, "data_to_index.txt")
+		// startPeriodicIndexing(dataFilePath, 2*time.Minute)
+
+		printInfo("Indexer is enabled.")
+	} else {
+		printInfo("Indexer is disabled.")
+	}
 
 	runServer()
 }
@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "Torrents"
 
 msgid "searching_for_new_results"
-msgstr "Soek vir nuwe resultate"
+msgstr "Soek vir nuwe resultate..."
 
 msgid "previous"
 msgstr "Vorige"
@ -116,7 +116,7 @@ msgid "next"
 msgstr "Volgende"
 
 msgid "fetched_in"
-msgstr "Verkry in %s"
+msgstr "Verkry in %s sekondes"
 
 msgid "sort_seeders"
 msgstr "Aantal saaiers"
@ -184,6 +184,8 @@ msgstr "Strate"
 msgid "satellite"
 msgstr "Satelliet"
 
+msgid "esri_satellite"
+msgstr "Esri Satelliet"
 
 msgid "topographic"
 msgstr "Topografiese"
@ -196,9 +198,3 @@ msgstr "Jy is binne "
 
 msgid "meters_from_point"
 msgstr "meter van hierdie punt af"
-
-msgid "seconds"
-msgstr "Sekondes"
-
-msgid "milliseconds"
-msgstr "Millisekondes"
@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "تورنتات"
 
 msgid "searching_for_new_results"
-msgstr "جاري البحث عن نتائج جديدة"
+msgstr "جاري البحث عن نتائج جديدة..."
 
 msgid "previous"
 msgstr "السابق"
@ -116,7 +116,7 @@ msgid "next"
 msgstr "التالي"
 
 msgid "fetched_in"
-msgstr "تم التحميل في %s"
+msgstr "تم التحميل في %s ثوانٍ"
 
 msgid "sort_seeders"
 msgstr "عدد المزودين"
@ -184,6 +184,8 @@ msgstr "شوارع"
 msgid "satellite"
 msgstr "قمر صناعي"
 
+msgid "esri_satellite"
+msgstr "قمر صناعي ESRI"
 
 msgid "topographic"
 msgstr "طوبوغرافي"
@ -196,9 +198,3 @@ msgstr "أنت على بعد "
 
 msgid "meters_from_point"
 msgstr "أمتار من هذه النقطة"
-
-msgid "seconds"
-msgstr "ثواني"
-
-msgid "milliseconds"
-msgstr "ميلي ثانية"
@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "Торэнты"
 
 msgid "searching_for_new_results"
-msgstr "Пошук новых вынікаў"
+msgstr "Пошук новых вынікаў..."
 
 msgid "previous"
 msgstr "Папярэдняе"
@ -116,7 +116,7 @@ msgid "next"
 msgstr "Наступнае"
 
 msgid "fetched_in"
-msgstr "Загружана за %s"
+msgstr "Загружана за %s секунд"
 
 msgid "sort_seeders"
 msgstr "Па колькасці сейдэраў"
@ -184,6 +184,8 @@ msgstr "Вуліцы"
 msgid "satellite"
 msgstr "Спадарожнік"
 
+msgid "esri_satellite"
+msgstr "Спадарожнік ESRI"
 
 msgid "topographic"
 msgstr "Тапаграфічная"
@ -197,8 +199,3 @@ msgstr "Вы знаходзіцеся на адлегласці"
 msgid "meters_from_point"
 msgstr "метраў ад гэтага пункта"
-
-msgid "seconds"
-msgstr "Секунды"
-
-msgid "milliseconds"
-msgstr "Мілісекунды"
@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "Торенти"
 
 msgid "searching_for_new_results"
-msgstr "Търсят се нови резултати"
+msgstr "Търсят се нови резултати..."
 
 msgid "previous"
 msgstr "Предишен"
@ -116,7 +116,7 @@ msgid "next"
 msgstr "Следващ"
 
 msgid "fetched_in"
-msgstr "Заредено за %s"
+msgstr "Заредено за %s секунди"
 
 msgid "sort_seeders"
 msgstr "Сийдъри (качване)"
@ -184,6 +184,8 @@ msgstr "Улици"
 msgid "satellite"
 msgstr "Сателит"
 
+msgid "esri_satellite"
+msgstr "ESRI Сателит"
 
 msgid "topographic"
 msgstr "Топографска"
@ -196,9 +198,3 @@ msgstr "Намирате се на "
 
 msgid "meters_from_point"
 msgstr "метра от тази точка"
-
-msgid "seconds"
-msgstr "Секунди"
-
-msgid "milliseconds"
-msgstr "Милисекунди"
@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "Torrents"
 
 msgid "searching_for_new_results"
-msgstr "Cercant nous resultats"
+msgstr "Cercant nous resultats..."
 
 msgid "previous"
 msgstr "Anterior"
@ -116,7 +116,7 @@ msgid "next"
 msgstr "Següent"
 
 msgid "fetched_in"
-msgstr "Recuperat en %s"
+msgstr "Recuperat en %s segons"
 
 msgid "sort_seeders"
 msgstr "Ordena per fonts"
@ -184,6 +184,8 @@ msgstr "Carrers"
 msgid "satellite"
 msgstr "Satèl·lit"
 
+msgid "esri_satellite"
+msgstr "Satèl·lit ESRI"
 
 msgid "topographic"
 msgstr "Topogràfic"
@ -196,9 +198,3 @@ msgstr "Ets a "
 
 msgid "meters_from_point"
 msgstr "metres d'aquest punt"
-
-msgid "seconds"
-msgstr "Segons"
-
-msgid "milliseconds"
-msgstr "Mil·lisegons"
@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "Torrenty"
 
 msgid "searching_for_new_results"
-msgstr "Hledám nové výsledky"
+msgstr "Hledám nové výsledky..."
 
 msgid "previous"
 msgstr "Předchozí"
@ -116,7 +116,7 @@ msgid "next"
 msgstr "Další"
 
 msgid "fetched_in"
-msgstr "Načteno za %s"
+msgstr "Načteno za %s sekund"
 
 msgid "sort_seeders"
 msgstr "Počet seedů"
@ -184,6 +184,8 @@ msgstr "Ulice"
 msgid "satellite"
 msgstr "Satelitní"
 
+msgid "esri_satellite"
+msgstr "Esri Satelitní"
 
 msgid "topographic"
 msgstr "Topografická"
@ -196,8 +198,3 @@ msgstr "Jste v dosahu "
 
 msgid "meters_from_point"
 msgstr "metrů od tohoto bodu"
-msgid "seconds"
-msgstr "Sekundy"
-
-msgid "milliseconds"
-msgstr "Milisekundy"
@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "Torrenter"
 
 msgid "searching_for_new_results"
-msgstr "Søger efter nye resultater"
+msgstr "Søger efter nye resultater..."
 
 msgid "previous"
 msgstr "Forrige"
@ -116,7 +116,7 @@ msgid "next"
 msgstr "Næste"
 
 msgid "fetched_in"
-msgstr "Hentet på %s"
+msgstr "Hentet på %s sekunder"
 
 msgid "sort_seeders"
 msgstr "Sorter efter seeders"
@ -184,6 +184,8 @@ msgstr "Gader"
 msgid "satellite"
 msgstr "Satellit"
 
+msgid "esri_satellite"
+msgstr "ESRI Satellit"
 
 msgid "topographic"
 msgstr "Topografisk"
@ -196,9 +198,3 @@ msgstr "Du er inden for "
 
 msgid "meters_from_point"
 msgstr "meter fra dette punkt"
-
-msgid "seconds"
-msgstr "Sekunder"
-
-msgid "milliseconds"
-msgstr "Millisekunder"
@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "Torrents"
 
 msgid "searching_for_new_results"
-msgstr "Suche nach neuen Ergebnissen"
+msgstr "Suche nach neuen Ergebnissen..."
 
 msgid "previous"
 msgstr "Vorherige"
@ -116,7 +116,7 @@ msgid "next"
 msgstr "Nächste"
 
 msgid "fetched_in"
-msgstr "Abgerufen in %s"
+msgstr "Abgerufen in %s Sekunden"
 
 msgid "sort_seeders"
 msgstr "Sortieren nach Seeders"
@ -184,6 +184,8 @@ msgstr "Straßen"
 msgid "satellite"
 msgstr "Satellit"
 
+msgid "esri_satellite"
+msgstr "ESRI-Satellit"
 
 msgid "topographic"
 msgstr "Topographisch"
@ -196,9 +198,3 @@ msgstr "Sie befinden sich innerhalb von "
 
 msgid "meters_from_point"
 msgstr "Metern von diesem Punkt entfernt"
-
-msgid "seconds"
-msgstr "Sekunden"
-
-msgid "milliseconds"
-msgstr "Millisekunden"
@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "Torrents"
 
 msgid "searching_for_new_results"
-msgstr "Αναζήτηση νέων αποτελεσμάτων"
+msgstr "Αναζήτηση νέων αποτελεσμάτων..."
 
 msgid "previous"
 msgstr "Προηγούμενο"
@ -116,7 +116,7 @@ msgid "next"
 msgstr "Επόμενο"
 
 msgid "fetched_in"
-msgstr "Ανακτήθηκε σε %s"
+msgstr "Ανακτήθηκε σε %s δευτερόλεπτα"
 
 msgid "sort_seeders"
 msgstr "Ταξινόμηση κατά seeders"
@ -184,6 +184,8 @@ msgstr "Δρόμοι"
 msgid "satellite"
 msgstr "Δορυφόρος"
 
+msgid "esri_satellite"
+msgstr "ESRI Δορυφόρος"
 
 msgid "topographic"
 msgstr "Τοπογραφικός"
@ -196,9 +198,3 @@ msgstr "Βρίσκεστε εντός "
 
 msgid "meters_from_point"
 msgstr "μέτρων από αυτό το σημείο"
-
-msgid "seconds"
-msgstr "Δευτερόλεπτα"
-
-msgid "milliseconds"
-msgstr "Χιλιοστά του δευτερολέπτου"
@ -65,7 +65,7 @@ msgid "site_name"
 msgstr "QGato"
 
 msgid "site_description"
-msgstr "A open-source private search engine."
+msgstr "QGato - Private & Open"
 
 msgid "site_tags"
 msgstr "search, qgato, spitfire"
@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "Torrents"
 
 msgid "searching_for_new_results"
-msgstr "Searching for new results"
+msgstr "Searching for new results..."
 
 msgid "previous"
 msgstr "Previous"
@ -116,13 +116,7 @@ msgid "next"
 msgstr "Next"
 
 msgid "fetched_in"
-msgstr "Fetched in %s"
-
-msgid "seconds"
-msgstr "seconds"
-
-msgid "milliseconds"
-msgstr "milliseconds"
+msgstr "Fetched in %s seconds"
 
 msgid "sort_seeders"
 msgstr "Number of Seeders"
@ -190,6 +184,8 @@ msgstr "Streets"
 msgid "satellite"
 msgstr "Satellite"
 
+msgid "esri_satellite"
+msgstr "Esri Satellite"
 
 msgid "topographic"
 msgstr "Topographic"
@ -202,9 +198,3 @@ msgstr "You are within "
 
 msgid "meters_from_point"
 msgstr "meters from this point"
-
-msgid "seconds"
-msgstr "Seconds"
-
-msgid "milliseconds"
-msgstr "Milliseconds"
@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "Torentoj"
 
 msgid "searching_for_new_results"
-msgstr "Serĉante novajn rezultojn"
+msgstr "Serĉante novajn rezultojn..."
 
 msgid "previous"
 msgstr "Antaŭa"
@ -116,7 +116,7 @@ msgid "next"
 msgstr "Sekva"
 
 msgid "fetched_in"
-msgstr "Prenita en %s"
+msgstr "Prenita en %s sekundoj"
 
 msgid "sort_seeders"
 msgstr "Ordigi laŭ semantoj"
@ -184,6 +184,8 @@ msgstr "Stratoj"
 msgid "satellite"
 msgstr "Satelito"
 
+msgid "esri_satellite"
+msgstr "ESRI Satelito"
 
 msgid "topographic"
 msgstr "Topografia"
@ -196,9 +198,3 @@ msgstr "Vi estas ene de "
 
 msgid "meters_from_point"
 msgstr "metroj de ĉi tiu punkto"
-
-msgid "seconds"
-msgstr "Sekundoj"
-
-msgid "milliseconds"
-msgstr "Milisekundoj"
@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "Torrents"
 
 msgid "searching_for_new_results"
-msgstr "Buscando nuevos resultados"
+msgstr "Buscando nuevos resultados..."
 
 msgid "previous"
 msgstr "Anterior"
@ -116,7 +116,7 @@ msgid "next"
 msgstr "Siguiente"
 
 msgid "fetched_in"
-msgstr "Obtenido en %s"
+msgstr "Obtenido en %s segundos"
 
 msgid "sort_seeders"
 msgstr "Ordenar por seeders"
@ -184,6 +184,8 @@ msgstr "Calles"
 msgid "satellite"
 msgstr "Satélite"
 
+msgid "esri_satellite"
+msgstr "Satélite ESRI"
 
 msgid "topographic"
 msgstr "Topográfico"
@ -196,9 +198,3 @@ msgstr "Estás dentro de "
 
 msgid "meters_from_point"
 msgstr "metros de este punto"
-
-msgid "seconds"
-msgstr "Segundos"
-
-msgid "milliseconds"
-msgstr "Milisegundos"
@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "Torrendid"
 
 msgid "searching_for_new_results"
-msgstr "Otsitakse uusi tulemusi"
+msgstr "Otsitakse uusi tulemusi..."
 
 msgid "previous"
 msgstr "Eelmine"
@ -116,7 +116,7 @@ msgid "next"
 msgstr "Järgmine"
 
 msgid "fetched_in"
-msgstr "Laaditud %s"
+msgstr "Laaditud %s sekundiga"
 
 msgid "sort_seeders"
 msgstr "Sorteeri külvajate järgi"
@ -184,6 +184,8 @@ msgstr "Tänavad"
 msgid "satellite"
 msgstr "Satelliit"
 
+msgid "esri_satellite"
+msgstr "ESRI Satelliit"
 
 msgid "topographic"
 msgstr "Topograafiline"
@ -196,9 +198,3 @@ msgstr "Olete "
 
 msgid "meters_from_point"
 msgstr "meetri kaugusel sellest punktist"
-
-msgid "seconds"
-msgstr "Sekundit"
-
-msgid "milliseconds"
-msgstr "Millisekundit"
@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "تورنتها"
 
 msgid "searching_for_new_results"
-msgstr "در حال جستجوی نتایج جدید"
+msgstr "در حال جستجوی نتایج جدید..."
 
 msgid "previous"
 msgstr "قبلی"
@ -116,7 +116,7 @@ msgid "next"
 msgstr "بعدی"
 
 msgid "fetched_in"
-msgstr "بازیابی شده در %s"
+msgstr "بازیابی شده در %s ثانیه"
 
 msgid "sort_seeders"
 msgstr "مرتبسازی بر اساس سیدرها"
@ -184,6 +184,8 @@ msgstr "خیابانها"
 msgid "satellite"
 msgstr "ماهواره"
 
+msgid "esri_satellite"
+msgstr "ماهواره ESRI"
 
 msgid "topographic"
 msgstr "توپوگرافی"
@ -196,9 +198,3 @@ msgstr "شما در فاصله "
 
 msgid "meters_from_point"
 msgstr "متری از این نقطه قرار دارید"
-
-msgid "seconds"
-msgstr "ثانیه"
-
-msgid "milliseconds"
-msgstr "میلیثانیه"
@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "Torrentit"
 
 msgid "searching_for_new_results"
-msgstr "Haetaan uusia tuloksia"
+msgstr "Haetaan uusia tuloksia..."
 
 msgid "previous"
 msgstr "Edellinen"
@ -116,7 +116,7 @@ msgid "next"
 msgstr "Seuraava"
 
 msgid "fetched_in"
-msgstr "Haettu %s"
+msgstr "Haettu %s sekunnissa"
 
 msgid "sort_seeders"
 msgstr "Lajittele lähettäjien mukaan"
@ -184,6 +184,8 @@ msgstr "Kadut"
 msgid "satellite"
 msgstr "Satelliitti"
 
+msgid "esri_satellite"
+msgstr "ESRI Satelliitti"
 
 msgid "topographic"
 msgstr "Topografinen"
@ -196,9 +198,3 @@ msgstr "Olet "
 
 msgid "meters_from_point"
 msgstr "metrin päässä tästä pisteestä"
-
-msgid "seconds"
-msgstr "Sekuntia"
-
-msgid "milliseconds"
-msgstr "Millisekuntia"
@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "Torrents"
 
 msgid "searching_for_new_results"
-msgstr "Recherche de nouveaux résultats"
+msgstr "Recherche de nouveaux résultats..."
 
 msgid "previous"
 msgstr "Précédent"
@ -116,7 +116,7 @@ msgid "next"
 msgstr "Suivant"
 
 msgid "fetched_in"
-msgstr "Récupéré en %s"
+msgstr "Récupéré en %s secondes"
 
 msgid "sort_seeders"
 msgstr "Trier par seeders"
@ -184,6 +184,8 @@ msgstr "Rues"
 msgid "satellite"
 msgstr "Satellite"
 
+msgid "esri_satellite"
+msgstr "Satellite ESRI"
 
 msgid "topographic"
 msgstr "Topographique"
@ -196,9 +198,3 @@ msgstr "Vous êtes à "
 
 msgid "meters_from_point"
 msgstr "mètres de ce point"
-
-msgid "seconds"
-msgstr "Secondes"
-
-msgid "milliseconds"
-msgstr "Millisecondes"
@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "टोरेंट्स"
 
 msgid "searching_for_new_results"
-msgstr "नए परिणामों की खोज कर रहे हैं"
+msgstr "नए परिणामों की खोज कर रहे हैं..."
 
 msgid "previous"
 msgstr "पिछला"
@ -116,7 +116,7 @@ msgid "next"
 msgstr "अगला"
 
 msgid "fetched_in"
-msgstr "%s"
+msgstr "%s सेकंड में प्राप्त किया गया"
 
 msgid "sort_seeders"
 msgstr "सीडर्स के अनुसार छांटें"
@ -184,6 +184,8 @@ msgstr "सड़कें"
 msgid "satellite"
 msgstr "सैटेलाइट"
 
+msgid "esri_satellite"
+msgstr "ESRI सैटेलाइट"
 
 msgid "topographic"
 msgstr "टोपोग्राफिक"
@ -196,9 +198,3 @@ msgstr "आप यहाँ हैं: "
 
 msgid "meters_from_point"
 msgstr "मीटर इस बिंदु से दूर"
-
-msgid "seconds"
-msgstr "सेकंड"
-
-msgid "milliseconds"
-msgstr "मिलीसेकंड"
@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "Torrenti"
 
 msgid "searching_for_new_results"
-msgstr "Traže se novi rezultati"
+msgstr "Traže se novi rezultati..."
 
 msgid "previous"
 msgstr "Prethodno"
@ -116,7 +116,7 @@ msgid "next"
 msgstr "Sljedeće"
 
 msgid "fetched_in"
-msgstr "Dohvaćeno za %s"
+msgstr "Dohvaćeno za %s sekundi"
 
 msgid "sort_seeders"
 msgstr "Sjeme (najviše)"
@ -184,6 +184,8 @@ msgstr "Ulice"
 msgid "satellite"
 msgstr "Satelit"
 
+msgid "esri_satellite"
+msgstr "ESRI Satelit"
 
 msgid "topographic"
 msgstr "Topografski"
@ -196,9 +198,3 @@ msgstr "Nalazite se unutar "
 
 msgid "meters_from_point"
 msgstr "metara od ove točke"
-
-msgid "seconds"
-msgstr "Sekunde"
-
-msgid "milliseconds"
-msgstr "Milisekunde"
@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "Torrents"
 
 msgid "searching_for_new_results"
-msgstr "Új találatok keresése"
+msgstr "Új találatok keresése..."
 
 msgid "previous"
 msgstr "Előző"
@ -116,7 +116,7 @@ msgid "next"
 msgstr "Következő"
 
 msgid "fetched_in"
-msgstr "Lekérve %s"
+msgstr "Lekérve %s másodperc alatt"
 
 msgid "sort_seeders"
 msgstr "Rendezés seederek szerint"
@ -184,6 +184,8 @@ msgstr "Utcák"
 msgid "satellite"
 msgstr "Műhold"
 
+msgid "esri_satellite"
+msgstr "ESRI Műhold"
 
 msgid "topographic"
 msgstr "Topográfiai"
@ -196,9 +198,3 @@ msgstr "Ön itt van: "
 
 msgid "meters_from_point"
 msgstr "méterre ettől a ponttól"
-
-msgid "seconds"
-msgstr "Másodperc"
-
-msgid "milliseconds"
-msgstr "Milliszekundum"
@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "Թորրենտներ"
 
 msgid "searching_for_new_results"
-msgstr "Նոր արդյունքներ որոնվում են"
+msgstr "Նոր արդյունքներ որոնվում են..."
 
 msgid "previous"
 msgstr "Նախորդը"
@ -116,7 +116,7 @@ msgid "next"
 msgstr "Հաջորդը"
 
 msgid "fetched_in"
-msgstr "Բեռնված է %s"
+msgstr "Բեռնված է %s վայրկյանում"
 
 msgid "sort_seeders"
 msgstr "Ներբեռնում (արտահանող)"
@ -184,6 +184,8 @@ msgstr "Փողոցներ"
 msgid "satellite"
 msgstr "Արհեստական արբանյակ"
 
+msgid "esri_satellite"
+msgstr "ESRI Արհեստական արբանյակ"
 
 msgid "topographic"
 msgstr "Տոպոգրաֆիկ"
@ -196,9 +198,3 @@ msgstr "Դուք գտնվում եք "
 
 msgid "meters_from_point"
 msgstr "մետր հեռավորության վրա այս կետից"
-
-msgid "seconds"
-msgstr "Վայրկյաններ"
-
-msgid "milliseconds"
-msgstr "Միլիվայրկյաններ"
@@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "Torrent"
 
 msgid "searching_for_new_results"
-msgstr "Mencari hasil baru"
+msgstr "Mencari hasil baru..."
 
 msgid "previous"
 msgstr "Sebelumnya"
@@ -116,7 +116,7 @@ msgid "next"
 msgstr "Berikutnya"
 
 msgid "fetched_in"
-msgstr "Ditemukan dalam %s"
+msgstr "Ditemukan dalam %s detik"
 
 msgid "sort_seeders"
 msgstr "Urutkan berdasarkan seeder"
@@ -184,6 +184,8 @@ msgstr "Jalan"
 msgid "satellite"
 msgstr "Satelit"
 
+msgid "esri_satellite"
+msgstr "Satelit ESRI"
 
 msgid "topographic"
 msgstr "Topografi"
@@ -196,9 +198,3 @@ msgstr "Anda berada dalam jarak "
 
 msgid "meters_from_point"
 msgstr "meter dari titik ini"
-
-msgid "seconds"
-msgstr "Detik"
-
-msgid "milliseconds"
-msgstr "Milidetik"

@@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "Torrent"
 
 msgid "searching_for_new_results"
-msgstr "Ricerca di nuovi risultati"
+msgstr "Ricerca di nuovi risultati..."
 
 msgid "previous"
 msgstr "Precedente"
@@ -116,7 +116,7 @@ msgid "next"
 msgstr "Successivo"
 
 msgid "fetched_in"
-msgstr "Ottenuto in %s"
+msgstr "Ottenuto in %s secondi"
 
 msgid "sort_seeders"
 msgstr "Ordina per seeders"
@@ -184,6 +184,8 @@ msgstr "Strade"
 msgid "satellite"
 msgstr "Satellitare"
 
+msgid "esri_satellite"
+msgstr "Satellitare ESRI"
 
 msgid "topographic"
 msgstr "Topografico"
@@ -196,9 +198,3 @@ msgstr "Sei entro "
 
 msgid "meters_from_point"
 msgstr "metri da questo punto"
-
-msgid "seconds"
-msgstr "Secondi"
-
-msgid "milliseconds"
-msgstr "Millisecondi"

@@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "טורנטים"
 
 msgid "searching_for_new_results"
-msgstr "מחפש תוצאות חדשות"
+msgstr "מחפש תוצאות חדשות..."
 
 msgid "previous"
 msgstr "הקודם"
@@ -116,7 +116,7 @@ msgid "next"
 msgstr "הבא"
 
 msgid "fetched_in"
-msgstr "הובא ב-%s"
+msgstr "הובא ב-%s שניות"
 
 msgid "sort_seeders"
 msgstr "מיון לפי משתפים"
@@ -184,6 +184,8 @@ msgstr "רחובות"
 msgid "satellite"
 msgstr "לוויין"
 
+msgid "esri_satellite"
+msgstr "לוויין ESRI"
 
 msgid "topographic"
 msgstr "טופוגרפי"
@@ -196,9 +198,3 @@ msgstr "אתם נמצאים במרחק של "
 
 msgid "meters_from_point"
 msgstr "מטרים מהנקודה הזו"
-
-msgid "seconds"
-msgstr "שניות"
-
-msgid "milliseconds"
-msgstr "אלפיות שניה"

@@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "トレント"
 
 msgid "searching_for_new_results"
-msgstr "新しい結果を検索中"
+msgstr "新しい結果を検索中..."
 
 msgid "previous"
 msgstr "前"
@@ -116,7 +116,7 @@ msgid "next"
 msgstr "次"
 
 msgid "fetched_in"
-msgstr "%s"
+msgstr "%s 秒で取得"
 
 msgid "sort_seeders"
 msgstr "シーダーで並べ替え"
@@ -184,6 +184,8 @@ msgstr "ストリート"
 msgid "satellite"
 msgstr "衛星"
 
+msgid "esri_satellite"
+msgstr "ESRI 衛星"
 
 msgid "topographic"
 msgstr "地形図"
@@ -196,9 +198,3 @@ msgstr "あなたは "
 
 msgid "meters_from_point"
 msgstr "メートル以内の位置にいます"
-
-msgid "seconds"
-msgstr "秒"
-
-msgid "milliseconds"
-msgstr "ミリ秒"

@@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "토렌트"
 
 msgid "searching_for_new_results"
-msgstr "새로운 결과를 검색 중"
+msgstr "새로운 결과를 검색 중..."
 
 msgid "previous"
 msgstr "이전"
@@ -116,7 +116,7 @@ msgid "next"
 msgstr "다음"
 
 msgid "fetched_in"
-msgstr "%s"
+msgstr "%s초 만에 가져옴"
 
 msgid "sort_seeders"
 msgstr "시더 기준 정렬"
@@ -184,6 +184,8 @@ msgstr "거리"
 msgid "satellite"
 msgstr "위성"
 
+msgid "esri_satellite"
+msgstr "ESRI 위성"
 
 msgid "topographic"
 msgstr "지형도"
@@ -196,9 +198,3 @@ msgstr "당신은 이 안에 있습니다: "
 
 msgid "meters_from_point"
 msgstr "미터 떨어진 지점"
-
-msgid "seconds"
-msgstr "초"
-
-msgid "milliseconds"
-msgstr "밀리초"

@@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "Torrentai"
 
 msgid "searching_for_new_results"
-msgstr "Ieškoma naujų rezultatų"
+msgstr "Ieškoma naujų rezultatų..."
 
 msgid "previous"
 msgstr "Ankstesnis"
@@ -116,7 +116,7 @@ msgid "next"
 msgstr "Kitas"
 
 msgid "fetched_in"
-msgstr "Gauta per %s"
+msgstr "Gauta per %s sekundes"
 
 msgid "sort_seeders"
 msgstr "Rikiuoti pagal siuntėjus"
@@ -184,6 +184,8 @@ msgstr "Gatvės"
 msgid "satellite"
 msgstr "Palydovas"
 
+msgid "esri_satellite"
+msgstr "ESRI palydovas"
 
 msgid "topographic"
 msgstr "Topografinis"
@@ -196,9 +198,3 @@ msgstr "Jūs esate "
 
 msgid "meters_from_point"
 msgstr "metrų nuo šio taško"
-
-msgid "seconds"
-msgstr "Sekundės"
-
-msgid "milliseconds"
-msgstr "Milisekundės"

@@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "Torenti"
 
 msgid "searching_for_new_results"
-msgstr "Meklē jaunus rezultātus"
+msgstr "Meklē jaunus rezultātus..."
 
 msgid "previous"
 msgstr "Iepriekšējais"
@@ -116,7 +116,7 @@ msgid "next"
 msgstr "Nākamais"
 
 msgid "fetched_in"
-msgstr "Iegūts %s"
+msgstr "Iegūts %s sekundēs"
 
 msgid "sort_seeders"
 msgstr "Kārtot pēc sējējiem"
@@ -184,6 +184,8 @@ msgstr "Ielas"
 msgid "satellite"
 msgstr "Satelīts"
 
+msgid "esri_satellite"
+msgstr "ESRI satelīts"
 
 msgid "topographic"
 msgstr "Topogrāfiskais"
@@ -196,9 +198,3 @@ msgstr "Jūs atrodaties "
 
 msgid "meters_from_point"
 msgstr "metru attālumā no šī punkta"
-
-msgid "seconds"
-msgstr "Sekundes"
-
-msgid "milliseconds"
-msgstr "Milisekundes"

@@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "Torrents"
 
 msgid "searching_for_new_results"
-msgstr "Nieuwe resultaten zoeken"
+msgstr "Nieuwe resultaten zoeken..."
 
 msgid "previous"
 msgstr "Vorige"
@@ -116,7 +116,7 @@ msgid "next"
 msgstr "Volgende"
 
 msgid "fetched_in"
-msgstr "Opgehaald in %s"
+msgstr "Opgehaald in %s seconden"
 
 msgid "sort_seeders"
 msgstr "Sorteer op seeders"
@@ -184,6 +184,8 @@ msgstr "Straten"
 msgid "satellite"
 msgstr "Satelliet"
 
+msgid "esri_satellite"
+msgstr "ESRI Satelliet"
 
 msgid "topographic"
 msgstr "Topografisch"
@@ -196,9 +198,3 @@ msgstr "Je bevindt je binnen "
 
 msgid "meters_from_point"
 msgstr "meter van dit punt"
-
-msgid "seconds"
-msgstr "Seconden"
-
-msgid "milliseconds"
-msgstr "Milliseconden"

@@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "Torrenter"
 
 msgid "searching_for_new_results"
-msgstr "Søker etter nye resultater"
+msgstr "Søker etter nye resultater..."
 
 msgid "previous"
 msgstr "Forrige"
@@ -116,7 +116,7 @@ msgid "next"
 msgstr "Neste"
 
 msgid "fetched_in"
-msgstr "Hentet på %s"
+msgstr "Hentet på %s sekunder"
 
 msgid "sort_seeders"
 msgstr "Sorter etter seeders"
@@ -184,6 +184,8 @@ msgstr "Gater"
 msgid "satellite"
 msgstr "Satellitt"
 
+msgid "esri_satellite"
+msgstr "ESRI Satellitt"
 
 msgid "topographic"
 msgstr "Topografisk"
@@ -196,9 +198,3 @@ msgstr "Du er innenfor "
 
 msgid "meters_from_point"
 msgstr "meter fra dette punktet"
-
-msgid "seconds"
-msgstr "Sekunder"
-
-msgid "milliseconds"
-msgstr "Millisekunder"

@@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "Torrenty"
 
 msgid "searching_for_new_results"
-msgstr "Wyszukiwanie nowych wyników"
+msgstr "Wyszukiwanie nowych wyników..."
 
 msgid "previous"
 msgstr "Poprzednie"
@@ -116,7 +116,7 @@ msgid "next"
 msgstr "Następne"
 
 msgid "fetched_in"
-msgstr "Pobrano w %s"
+msgstr "Pobrano w %s sekund"
 
 msgid "sort_seeders"
 msgstr "Liczba seedów"
@@ -184,6 +184,8 @@ msgstr "Ulice"
 msgid "satellite"
 msgstr "Satelita"
 
+msgid "esri_satellite"
+msgstr "Esri Satelita"
 
 msgid "topographic"
 msgstr "Topograficzna"
@@ -196,9 +198,3 @@ msgstr "Znajdujesz się w odległości "
 
 msgid "meters_from_point"
 msgstr "metrów od tego punktu"
-
-msgid "seconds"
-msgstr "Sekundy"
-
-msgid "milliseconds"
-msgstr "Milisekundy"

@@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "Torrents"
 
 msgid "searching_for_new_results"
-msgstr "Procurando por novos resultados"
+msgstr "Procurando por novos resultados..."
 
 msgid "previous"
 msgstr "Anterior"
@@ -116,7 +116,7 @@ msgid "next"
 msgstr "Próximo"
 
 msgid "fetched_in"
-msgstr "Obtido em %s"
+msgstr "Obtido em %s segundos"
 
 msgid "sort_seeders"
 msgstr "Ordenar por seeders"
@@ -184,6 +184,8 @@ msgstr "Ruas"
 msgid "satellite"
 msgstr "Satélite"
 
+msgid "esri_satellite"
+msgstr "Satélite ESRI"
 
 msgid "topographic"
 msgstr "Topográfico"
@@ -196,9 +198,3 @@ msgstr "Você está dentro de "
 
 msgid "meters_from_point"
 msgstr "metros deste ponto"
-
-msgid "seconds"
-msgstr "Segundos"
-
-msgid "milliseconds"
-msgstr "Milissegundos"

@@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "Torrenturi"
 
 msgid "searching_for_new_results"
-msgstr "Caut rezultate noi"
+msgstr "Caut rezultate noi..."
 
 msgid "previous"
 msgstr "Anterior"
@@ -116,7 +116,7 @@ msgid "next"
 msgstr "Următorul"
 
 msgid "fetched_in"
-msgstr "Obținut în %s"
+msgstr "Obținut în %s secunde"
 
 msgid "sort_seeders"
 msgstr "Sortează după seeders"
@@ -184,6 +184,8 @@ msgstr "Străzi"
 msgid "satellite"
 msgstr "Satelit"
 
+msgid "esri_satellite"
+msgstr "Satelit ESRI"
 
 msgid "topographic"
 msgstr "Topografic"
@@ -196,9 +198,3 @@ msgstr "Te afli la "
 
 msgid "meters_from_point"
 msgstr "metri de acest punct"
-
-msgid "seconds"
-msgstr "Secunde"
-
-msgid "milliseconds"
-msgstr "Milisecunde"

@@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "Торренты"
 
 msgid "searching_for_new_results"
-msgstr "Идёт поиск новых результатов"
+msgstr "Идёт поиск новых результатов..."
 
 msgid "previous"
 msgstr "Предыдущий"
@@ -116,7 +116,7 @@ msgid "next"
 msgstr "Следующий"
 
 msgid "fetched_in"
-msgstr "Получено за %s"
+msgstr "Получено за %s секунд"
 
 msgid "sort_seeders"
 msgstr "Сортировать по сидерам"
@@ -184,6 +184,8 @@ msgstr "Улицы"
 msgid "satellite"
 msgstr "Спутник"
 
+msgid "esri_satellite"
+msgstr "Спутник ESRI"
 
 msgid "topographic"
 msgstr "Топографическая"
@@ -196,9 +198,3 @@ msgstr "Вы находитесь в "
 
 msgid "meters_from_point"
 msgstr "метрах от этой точки"
-
-msgid "seconds"
-msgstr "Секунды"
-
-msgid "milliseconds"
-msgstr "Миллисекунды"

@@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "Torrenty"
 
 msgid "searching_for_new_results"
-msgstr "Hľadám nové výsledky"
+msgstr "Hľadám nové výsledky..."
 
 msgid "previous"
 msgstr "Predchádzajúce"
@@ -116,7 +116,7 @@ msgid "next"
 msgstr "Ďalšie"
 
 msgid "fetched_in"
-msgstr "Načítané za %s"
+msgstr "Načítané za %s sekúnd"
 
 msgid "sort_seeders"
 msgstr "Zoradiť podľa seedrov"
@@ -184,6 +184,8 @@ msgstr "Ulice"
 msgid "satellite"
 msgstr "Satelit"
 
+msgid "esri_satellite"
+msgstr "ESRI Satelit"
 
 msgid "topographic"
 msgstr "Topografické"
@@ -196,9 +198,3 @@ msgstr "Nachádzate sa vo vzdialenosti "
 
 msgid "meters_from_point"
 msgstr "metrov od tohto bodu"
-
-msgid "seconds"
-msgstr "Sekundy"
-
-msgid "milliseconds"
-msgstr "Milisekundy"

@@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "Torrenti"
 
 msgid "searching_for_new_results"
-msgstr "Iskanje novih rezultatov"
+msgstr "Iskanje novih rezultatov..."
 
 msgid "previous"
 msgstr "Prejšnje"
@@ -116,7 +116,7 @@ msgid "next"
 msgstr "Naslednje"
 
 msgid "fetched_in"
-msgstr "Pridobljeno v %s"
+msgstr "Pridobljeno v %s sekundah"
 
 msgid "sort_seeders"
 msgstr "Razvrsti po seederjih"
@@ -184,6 +184,8 @@ msgstr "Ulice"
 msgid "satellite"
 msgstr "Satelit"
 
+msgid "esri_satellite"
+msgstr "ESRI satelit"
 
 msgid "topographic"
 msgstr "Topografsko"
@@ -196,9 +198,3 @@ msgstr "Nahajate se znotraj "
 
 msgid "meters_from_point"
 msgstr "metrov od te točke"
-
-msgid "seconds"
-msgstr "Sekunde"
-
-msgid "milliseconds"
-msgstr "Milisekunde"

@@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "Торенти"
 
 msgid "searching_for_new_results"
-msgstr "Тражење нових резултата"
+msgstr "Тражење нових резултата..."
 
 msgid "previous"
 msgstr "Претходно"
@@ -116,7 +116,7 @@ msgid "next"
 msgstr "Следеће"
 
 msgid "fetched_in"
-msgstr "Преузето за %s"
+msgstr "Преузето за %s секунди"
 
 msgid "sort_seeders"
 msgstr "Сортирај по сеедерима"
@@ -184,6 +184,8 @@ msgstr "Улице"
 msgid "satellite"
 msgstr "Сателит"
 
+msgid "esri_satellite"
+msgstr "ESRI сателит"
 
 msgid "topographic"
 msgstr "Топографска"
@@ -196,9 +198,3 @@ msgstr "Налазите се на удаљености од "
 
 msgid "meters_from_point"
 msgstr "метара од ове тачке"
-
-msgid "seconds"
-msgstr "Секунди"
-
-msgid "milliseconds"
-msgstr "Милисекунде"

@@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "Torrents"
 
 msgid "searching_for_new_results"
-msgstr "Söker efter nya resultat"
+msgstr "Söker efter nya resultat..."
 
 msgid "previous"
 msgstr "Föregående"
@@ -116,7 +116,7 @@ msgid "next"
 msgstr "Nästa"
 
 msgid "fetched_in"
-msgstr "Hämtad på %s"
+msgstr "Hämtad på %s sekunder"
 
 msgid "sort_seeders"
 msgstr "Sortera efter seeders"
@@ -184,6 +184,8 @@ msgstr "Gator"
 msgid "satellite"
 msgstr "Satellit"
 
+msgid "esri_satellite"
+msgstr "ESRI Satellit"
 
 msgid "topographic"
 msgstr "Topografisk"
@@ -196,9 +198,3 @@ msgstr "Du är inom "
 
 msgid "meters_from_point"
 msgstr "meter från denna punkt"
-
-msgid "seconds"
-msgstr "Sekunder"
-
-msgid "milliseconds"
-msgstr "Millisekunder"

@@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "Torenti"
 
 msgid "searching_for_new_results"
-msgstr "Inatafuta matokeo mapya"
+msgstr "Inatafuta matokeo mapya..."
 
 msgid "previous"
 msgstr "Ya awali"
@@ -184,6 +184,8 @@ msgstr "Mitaa"
 msgid "satellite"
 msgstr "Setilaiti"
 
+msgid "esri_satellite"
+msgstr "Setilaiti ya ESRI"
 
 msgid "topographic"
 msgstr "Topografia"
@@ -196,9 +198,3 @@ msgstr "Uko ndani ya "
 
 msgid "meters_from_point"
 msgstr "mita kutoka eneo hili"
-
-msgid "seconds"
-msgstr "Sekunde"
-
-msgid "milliseconds"
-msgstr "Milisekunde"

@@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "ทอร์เรนต์"
 
 msgid "searching_for_new_results"
-msgstr "กำลังค้นหาผลลัพธ์ใหม่"
+msgstr "กำลังค้นหาผลลัพธ์ใหม่..."
 
 msgid "previous"
 msgstr "ก่อนหน้า"
@@ -116,7 +116,7 @@ msgid "next"
 msgstr "ถัดไป"
 
 msgid "fetched_in"
-msgstr "ดึงข้อมูลใน %s"
+msgstr "ดึงข้อมูลใน %s วินาที"
 
 msgid "sort_seeders"
 msgstr "จัดเรียงตามซีดเดอร์"
@@ -184,6 +184,8 @@ msgstr "ถนน"
 msgid "satellite"
 msgstr "ดาวเทียม"
 
+msgid "esri_satellite"
+msgstr "ดาวเทียม ESRI"
 
 msgid "topographic"
 msgstr "ภูมิประเทศ"
@@ -196,9 +198,3 @@ msgstr "คุณอยู่ภายในระยะ "
 
 msgid "meters_from_point"
 msgstr "เมตรจากจุดนี้"
-
-msgid "seconds"
-msgstr "วินาที"
-
-msgid "milliseconds"
-msgstr "มิลลิวินาที"

@@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "Mga Torrents"
 
 msgid "searching_for_new_results"
-msgstr "Naghahanap ng mga bagong resulta"
+msgstr "Naghahanap ng mga bagong resulta..."
 
 msgid "previous"
 msgstr "Nakaraan"
@@ -116,7 +116,7 @@ msgid "next"
 msgstr "Susunod"
 
 msgid "fetched_in"
-msgstr "Nakuha sa %s"
+msgstr "Nakuha sa %s segundo"
 
 msgid "sort_seeders"
 msgstr "Ayusin ayon sa seeders"
@@ -184,6 +184,8 @@ msgstr "Mga Kalye"
 msgid "satellite"
 msgstr "Satelite"
 
+msgid "esri_satellite"
+msgstr "ESRI Satelite"
 
 msgid "topographic"
 msgstr "Topograpiko"
@@ -196,9 +198,3 @@ msgstr "Ikaw ay nasa loob ng "
 
 msgid "meters_from_point"
 msgstr "metro mula sa puntong ito"
-
-msgid "seconds"
-msgstr "Segundo"
-
-msgid "milliseconds"
-msgstr "Milyasegundo"

@@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "Torrentler"
 
 msgid "searching_for_new_results"
-msgstr "Yeni sonuçlar aranıyor"
+msgstr "Yeni sonuçlar aranıyor..."
 
 msgid "previous"
 msgstr "Önceki"
@@ -116,7 +116,7 @@ msgid "next"
 msgstr "Sonraki"
 
 msgid "fetched_in"
-msgstr "%s"
+msgstr "%s saniyede alındı"
 
 msgid "sort_seeders"
 msgstr "Seeders'a göre sırala"
@@ -184,6 +184,8 @@ msgstr "Sokaklar"
 msgid "satellite"
 msgstr "Uydu"
 
+msgid "esri_satellite"
+msgstr "ESRI Uydu"
 
 msgid "topographic"
 msgstr "Topografik"
@@ -196,9 +198,3 @@ msgstr "Şuradasınız: "
 
 msgid "meters_from_point"
 msgstr "metre bu noktadan"
-
-msgid "seconds"
-msgstr "Saniye"
-
-msgid "milliseconds"
-msgstr "Milisaniye"

@@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "Торренти"
 
 msgid "searching_for_new_results"
-msgstr "Шукаю нові результати"
+msgstr "Шукаю нові результати..."
 
 msgid "previous"
 msgstr "Попередній"
@@ -116,7 +116,7 @@ msgid "next"
 msgstr "Наступний"
 
 msgid "fetched_in"
-msgstr "Отримано за %s"
+msgstr "Отримано за %s секунд"
 
 msgid "sort_seeders"
 msgstr "Сортувати за сідерами"
@@ -184,6 +184,8 @@ msgstr "Вулиці"
 msgid "satellite"
 msgstr "Супутник"
 
+msgid "esri_satellite"
+msgstr "Супутник ESRI"
 
 msgid "topographic"
 msgstr "Топографічна"
@@ -196,9 +198,3 @@ msgstr "Ви перебуваєте в межах "
 
 msgid "meters_from_point"
 msgstr "метрів від цієї точки"
-
-msgid "seconds"
-msgstr "Секунди"
-
-msgid "milliseconds"
-msgstr "Мілісекунди"

@@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "Torrents"
 
 msgid "searching_for_new_results"
-msgstr "Đang tìm kiếm kết quả mới"
+msgstr "Đang tìm kiếm kết quả mới..."
 
 msgid "previous"
 msgstr "Trước"
@@ -116,7 +116,7 @@ msgid "next"
 msgstr "Tiếp theo"
 
 msgid "fetched_in"
-msgstr "Đã tìm trong %s"
+msgstr "Đã tìm trong %s giây"
 
 msgid "sort_seeders"
 msgstr "Sắp xếp theo seeders"
@@ -184,6 +184,8 @@ msgstr "Đường phố"
 msgid "satellite"
 msgstr "Vệ tinh"
 
+msgid "esri_satellite"
+msgstr "Vệ tinh ESRI"
 
 msgid "topographic"
 msgstr "Địa hình"
@@ -196,9 +198,3 @@ msgstr "Bạn đang ở trong phạm vi "
 
 msgid "meters_from_point"
 msgstr "mét từ điểm này"
-
-msgid "seconds"
-msgstr "Giây"
-
-msgid "milliseconds"
-msgstr "Mili giây"

@@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "种子"
 
 msgid "searching_for_new_results"
-msgstr "正在搜索新结果"
+msgstr "正在搜索新结果..."
 
 msgid "previous"
 msgstr "上一页"
@@ -116,7 +116,7 @@ msgid "next"
 msgstr "下一页"
 
 msgid "fetched_in"
-msgstr "%s"
+msgstr "%s 秒内获取"
 
 msgid "sort_seeders"
 msgstr "排序:上传者"
@@ -184,6 +184,8 @@ msgstr "街道"
 msgid "satellite"
 msgstr "卫星"
 
+msgid "esri_satellite"
+msgstr "ESRI 卫星"
 
 msgid "topographic"
 msgstr "地形图"
@@ -196,9 +198,3 @@ msgstr "您距离此点 "
 
 msgid "meters_from_point"
 msgstr "米"
-
-msgid "seconds"
-msgstr "秒"
-
-msgid "milliseconds"
-msgstr "毫秒"

@@ -107,7 +107,7 @@ msgid "torrents"
 msgstr "種子"
 
 msgid "searching_for_new_results"
-msgstr "正在搜尋新結果"
+msgstr "正在搜尋新結果..."
 
 msgid "previous"
 msgstr "上一頁"
@@ -116,7 +116,7 @@ msgid "next"
 msgstr "下一頁"
 
 msgid "fetched_in"
-msgstr "已於 %s"
+msgstr "已於 %s 秒內加載"
 
 msgid "sort_seeders"
 msgstr "排序(種子數量)"
@@ -184,6 +184,8 @@ msgstr "街道"
 msgid "satellite"
 msgstr "衛星"
 
+msgid "esri_satellite"
+msgstr "ESRI 衛星"
 
 msgid "topographic"
 msgstr "地形"
@@ -196,9 +198,3 @@ msgstr "您在 "
 
 msgid "meters_from_point"
 msgstr "公尺範圍內"
-
-msgid "seconds"
-msgstr "秒"
-
-msgid "milliseconds"
-msgstr "毫秒"

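Every locale block above makes the same four changes: the in-progress message gains a trailing ellipsis, the time unit is folded into the fetched_in string itself, an esri_satellite label is added for the new map layer, and the standalone seconds/milliseconds keys are removed. A minimal sketch of how a fetched_in string with an embedded unit would be rendered, assuming Translate simply returns the msgstr registered for the active locale (the lookup helper below is a hypothetical stand-in, not the project's real implementation):

package main

import "fmt"

// translate stands in for the project's Translate helper; assume it
// returns the msgstr for the given key in the active locale.
func translate(key string) string {
	messages := map[string]string{
		// Croatian value from the diff above; the unit now lives in the string.
		"fetched_in": "Dohvaćeno za %s sekundi",
	}
	return messages[key]
}

func main() {
	elapsedSeconds := 0.42 // sample value
	// The %s placeholder receives the preformatted number.
	fmt.Printf(translate("fetched_in")+"\n", fmt.Sprintf("%.2f", elapsedSeconds))
	// Output: Dohvaćeno za 0.42 sekundi
}
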
main.go

@@ -164,8 +164,6 @@ func handleSearch(w http.ResponseWriter, r *http.Request) {
 		handleImageSearch(w, r, settings, query, page)
 	case "video":
 		handleVideoSearch(w, settings, query, page)
-	case "music":
-		handleMusicSearch(w, settings, query, page)
 	case "map":
 		handleMapSearch(w, settings, query)
 	case "forum":
@@ -175,7 +173,7 @@ func handleSearch(w http.ResponseWriter, r *http.Request) {
 	case "text":
 		fallthrough
 	default:
-		HandleTextSearchWithInstantAnswer(w, settings, query, page)
+		HandleTextSearch(w, settings, query, page)
 	}
 }
 
@@ -228,7 +226,7 @@ func runServer() {
 			w.Header().Set("Content-Type", "application/opensearchdescription+xml")
 			http.ServeFile(w, r, "static/opensearch.xml")
 		})
-		printInfo("Website is enabled.")
+		printInfo("Website functionality enabled.")
 	} else {
 		// Redirect all website routes to a "service disabled" handler
 		http.HandleFunc("/static/", handleWebsiteDisabled)
@@ -240,7 +238,11 @@ func runServer() {
 		http.HandleFunc("/image_status", handleWebsiteDisabled)
 		http.HandleFunc("/privacy", handleWebsiteDisabled)
 		http.HandleFunc("/opensearch.xml", handleWebsiteDisabled)
-		printInfo("Website is disabled.")
+		printInfo("Website functionality disabled.")
+	}
+
+	if config.NodesEnabled {
+		http.HandleFunc("/node", handleNodeRequest)
 	}
 
 	printMessage("Server is listening on http://localhost:%d", config.Port)
@@ -250,7 +252,7 @@
 func handleWebsiteDisabled(w http.ResponseWriter, r *http.Request) {
 	w.Header().Set("Content-Type", "text/plain")
 	w.WriteHeader(http.StatusServiceUnavailable)
-	_, _ = w.Write([]byte("The website is currently disabled."))
+	_, _ = w.Write([]byte("The website functionality is currently disabled."))
 }
 
 func handlePrivacyPage(w http.ResponseWriter, r *http.Request) {
@@ -278,5 +280,20 @@ func handlePrivacyPage(w http.ResponseWriter, r *http.Request) {
 		LanguageOptions: languageOptions,
 	}
 
-	renderTemplate(w, "privacy.html", toMap(data))
+	// Parse the template
+	tmpl, err := template.New("privacy.html").ParseFiles("templates/privacy.html")
+	if err != nil {
+		log.Printf("Error parsing template: %v", err)
+		http.Error(w, "Internal Server Error", http.StatusInternalServerError)
+		return
+	}
+
+	// Set the response content type
+	w.Header().Set("Content-Type", "text/html; charset=utf-8")
+
+	// Execute the template
+	if err := tmpl.Execute(w, data); err != nil {
+		log.Printf("Error executing template: %v", err)
+		http.Error(w, "Internal Server Error", http.StatusInternalServerError)
+	}
 }

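The privacy-page hunk replaces the shared renderTemplate call with direct template parsing. The import block is not part of the diff, so presumably main.go also gains html/template and log imports for template.New and log.Printf to compile (an assumption; the hunk does not show it). The same parse-and-execute pattern as a self-contained sketch, with a hypothetical data struct standing in for the project's real one:

package main

import (
	"html/template"
	"log"
	"net/http"
)

// privacyHandler mirrors the parse/execute flow from the hunk above.
func privacyHandler(w http.ResponseWriter, r *http.Request) {
	data := struct{ Title string }{Title: "Privacy"} // hypothetical payload

	// ParseFiles registers the file under its base name, which must match
	// the name given to template.New for Execute to find it.
	tmpl, err := template.New("privacy.html").ParseFiles("templates/privacy.html")
	if err != nil {
		log.Printf("Error parsing template: %v", err)
		http.Error(w, "Internal Server Error", http.StatusInternalServerError)
		return
	}

	w.Header().Set("Content-Type", "text/html; charset=utf-8")
	if err := tmpl.Execute(w, data); err != nil {
		log.Printf("Error executing template: %v", err)
		http.Error(w, "Internal Server Error", http.StatusInternalServerError)
	}
}

func main() {
	http.HandleFunc("/privacy", privacyHandler)
	log.Fatal(http.ListenAndServe(":8080", nil))
}
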
map.go

@@ -5,6 +5,7 @@ import (
 	"fmt"
 	"net/http"
 	"net/url"
+	"time"
 )
 
 type NominatimResponse struct {
@@ -58,7 +59,7 @@ func geocodeQuery(query string) (latitude, longitude string, found bool, err err
 
 func handleMapSearch(w http.ResponseWriter, settings UserSettings, query string) {
 	// Start measuring the time for geocoding the query
-	//startTime := time.Now()
+	startTime := time.Now()
 
 	// Geocode the query to get coordinates
 	latitude, longitude, found, err := geocodeQuery(query)
@@ -69,7 +70,7 @@ func handleMapSearch(w http.ResponseWriter, settings UserSettings, query string)
 	}
 
 	// Measure the elapsed time for geocoding
-	//elapsed := time.Since(startTime)
+	elapsedTime := time.Since(startTime)
 
 	// Prepare the data to pass to the template
 	data := map[string]interface{}{
@@ -77,7 +78,7 @@ func handleMapSearch(w http.ResponseWriter, settings UserSettings, query string)
 		"Latitude":  latitude,
 		"Longitude": longitude,
 		"Found":     found,
-		//"Fetched": FormatElapsedTime(elapsed), // not used in map tab
+		"Fetched":   fmt.Sprintf("%.2f %s", elapsedTime.Seconds(), Translate("seconds")),
 		"Theme":     settings.Theme,
 		"Safe":      settings.SafeSearch,
 		"IsThemeDark": settings.IsThemeDark,

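The new handler times the geocoding step and hands the view a preformatted "Fetched" string, which is also why the time import is added in the first hunk. Note that the unit comes from Translate("seconds") even though the locale diffs above drop the seconds key, so the lookup presumably relies on a fallback not visible in this comparison (an assumption). The timing-and-formatting pattern in isolation, with the translation call replaced by a literal:

package main

import (
	"fmt"
	"time"
)

func main() {
	startTime := time.Now()

	// Stand-in for the geocoding request being measured.
	time.Sleep(120 * time.Millisecond)

	elapsedTime := time.Since(startTime)
	// The real code uses Translate("seconds") instead of the literal unit.
	fetched := fmt.Sprintf("%.2f %s", elapsedTime.Seconds(), "seconds")
	fmt.Println(fetched) // e.g. "0.12 seconds"
}
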
@@ -1,80 +0,0 @@
-// music-bandcamp.go - Bandcamp specific implementation
-package main
-
-import (
-	"fmt"
-	"net/http"
-	"net/url"
-	"strings"
-
-	"github.com/PuerkitoBio/goquery"
-)
-
-func SearchBandcamp(query string, page int) ([]MusicResult, error) {
-	baseURL := "https://bandcamp.com/search?"
-	params := url.Values{
-		"q":    []string{query},
-		"page": []string{fmt.Sprintf("%d", page)},
-	}
-
-	resp, err := http.Get(baseURL + params.Encode())
-	if err != nil {
-		return nil, fmt.Errorf("request failed: %v", err)
-	}
-	defer resp.Body.Close()
-
-	doc, err := goquery.NewDocumentFromReader(resp.Body)
-	if err != nil {
-		return nil, fmt.Errorf("failed to parse HTML: %v", err)
-	}
-
-	var results []MusicResult
-
-	doc.Find("li.searchresult").Each(func(i int, s *goquery.Selection) {
-		// Extract the item type
-		itemType := strings.ToLower(strings.TrimSpace(s.Find("div.itemtype").Text()))
-
-		// Skip if the item is not an album or track
-		if itemType != "album" && itemType != "track" {
-			return
-		}
-
-		result := MusicResult{Source: "Bandcamp"}
-
-		// URL extraction
-		if urlSel := s.Find("div.itemurl a"); urlSel.Length() > 0 {
-			result.URL = strings.TrimSpace(urlSel.Text())
-		}
-
-		// Title extraction
-		if titleSel := s.Find("div.heading a"); titleSel.Length() > 0 {
-			result.Title = strings.TrimSpace(titleSel.Text())
-		}
-
-		// Artist extraction
-		if artistSel := s.Find("div.subhead"); artistSel.Length() > 0 {
-			result.Artist = strings.TrimSpace(artistSel.Text())
-		}
-
-		// Thumbnail extraction
-		if thumbSel := s.Find("div.art img"); thumbSel.Length() > 0 {
-			result.Thumbnail, _ = thumbSel.Attr("src")
-		}
-
-		// // Iframe URL construction
-		// if linkHref, exists := s.Find("div.itemurl a").Attr("href"); exists {
-		// 	if itemID := extractSearchItemID(linkHref); itemID != "" {
-		// 		itemType := strings.ToLower(strings.TrimSpace(s.Find("div.itemtype").Text()))
-		// 		result.IframeSrc = fmt.Sprintf(
-		// 			"https://bandcamp.com/EmbeddedPlayer/%s=%s/size=large/bgcol=000/linkcol=fff/artwork=small",
-		// 			itemType,
-		// 			itemID,
-		// 		)
-		// 	}
-		// }
-
-		results = append(results, result)
-	})
-
-	return results, nil
-}

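The deleted scraper drives everything off goquery selectors against li.searchresult nodes, keeping only album and track hits. The same selection pattern against a static HTML snippet, so the extraction logic is visible without a live request (the markup below is illustrative, shaped after the selectors above, not Bandcamp's actual page):

package main

import (
	"fmt"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

func main() {
	// Illustrative markup matching the selectors used in the deleted file.
	html := `<ul>
	  <li class="searchresult">
	    <div class="itemtype"> ALBUM </div>
	    <div class="heading"><a>Some Album</a></div>
	    <div class="subhead">Some Artist</div>
	  </li>
	</ul>`

	doc, err := goquery.NewDocumentFromReader(strings.NewReader(html))
	if err != nil {
		panic(err)
	}

	doc.Find("li.searchresult").Each(func(i int, s *goquery.Selection) {
		itemType := strings.ToLower(strings.TrimSpace(s.Find("div.itemtype").Text()))
		if itemType != "album" && itemType != "track" {
			return // skip non-music results, as the deleted code did
		}
		title := strings.TrimSpace(s.Find("div.heading a").Text())
		artist := strings.TrimSpace(s.Find("div.subhead").Text())
		fmt.Printf("%s: %q by %q\n", itemType, title, artist)
	})
}
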
@@ -1,211 +0,0 @@
-package main
-
-import (
-	"encoding/json"
-	"fmt"
-	"io"
-	"net/http"
-	"net/url"
-	"regexp"
-	"strings"
-
-	"github.com/PuerkitoBio/goquery"
-)
-
-type SoundCloudTrack struct {
-	ID         int    `json:"id"`
-	Title      string `json:"title"`
-	Permalink  string `json:"permalink"`
-	ArtworkURL string `json:"artwork_url"`
-	Duration   int    `json:"duration"`
-	User       struct {
-		Username  string `json:"username"`
-		Permalink string `json:"permalink"`
-	} `json:"user"`
-	Streams struct {
-		HTTPMP3128URL string `json:"http_mp3_128_url"`
-	} `json:"streams"`
-}
-
-func SearchSoundCloud(query string, page int) ([]MusicResult, error) {
-	clientID, err := extractClientID()
-	if err != nil {
-		return searchSoundCloudViaScraping(query, page)
-	}
-
-	apiResults, err := searchSoundCloudViaAPI(query, clientID, page)
-	if err == nil && len(apiResults) > 0 {
-		return convertSoundCloudResults(apiResults), nil
-	}
-
-	return searchSoundCloudViaScraping(query, page)
-}
-
-func searchSoundCloudViaAPI(query, clientID string, page int) ([]SoundCloudTrack, error) {
-	const limit = 10
-	offset := (page - 1) * limit
-
-	apiUrl := fmt.Sprintf(
-		"https://api-v2.soundcloud.com/search/tracks?q=%s&client_id=%s&limit=%d&offset=%d",
-		url.QueryEscape(query),
-		clientID,
-		limit,
-		offset,
-	)
-
-	resp, err := http.Get(apiUrl)
-	if err != nil {
-		return nil, err
-	}
-	defer resp.Body.Close()
-
-	if resp.StatusCode != http.StatusOK {
-		return nil, fmt.Errorf("API request failed with status: %d", resp.StatusCode)
-	}
-
-	var response struct {
-		Collection []SoundCloudTrack `json:"collection"`
-	}
-
-	if err := json.NewDecoder(resp.Body).Decode(&response); err != nil {
-		return nil, err
-	}
-
-	return response.Collection, nil
-}
-
-func convertSoundCloudResults(tracks []SoundCloudTrack) []MusicResult {
-	var results []MusicResult
-
-	for _, track := range tracks {
-		thumbnail := strings.Replace(track.ArtworkURL, "large", "t500x500", 1)
-		trackURL := fmt.Sprintf("https://soundcloud.com/%s/%s",
-			track.User.Permalink,
-			track.Permalink,
-		)
-
-		// Convert ms to hh:mm:ss
-		totalSeconds := track.Duration / 1000
-		hours := totalSeconds / 3600
-		minutes := (totalSeconds % 3600) / 60
-		seconds := totalSeconds % 60
-
-		var durationStr string
-		if hours > 0 {
-			durationStr = fmt.Sprintf("%d:%02d:%02d", hours, minutes, seconds)
-		} else {
-			durationStr = fmt.Sprintf("%d:%02d", minutes, seconds)
-		}
-
-		results = append(results, MusicResult{
-			Title:     track.Title,
-			Artist:    track.User.Username,
-			URL:       trackURL,
-			Thumbnail: thumbnail,
-			//AudioURL: track.Streams.HTTPMP3128URL,
-			Source:   "SoundCloud",
-			Duration: durationStr,
-		})
-	}
-	return results
-}
-
-func searchSoundCloudViaScraping(query string, page int) ([]MusicResult, error) {
-	searchUrl := fmt.Sprintf("https://soundcloud.com/search/sounds?q=%s", url.QueryEscape(query))
-	resp, err := http.Get(searchUrl)
-	if err != nil {
-		return nil, err
-	}
-	defer resp.Body.Close()
-
-	doc, err := goquery.NewDocumentFromReader(resp.Body)
-	if err != nil {
-		return nil, err
-	}
-
-	var results []MusicResult
-	doc.Find("li.searchList__item").Each(func(i int, s *goquery.Selection) {
-		titleElem := s.Find("a.soundTitle__title")
-		artistElem := s.Find("a.soundTitle__username")
-		artworkElem := s.Find(".sound__coverArt")
-
-		title := strings.TrimSpace(titleElem.Text())
-		artist := strings.TrimSpace(artistElem.Text())
-		href, _ := titleElem.Attr("href")
-		thumbnail, _ := artworkElem.Find("span.sc-artwork").Attr("style")
-
-		if thumbnail != "" {
-			if matches := regexp.MustCompile(`url\((.*?)\)`).FindStringSubmatch(thumbnail); len(matches) > 1 {
-				thumbnail = strings.Trim(matches[1], `"`)
-			}
-		}
-
-		if title == "" || href == "" {
-			return
-		}
-
-		trackURL, err := url.Parse(href)
-		if err != nil {
-			return
-		}
-
-		if trackURL.Host == "" {
-			trackURL.Scheme = "https"
-			trackURL.Host = "soundcloud.com"
-		}
-
-		trackURL.Path = strings.ReplaceAll(trackURL.Path, "//", "/")
-		fullURL := trackURL.String()
-
-		results = append(results, MusicResult{
-			Title:     title,
-			Artist:    artist,
-			URL:       fullURL,
-			Thumbnail: thumbnail,
-			Source:    "SoundCloud",
-		})
-	})
-
-	return results, nil
-}
-
-func extractClientID() (string, error) {
-	resp, err := http.Get("https://soundcloud.com/")
-	if err != nil {
-		return "", err
-	}
-	defer resp.Body.Close()
-
-	doc, err := goquery.NewDocumentFromReader(resp.Body)
-	if err != nil {
-		return "", err
-	}
-
-	var clientID string
-	doc.Find("script[src]").Each(func(i int, s *goquery.Selection) {
-		if clientID != "" {
-			return
-		}
-
-		src, _ := s.Attr("src")
-		if strings.Contains(src, "sndcdn.com/assets/") {
-			resp, err := http.Get(src)
-			if err != nil {
-				return
-			}
-			defer resp.Body.Close()
-
-			body, _ := io.ReadAll(resp.Body)
-			re := regexp.MustCompile(`client_id:"([^"]+)"`)
-			matches := re.FindSubmatch(body)
-			if len(matches) > 1 {
-				clientID = string(matches[1])
-			}
-		}
-	})
-
-	if clientID == "" {
-		return "", fmt.Errorf("client_id not found")
-	}
-	return clientID, nil
-}

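A detail worth isolating from the deleted file is the millisecond-to-clock-string conversion used for track durations; the same arithmetic as a runnable snippet:

package main

import "fmt"

// formatTrackDuration converts a duration in milliseconds to h:mm:ss
// or m:ss, matching the logic in the deleted SoundCloud code.
func formatTrackDuration(ms int) string {
	totalSeconds := ms / 1000
	hours := totalSeconds / 3600
	minutes := (totalSeconds % 3600) / 60
	seconds := totalSeconds % 60
	if hours > 0 {
		return fmt.Sprintf("%d:%02d:%02d", hours, minutes, seconds)
	}
	return fmt.Sprintf("%d:%02d", minutes, seconds)
}

func main() {
	fmt.Println(formatTrackDuration(272000))  // 4:32
	fmt.Println(formatTrackDuration(4272000)) // 1:11:12
}
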
@@ -1,81 +0,0 @@
-package main
-
-import (
-	"fmt"
-	"net/http"
-	"net/url"
-	"strings"
-	"time"
-
-	"github.com/PuerkitoBio/goquery"
-)
-
-func SearchSpotify(query string, page int) ([]MusicResult, error) {
-	searchUrl := fmt.Sprintf("https://open.spotify.com/search/%s", url.PathEscape(query))
-
-	client := &http.Client{
-		Timeout: 10 * time.Second,
-		CheckRedirect: func(req *http.Request, via []*http.Request) error {
-			return http.ErrUseLastResponse
-		},
-	}
-
-	req, err := http.NewRequest("GET", searchUrl, nil)
-	if err != nil {
-		return nil, fmt.Errorf("failed to create request: %v", err)
-	}
-
-	// Set user agent ?
-
-	resp, err := client.Do(req)
-	if err != nil {
-		return nil, fmt.Errorf("request failed: %v", err)
-	}
-	defer resp.Body.Close()
-
-	if resp.StatusCode != http.StatusOK {
-		return nil, fmt.Errorf("received non-200 status code: %d", resp.StatusCode)
-	}
-
-	doc, err := goquery.NewDocumentFromReader(resp.Body)
-	if err != nil {
-		return nil, fmt.Errorf("failed to parse document: %v", err)
-	}
-
-	var results []MusicResult
-
-	// Find track elements
-	doc.Find(`div[data-testid="tracklist-row"]`).Each(func(i int, s *goquery.Selection) {
-		// Extract title
-		title := s.Find(`div[data-testid="tracklist-row__title"] a`).Text()
-		title = strings.TrimSpace(title)
-
-		// Extract artist
-		artist := s.Find(`div[data-testid="tracklist-row__artist"] a`).First().Text()
-		artist = strings.TrimSpace(artist)
-
-		// Extract duration
-		duration := s.Find(`div[data-testid="tracklist-row__duration"]`).First().Text()
-		duration = strings.TrimSpace(duration)
-
-		// Extract URL
-		path, _ := s.Find(`div[data-testid="tracklist-row__title"] a`).Attr("href")
-		fullUrl := fmt.Sprintf("https://open.spotify.com%s", path)
-
-		// Extract thumbnail
-		thumbnail, _ := s.Find(`img[aria-hidden="false"]`).Attr("src")
-
-		if title != "" && artist != "" {
-			results = append(results, MusicResult{
-				Title:     title,
-				Artist:    artist,
-				URL:       fullUrl,
-				Duration:  duration,
-				Thumbnail: thumbnail,
-				Source:    "Spotify",
-			})
-		}
-	})
-
-	return results, nil
-}

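The deleted Spotify scraper configures its client with a CheckRedirect callback returning http.ErrUseLastResponse, which tells net/http to stop following redirects and return the first response unmodified. That client setup in isolation (the target URL is just a public demonstration endpoint):

package main

import (
	"fmt"
	"net/http"
	"time"
)

func main() {
	// Returning http.ErrUseLastResponse stops redirect-following:
	// the client hands back the 3xx response instead of chasing Location.
	client := &http.Client{
		Timeout: 10 * time.Second,
		CheckRedirect: func(req *http.Request, via []*http.Request) error {
			return http.ErrUseLastResponse
		},
	}

	resp, err := client.Get("http://httpbin.org/redirect/1")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.StatusCode) // the 302, not the final 200
}
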
113
music-youtube.go
113
music-youtube.go
|
@ -1,113 +0,0 @@
|
||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
|
||||||
"net/http"
|
|
||||||
"net/url"
|
|
||||||
)
|
|
||||||
|
|
||||||
type MusicAPIResponse struct {
|
|
||||||
Items []struct {
|
|
||||||
Title string `json:"title"`
|
|
||||||
UploaderName string `json:"uploaderName"`
|
|
||||||
Duration int `json:"duration"`
|
|
||||||
Thumbnail string `json:"thumbnail"`
|
|
||||||
URL string `json:"url"`
|
|
||||||
} `json:"items"` // Removed VideoID since we'll parse from URL
|
|
||||||
}
|
|
||||||
|
|
||||||
func SearchMusicViaPiped(query string, page int) ([]MusicResult, error) {
|
|
||||||
var lastError error
|
|
||||||
|
|
||||||
// We will try to use preferred instance
|
|
||||||
mu.Lock()
|
|
||||||
instance := preferredInstance
|
|
||||||
mu.Unlock()
|
|
||||||
|
|
||||||
if instance != "" && !disabledInstances[instance] {
|
|
||||||
url := fmt.Sprintf(
|
|
||||||
"https://%s/search?q=%s&filter=music_songs&page=%d",
|
|
||||||
instance,
|
|
||||||
url.QueryEscape(query),
|
|
||||||
page,
|
|
||||||
)
|
|
||||||
|
|
||||||
resp, err := http.Get(url)
|
|
||||||
if err == nil && resp.StatusCode == http.StatusOK {
|
|
||||||
defer resp.Body.Close()
|
|
||||||
var apiResp MusicAPIResponse
|
|
||||||
if err := json.NewDecoder(resp.Body).Decode(&apiResp); err == nil {
|
|
||||||
return convertPipedToMusicResults(instance, apiResp), nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
printWarn("Preferred instance %s failed for music, falling back", instance)
|
|
||||||
disableInstance(instance)
|
|
||||||
}
|
|
||||||
|
|
||||||
// 2. Fallback using others
|
|
||||||
mu.Lock()
|
|
||||||
defer mu.Unlock()
|
|
||||||
for _, inst := range pipedInstances {
|
|
||||||
if disabledInstances[inst] {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
url := fmt.Sprintf(
|
|
||||||
"https://%s/search?q=%s&filter=music_songs&page=%d",
|
|
||||||
inst,
|
|
||||||
url.QueryEscape(query),
|
|
||||||
page,
|
|
||||||
)
|
|
||||||
|
|
||||||
resp, err := http.Get(url)
|
|
||||||
if err != nil || resp.StatusCode != http.StatusOK {
|
|
||||||
printInfo("Disabling instance %s due to error: %v", inst, err)
|
|
||||||
disabledInstances[inst] = true
|
|
||||||
lastError = fmt.Errorf("request to %s failed: %w", inst, err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
defer resp.Body.Close()
|
|
||||||
var apiResp MusicAPIResponse
|
|
||||||
if err := json.NewDecoder(resp.Body).Decode(&apiResp); err != nil {
|
|
||||||
lastError = fmt.Errorf("failed to decode response from %s: %w", inst, err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
preferredInstance = inst
|
|
||||||
return convertPipedToMusicResults(inst, apiResp), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, fmt.Errorf("all Piped instances failed, last error: %v", lastError)
|
|
||||||
}
|
|
||||||
|
|
||||||
func convertPipedToMusicResults(instance string, resp MusicAPIResponse) []MusicResult {
|
|
||||||
seen := make(map[string]bool)
|
|
||||||
var results []MusicResult
|
|
||||||
|
|
||||||
for _, item := range resp.Items {
|
|
||||||
// Extract video ID from URL
|
|
||||||
u, err := url.Parse(item.URL)
|
|
||||||
if err != nil {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
videoID := u.Query().Get("v")
|
|
||||||
if videoID == "" || seen[videoID] {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
seen[videoID] = true
|
|
||||||
|
|
||||||
results = append(results, MusicResult{
|
|
||||||
Title: item.Title,
|
|
||||||
Artist: item.UploaderName,
|
|
||||||
URL: fmt.Sprintf("https://music.youtube.com%s", item.URL),
|
|
||||||
Duration: formatDuration(item.Duration),
|
|
||||||
Thumbnail: item.Thumbnail,
|
|
||||||
Source: "YouTube Music",
|
|
||||||
//AudioURL: fmt.Sprintf("https://%s/stream/%s", instance, videoID),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
return results
|
|
||||||
}
|
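Note: in SearchMusicViaPiped above, `url := fmt.Sprintf(...)` shadows the imported net/url package. It still compiles, because a short variable declaration is not in scope until after its own statement, so `url.QueryEscape` inside the right-hand side resolves to the package — but it is easy to misread. A minimal, self-contained sketch of the same request-URL construction, using a hypothetical instance name (not one of the repository's configured instances):

package main

import (
	"fmt"
	"net/url"
)

func main() {
	instance := "piped.example.org" // hypothetical instance
	query := "daft punk"
	page := 1
	// Same endpoint shape as SearchMusicViaPiped, without shadowing net/url.
	endpoint := fmt.Sprintf(
		"https://%s/search?q=%s&filter=music_songs&page=%d",
		instance, url.QueryEscape(query), page,
	)
	fmt.Println(endpoint) // https://piped.example.org/search?q=daft+punk&filter=music_songs&page=1
}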
music.go (177 lines removed)
@@ -1,177 +0,0 @@
// music.go - Central music search handler
package main

import (
	"net/http"
	"sync"
	"time"
)

type MusicSearchEngine struct {
	Name string
	Func func(query string, page int) ([]MusicResult, error)
}

var (
	musicSearchEngines []MusicSearchEngine
	cacheMutex         = &sync.Mutex{}
)

var allMusicSearchEngines = []MusicSearchEngine{
	{Name: "SoundCloud", Func: SearchSoundCloud},
	{Name: "YouTube", Func: SearchMusicViaPiped},
	{Name: "Bandcamp", Func: SearchBandcamp},
	//{Name: "Spotify", Func: SearchSpotify},
}

func initMusicEngines() {
	// Initialize with all engines if no specific config
	musicSearchEngines = allMusicSearchEngines
}

func handleMusicSearch(w http.ResponseWriter, settings UserSettings, query string, page int) {
	start := time.Now()

	cacheKey := CacheKey{
		Query: query,
		Page:  page,
		Type:  "music",
		Lang:  settings.SearchLanguage,
		Safe:  settings.SafeSearch == "active",
	}

	var results []MusicResult

	if cached, found := resultsCache.Get(cacheKey); found {
		if musicResults, ok := convertCacheToMusicResults(cached); ok {
			results = musicResults
		}
	}

	if len(results) == 0 {
		results = fetchMusicResults(query, page)
		if len(results) > 0 {
			resultsCache.Set(cacheKey, convertMusicResultsToCache(results))
		}
	}

	go prefetchMusicPages(query, page)

	elapsed := time.Since(start) // Calculate duration

	data := map[string]interface{}{
		"Results":        results,
		"Query":          query,
		"Page":           page,
		"HasPrevPage":    page > 1,
		"HasNextPage":    len(results) >= 10, // Default page size
		"NoResults":      len(results) == 0,
		"MusicServices":  getMusicServiceNames(),
		"CurrentService": "all", // Default service
		"Theme":          settings.Theme,
		"IsThemeDark":    settings.IsThemeDark,
		"Trans":          Translate,
		"Fetched":        FormatElapsedTime(elapsed),
	}

	renderTemplate(w, "music.html", data)
}

// Helper to get music service names
func getMusicServiceNames() []string {
	names := make([]string, len(allMusicSearchEngines))
	for i, engine := range allMusicSearchEngines {
		names[i] = engine.Name
	}
	return names
}

func convertMusicResultsToCache(results []MusicResult) []SearchResult {
	cacheResults := make([]SearchResult, len(results))
	for i, r := range results {
		cacheResults[i] = r
	}
	return cacheResults
}

func convertCacheToMusicResults(cached []SearchResult) ([]MusicResult, bool) {
	results := make([]MusicResult, 0, len(cached))
	for _, item := range cached {
		if musicResult, ok := item.(MusicResult); ok {
			results = append(results, musicResult)
		} else {
			return nil, false
		}
	}
	return results, true
}

func fetchMusicResults(query string, page int) []MusicResult {
	var results []MusicResult
	resultsChan := make(chan []MusicResult, len(musicSearchEngines))
	var wg sync.WaitGroup

	for _, engine := range musicSearchEngines {
		wg.Add(1)
		go func(e MusicSearchEngine) {
			defer wg.Done()
			res, err := e.Func(query, page)
			if err == nil && len(res) > 0 {
				resultsChan <- res
			}
		}(engine)
	}

	go func() {
		wg.Wait()
		close(resultsChan)
	}()

	for res := range resultsChan {
		results = append(results, res...)
		if len(results) >= 50 { // Default max results
			break
		}
	}

	return deduplicateResults(results)
}

func prefetchMusicPages(query string, currentPage int) {
	for _, page := range []int{currentPage - 1, currentPage + 1} {
		if page < 1 {
			continue
		}
		cacheKey := CacheKey{
			Query: query,
			Page:  page,
			Type:  "music",
		}
		if _, found := resultsCache.Get(cacheKey); !found {
			go fetchMusicResults(query, page)
		}
	}
}

func deduplicateResults(results []MusicResult) []MusicResult {
	seen := make(map[string]bool)
	var unique []MusicResult

	for _, res := range results {
		if !seen[res.URL] {
			seen[res.URL] = true
			unique = append(unique, res)
		}
	}
	return unique
}

// func generatePlayerHTML(result MusicResult) template.HTML {
// 	if result.IframeSrc != "" {
// 		return template.HTML(fmt.Sprintf(
// 			`<iframe width="100%%" height="166" scrolling="no" frameborder="no" src="%s"></iframe>`,
// 			result.IframeSrc,
// 		))
// 	}
// 	return template.HTML("")
// }
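Note: fetchMusicResults in the removed music.go fans every engine out into its own goroutine and collects result slices over a buffered channel; because the channel's capacity equals the number of engines, the early break at 50 results cannot block the senders. A self-contained sketch of that fan-out/fan-in shape, with placeholder engines standing in for the real providers:

package main

import (
	"fmt"
	"sync"
)

func main() {
	// Placeholder engines; each returns a slice of results.
	engines := []func() []string{
		func() []string { return []string{"a1", "a2"} },
		func() []string { return []string{"b1"} },
	}

	// Buffered to len(engines) so no sender can block after a consumer break.
	resultsChan := make(chan []string, len(engines))
	var wg sync.WaitGroup
	for _, e := range engines {
		wg.Add(1)
		go func(f func() []string) {
			defer wg.Done()
			if res := f(); len(res) > 0 {
				resultsChan <- res
			}
		}(e)
	}

	// Closer goroutine unblocks the final range loop.
	go func() {
		wg.Wait()
		close(resultsChan)
	}()

	var all []string
	for res := range resultsChan {
		all = append(all, res...)
	}
	fmt.Println(len(all)) // 3
}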
@@ -1,203 +1,218 @@
-//go:build experimental
-// +build experimental
-
 package main
 
 import (
 	"encoding/json"
+	"log"
 )
 
-type searchParams struct {
+func handleSearchTextMessage(msg Message) {
+	var searchParams struct {
 	Query        string `json:"query"`
 	Safe         string `json:"safe"`
 	Lang         string `json:"lang"`
 	Page         int    `json:"page"`
 	ResponseAddr string `json:"responseAddr"`
 	}
-
-func extractTargetFromAddress(addr string) string {
-	if len(addr) > 5 && addr[len(addr)-5:] == ".sock" {
-		return addr[:len(addr)-5]
-	}
-	return addr
-}
-
-// Utility to respond to any search
-func respondToSearch(req searchParams, msgType uint8, results any) {
-	if req.ResponseAddr == "" {
-		printErr("ResponseAddr is empty")
-		return
-	}
-
-	respBytes, err := json.Marshal(results)
+	err := json.Unmarshal([]byte(msg.Content), &searchParams)
 	if err != nil {
-		printWarn("Failed to marshal results for msg type %d: %v", msgType, err)
+		printWarn("Error parsing search parameters: %v", err)
 		return
 	}
 
-	resp := Message{
-		ID:      generateMessageID(),
-		Type:    msgType,
-		Content: respBytes,
-		Target:  req.ResponseAddr,
-	}
-
-	err = sendMessage(resp)
+	printDebug("Received search-text request. ResponseAddr: %s", searchParams.ResponseAddr)
+
+	results := fetchTextResults(searchParams.Query, searchParams.Safe, searchParams.Lang, searchParams.Page)
+	resultsJSON, err := json.Marshal(results)
 	if err != nil {
-		printWarn("Failed to send search results to %s: %v", req.ResponseAddr, err)
-	}
-}
-
-func sendBinaryResponse(req searchParams, msgType uint8, payload []byte, msgID uint32) {
-	if req.ResponseAddr == "" {
-		printErr("ResponseAddr is empty")
+		printWarn("Error marshalling search results: %v", err)
 		return
 	}
 
-	resp := Message{
+	responseMsg := Message{
-		ID:      msgID,
+		ID:      hostID,
-		Type:    msgType,
+		Type:    "text-results",
-		Content: payload,
+		Content: string(resultsJSON),
-		Target:  req.ResponseAddr,
 	}
 
-	if err := sendMessage(resp); err != nil {
-		printWarn("Failed to send binary search results: %v", err)
-	}
-}
-
-func handleSearchTextMessage(msg Message) {
-	var req searchParams
-	if err := json.Unmarshal([]byte(msg.Content), &req); err != nil {
-		printWarn("Invalid JSON: %v", err)
+	// Log the address to be used for sending the response
+	printDebug("Sending text search results to %s", searchParams.ResponseAddr)
+
+	if searchParams.ResponseAddr == "" {
+		printErr("Error: Response address is empty")
 		return
 	}
-	printDebug("Received search-text from %s", req.ResponseAddr)
 
-	results := fetchTextResults(req.Query, req.Safe, req.Lang, req.Page)
-	data, err := encodeTextResults(results)
+	err = sendMessage(searchParams.ResponseAddr, responseMsg)
 	if err != nil {
-		printWarn("Failed to encode text results: %v", err)
-		return
+		printWarn("Error sending text search results to %s: %v", searchParams.ResponseAddr, err)
 	}
 
-	sendBinaryResponse(req, MsgTypeSearchTextResponse, data, msg.ID)
-}
-
-func handleSearchVideoMessage(msg Message) {
-	var req searchParams
-	if err := json.Unmarshal([]byte(msg.Content), &req); err != nil {
-		printWarn("Invalid JSON: %v", err)
-		return
-	}
-	printDebug("Received search-video from %s", req.ResponseAddr)
-
-	results := fetchVideoResults(req.Query, req.Safe, req.Lang, req.Page)
-	data, err := encodeVideoResults(results)
-	if err != nil {
-		printWarn("Failed to encode video results: %v", err)
-		return
-	}
-
-	sendBinaryResponse(req, MsgTypeSearchVideoResponse, data, msg.ID)
-}
-
-func handleSearchMusicMessage(msg Message) {
-	var req searchParams
-	if err := json.Unmarshal([]byte(msg.Content), &req); err != nil {
-		printWarn("Invalid JSON: %v", err)
-		return
-	}
-	printDebug("Received search-music from %s", req.ResponseAddr)
-
-	results := fetchMusicResults(req.Query, req.Page)
-	data, err := encodeMusicResults(results)
-	if err != nil {
-		printWarn("Failed to encode music results: %v", err)
-		return
-	}
-
-	sendBinaryResponse(req, MsgTypeSearchMusicResponse, data, msg.ID)
-}
-
-func handleSearchFileMessage(msg Message) {
-	var req searchParams
-	if err := json.Unmarshal([]byte(msg.Content), &req); err != nil {
-		printWarn("Invalid JSON: %v", err)
-		return
-	}
-	printDebug("Received search-file from %s", req.ResponseAddr)
-
-	results := fetchFileResults(req.Query, req.Safe, req.Lang, req.Page)
-	data, err := encodeFileResults(results)
-	if err != nil {
-		printWarn("Failed to encode file results: %v", err)
-		return
-	}
-
-	sendBinaryResponse(req, MsgTypeSearchFileResponse, data, msg.ID)
-}
-
-func handleSearchForumMessage(msg Message) {
-	var req searchParams
-	if err := json.Unmarshal([]byte(msg.Content), &req); err != nil {
-		printWarn("Invalid JSON: %v", err)
-		return
-	}
-	printDebug("Received search-forum from %s", req.ResponseAddr)
-
-	results := fetchForumResults(req.Query, req.Safe, req.Lang, req.Page)
-	data, err := encodeForumResults(results)
-	if err != nil {
-		printWarn("Failed to encode forum results: %v", err)
-		return
-	}
-
-	sendBinaryResponse(req, MsgTypeSearchForumResponse, data, msg.ID)
 }
 
 func handleSearchImageMessage(msg Message) {
-	var req searchParams
-	if err := json.Unmarshal([]byte(msg.Content), &req); err != nil {
-		printWarn("Invalid JSON: %v", err)
-		return
+	var searchParams struct {
+		Query        string `json:"query"`
+		Safe         string `json:"safe"`
+		Lang         string `json:"lang"`
+		Page         int    `json:"page"`
+		ResponseAddr string `json:"responseAddr"`
 	}
-	printDebug("Received image search type %d from %s", msg.Type, req.ResponseAddr)
+	err := json.Unmarshal([]byte(msg.Content), &searchParams)
 
-	var (
-		thumbsNeeded bool
-		fullNeeded   bool
-	)
-
-	switch msg.Type {
-	case MsgTypeSearchImageRawRequest:
-		thumbsNeeded = false
-		fullNeeded = false
-	case MsgTypeSearchImageThumbRequest:
-		thumbsNeeded = true
-		fullNeeded = false
-	case MsgTypeSearchImageFullRequest:
-		thumbsNeeded = false
-		fullNeeded = true
-	case MsgTypeSearchImageAllRequest:
-		thumbsNeeded = true
-		fullNeeded = true
-	default:
-		printWarn("Unknown image search type: %d", msg.Type)
-		return
-	}
-
-	results := fetchImageResults(req.Query, req.Safe, req.Lang, req.Page, true, thumbsNeeded)
-
-	if fullNeeded || thumbsNeeded {
-		results = prepareProxiedImages(results, msg.Type)
-	}
-
-	data, err := encodeImageResults(results)
 	if err != nil {
-		printWarn("Failed to encode image results: %v", err)
+		log.Printf("Error parsing search parameters: %v", err)
 		return
 	}
 
-	sendBinaryResponse(req, MsgTypeSearchImageResponse, data, msg.ID)
+	log.Printf("Received search-image request. ResponseAddr: %s", searchParams.ResponseAddr)
+	results := fetchImageResults(searchParams.Query, searchParams.Safe, searchParams.Lang, searchParams.Page, true)
+	resultsJSON, err := json.Marshal(results)
+	if err != nil {
+		log.Printf("Error marshalling search results: %v", err)
+		return
+	}
+
+	responseMsg := Message{
+		ID:      hostID,
+		Type:    "image-results",
+		Content: string(resultsJSON),
+	}
+
+	// Log the address to be used for sending the response
+	log.Printf("Sending image search results to %s", searchParams.ResponseAddr)
+
+	if searchParams.ResponseAddr == "" {
+		log.Printf("Error: Response address is empty")
+		return
+	}
+
+	err = sendMessage(searchParams.ResponseAddr, responseMsg)
+	if err != nil {
+		log.Printf("Error sending image search results to %s: %v", searchParams.ResponseAddr, err)
+	}
+}
+
+func handleSearchVideoMessage(msg Message) {
+	var searchParams struct {
+		Query        string `json:"query"`
+		Safe         string `json:"safe"`
+		Lang         string `json:"lang"`
+		Page         int    `json:"page"`
+		ResponseAddr string `json:"responseAddr"`
+	}
+	err := json.Unmarshal([]byte(msg.Content), &searchParams)
+	if err != nil {
+		log.Printf("Error parsing search parameters: %v", err)
+		return
+	}
+
+	log.Printf("Received search-video request. ResponseAddr: %s", searchParams.ResponseAddr)
+
+	results := fetchVideoResults(searchParams.Query, searchParams.Safe, searchParams.Lang, searchParams.Page)
+	resultsJSON, err := json.Marshal(results)
+	if err != nil {
+		log.Printf("Error marshalling search results: %v", err)
+		return
+	}
+
+	responseMsg := Message{
+		ID:      hostID,
+		Type:    "video-results",
+		Content: string(resultsJSON),
+	}
+
+	log.Printf("Sending video search results to %s", searchParams.ResponseAddr)
+
+	if searchParams.ResponseAddr == "" {
+		log.Printf("Error: Response address is empty")
+		return
+	}
+
+	err = sendMessage(searchParams.ResponseAddr, responseMsg)
+	if err != nil {
+		log.Printf("Error sending video search results to %s: %v", searchParams.ResponseAddr, err)
+	}
+}
+
+func handleSearchFileMessage(msg Message) {
+	var searchParams struct {
+		Query        string `json:"query"`
+		Safe         string `json:"safe"`
+		Lang         string `json:"lang"`
+		Page         int    `json:"page"`
+		ResponseAddr string `json:"responseAddr"`
+	}
+	err := json.Unmarshal([]byte(msg.Content), &searchParams)
+	if err != nil {
+		log.Printf("Error parsing search parameters: %v", err)
+		return
+	}
+
+	log.Printf("Received search-file request. ResponseAddr: %s", searchParams.ResponseAddr)
+
+	results := fetchFileResults(searchParams.Query, searchParams.Safe, searchParams.Lang, searchParams.Page)
+	resultsJSON, err := json.Marshal(results)
+	if err != nil {
+		log.Printf("Error marshalling search results: %v", err)
+		return
+	}
+
+	responseMsg := Message{
+		ID:      hostID,
+		Type:    "file-results",
+		Content: string(resultsJSON),
+	}
+
+	log.Printf("Sending file search results to %s", searchParams.ResponseAddr)
+
+	if searchParams.ResponseAddr == "" {
+		log.Printf("Error: Response address is empty")
+		return
+	}
+
+	err = sendMessage(searchParams.ResponseAddr, responseMsg)
+	if err != nil {
+		log.Printf("Error sending file search results to %s: %v", searchParams.ResponseAddr, err)
+	}
+}
+
+func handleSearchForumMessage(msg Message) {
+	var searchParams struct {
+		Query        string `json:"query"`
+		Safe         string `json:"safe"`
+		Lang         string `json:"lang"`
+		Page         int    `json:"page"`
+		ResponseAddr string `json:"responseAddr"`
+	}
+	err := json.Unmarshal([]byte(msg.Content), &searchParams)
+	if err != nil {
+		log.Printf("Error parsing search parameters: %v", err)
+		return
+	}
+
+	log.Printf("Received search-forum request. ResponseAddr: %s", searchParams.ResponseAddr)
+
+	results := fetchForumResults(searchParams.Query, searchParams.Safe, searchParams.Lang, searchParams.Page)
+	resultsJSON, err := json.Marshal(results)
+	if err != nil {
+		log.Printf("Error marshalling search results: %v", err)
+		return
+	}
+
+	responseMsg := Message{
+		ID:      hostID,
+		Type:    "forum-results",
+		Content: string(resultsJSON),
+	}
+
+	// Log the address to be used for sending the response
+	log.Printf("Sending forum search results to %s", searchParams.ResponseAddr)
+
+	if searchParams.ResponseAddr == "" {
+		log.Printf("Error: Response address is empty")
+		return
+	}
+
+	err = sendMessage(searchParams.ResponseAddr, responseMsg)
+	if err != nil {
+		log.Printf("Error sending forum search results to %s: %v", searchParams.ResponseAddr, err)
+	}
 }
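Note: the two branches agree on the request fields but differ in transport: the main branch sends binary payloads with numeric message types, while the indexing branch sends JSON with string types ("search-text", "text-results", and so on). A quick, self-contained check that the indexing side's searchParams shape round-trips through encoding/json (the field values here are made up):

package main

import (
	"encoding/json"
	"fmt"
)

type searchParams struct {
	Query        string `json:"query"`
	Safe         string `json:"safe"`
	Lang         string `json:"lang"`
	Page         int    `json:"page"`
	ResponseAddr string `json:"responseAddr"`
}

func main() {
	in := searchParams{Query: "gopher", Safe: "active", Lang: "en", Page: 2, ResponseAddr: "node1.sock"}
	b, _ := json.Marshal(in)
	var out searchParams
	_ = json.Unmarshal(b, &out)
	fmt.Println(string(b)) // {"query":"gopher","safe":"active","lang":"en","page":2,"responseAddr":"node1.sock"}
	fmt.Println(out == in) // true
}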
node-master.go (new file, 91 lines)
@@ -0,0 +1,91 @@
package main

import (
	"log"
	"sync"
	"time"
)

var (
	isMaster      bool
	masterNode    string
	masterNodeMux sync.RWMutex
)

const (
	heartbeatInterval = 5 * time.Second
	heartbeatTimeout  = 15 * time.Second
	electionTimeout   = 10 * time.Second
)

func sendHeartbeats() {
	for {
		if !isMaster {
			return
		}
		for _, node := range peers {
			msg := Message{
				ID:      hostID,
				Type:    "heartbeat",
				Content: authCode,
			}
			err := sendMessage(node, msg)
			if err != nil {
				log.Printf("Error sending heartbeat to %s: %v", node, err)
			}
		}
		time.Sleep(heartbeatInterval)
	}
}

func checkMasterHeartbeat() {
	for {
		time.Sleep(heartbeatTimeout)
		masterNodeMux.RLock()
		if masterNode == authCode || masterNode == "" {
			masterNodeMux.RUnlock()
			continue
		}
		masterNodeMux.RUnlock()

		masterNodeMux.Lock()
		masterNode = ""
		masterNodeMux.Unlock()
		startElection()
	}
}

func startElection() {
	masterNodeMux.Lock()
	defer masterNodeMux.Unlock()

	for _, node := range peers {
		msg := Message{
			ID:      hostID,
			Type:    "election",
			Content: authCode,
		}
		err := sendMessage(node, msg)
		if err != nil {
			log.Printf("Error sending election message to %s: %v", node, err)
		}
	}

	isMaster = true
	go sendHeartbeats()
}

func handleHeartbeat(content string) {
	masterNodeMux.Lock()
	defer masterNodeMux.Unlock()
	masterNode = content
}

func handleElection(content string) {
	masterNodeMux.Lock()
	defer masterNodeMux.Unlock()

	if content < authCode {
		masterNode = content
	}
}
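Note: handleElection adopts the remote node when `content < authCode`, i.e. the lexicographically smallest auth code wins. Also worth flagging in review: sendHeartbeats reads isMaster and startElection writes it without holding the mutex, so mastership changes race with the heartbeat loop; `peers`, `hostID`, and `authCode` are assumed to be defined elsewhere in the branch. A tiny illustration of the comparison rule, with made-up codes:

package main

import "fmt"

func main() {
	local := "node-b"  // hypothetical local auth code
	remote := "node-a" // hypothetical code from an election message
	// Go compares strings lexicographically, so "node-a" < "node-b".
	if remote < local {
		fmt.Println("remote wins the election:", remote)
	} else {
		fmt.Println("local keeps mastership:", local)
	}
}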
@@ -1,22 +1,19 @@
-//go:build experimental
-// +build experimental
-
 package main
 
 import (
-	"bytes"
-	"encoding/binary"
+	"encoding/json"
 	"fmt"
 	"time"
 )
 
-var fileResultsChan = make(chan []TorrentResult)
-
-func tryOtherNodesForFileSearch(query, safe, lang string, page int) []TorrentResult {
-	for _, nodeTarget := range sockets {
-		results, err := sendFileSearchRequestToNode(nodeTarget, query, safe, lang, page)
+func tryOtherNodesForFileSearch(query, safe, lang string, page int, visitedNodes []string) []TorrentResult {
+	for _, nodeAddr := range peers {
+		if contains(visitedNodes, nodeAddr) {
+			continue // Skip nodes already visited
+		}
+		results, err := sendFileSearchRequestToNode(nodeAddr, query, safe, lang, page, visitedNodes)
 		if err != nil {
-			printWarn("Error contacting node %s: %v", nodeTarget, err)
+			printWarn("Error contacting node %s: %v", nodeAddr, err)
 			continue
 		}
 		if len(results) > 0 {
@@ -26,123 +23,60 @@ func tryOtherNodesForFileSearch(query, safe, lang string, page int) []TorrentResult
 	return nil
 }
 
-func sendFileSearchRequestToNode(target, query, safe, lang string, page int) ([]TorrentResult, error) {
-	payload, err := encodeSearchTextParams(query, safe, lang, page)
+func sendFileSearchRequestToNode(nodeAddr, query, safe, lang string, page int, visitedNodes []string) ([]TorrentResult, error) {
+	visitedNodes = append(visitedNodes, nodeAddr)
+	searchParams := struct {
+		Query        string   `json:"query"`
+		Safe         string   `json:"safe"`
+		Lang         string   `json:"lang"`
+		Page         int      `json:"page"`
+		ResponseAddr string   `json:"responseAddr"`
+		VisitedNodes []string `json:"visitedNodes"`
+	}{
+		Query:        query,
+		Safe:         safe,
+		Lang:         lang,
+		Page:         page,
+		ResponseAddr: fmt.Sprintf("http://localhost:%d/node", config.Port),
+		VisitedNodes: visitedNodes,
+	}
+
+	msgBytes, err := json.Marshal(searchParams)
 	if err != nil {
-		return nil, fmt.Errorf("encode error: %v", err)
+		return nil, fmt.Errorf("failed to marshal search parameters: %v", err)
 	}
 
 	msg := Message{
-		ID:      generateMessageID(),
+		ID:      hostID,
-		Type:    MsgTypeSearchFileRequest,
+		Type:    "search-file",
-		Content: payload,
+		Content: string(msgBytes),
-		Target:  target,
 	}
 
-	if err := sendMessage(msg); err != nil {
-		return nil, fmt.Errorf("send error: %v", err)
+	err = sendMessage(nodeAddr, msg)
+	if err != nil {
+		return nil, fmt.Errorf("failed to send search request to node %s: %v", nodeAddr, err)
 	}
 
+	// Wait for results
 	select {
 	case res := <-fileResultsChan:
 		return res, nil
 	case <-time.After(20 * time.Second):
-		return nil, fmt.Errorf("timeout waiting for results from node %s", target)
+		return nil, fmt.Errorf("timeout waiting for results from node %s", nodeAddr)
 	}
 }
 
 func handleFileResultsMessage(msg Message) {
-	results, err := decodeFileResults([]byte(msg.Content))
+	var results []TorrentResult
+	err := json.Unmarshal([]byte(msg.Content), &results)
 	if err != nil {
-		printWarn("Error decoding file results: %v", err)
+		printWarn("Error unmarshalling file results: %v", err)
 		return
 	}
-	printDebug("Received file results: %+v", results)
 
+	printDebug("Received file results: %+v", results)
+	// Send results to fileResultsChan
 	go func() {
 		fileResultsChan <- results
 	}()
 }
 
-func encodeFileResults(results []TorrentResult) ([]byte, error) {
-	buf := new(bytes.Buffer)
-
-	if err := binary.Write(buf, binary.BigEndian, uint16(len(results))); err != nil {
-		return nil, err
-	}
-
-	for _, r := range results {
-		if err := writeString(buf, r.URL); err != nil {
-			return nil, err
-		}
-		if err := binary.Write(buf, binary.BigEndian, uint32(r.Seeders)); err != nil {
-			return nil, err
-		}
-		if err := binary.Write(buf, binary.BigEndian, uint32(r.Leechers)); err != nil {
-			return nil, err
-		}
-		if err := writeString(buf, r.Magnet); err != nil {
-			return nil, err
-		}
-		if err := binary.Write(buf, binary.BigEndian, uint32(r.Views)); err != nil {
-			return nil, err
-		}
-		if err := writeString(buf, r.Size); err != nil {
-			return nil, err
-		}
-		if err := writeString(buf, r.Title); err != nil {
-			return nil, err
-		}
-	}
-	return buf.Bytes(), nil
-}
-
-func decodeFileResults(data []byte) ([]TorrentResult, error) {
-	buf := bytes.NewReader(data)
-
-	var count uint16
-	if err := binary.Read(buf, binary.BigEndian, &count); err != nil {
-		return nil, err
-	}
-
-	results := make([]TorrentResult, 0, count)
-	for i := 0; i < int(count); i++ {
-		url, err := readString(buf)
-		if err != nil {
-			return nil, err
-		}
-		var seeders, leechers, views uint32
-		if err := binary.Read(buf, binary.BigEndian, &seeders); err != nil {
-			return nil, err
-		}
-		if err := binary.Read(buf, binary.BigEndian, &leechers); err != nil {
-			return nil, err
-		}
-		magnet, err := readString(buf)
-		if err != nil {
-			return nil, err
-		}
-		if err := binary.Read(buf, binary.BigEndian, &views); err != nil {
-			return nil, err
-		}
-		size, err := readString(buf)
-		if err != nil {
-			return nil, err
-		}
-		title, err := readString(buf)
-		if err != nil {
-			return nil, err
-		}
-
-		results = append(results, TorrentResult{
-			URL:      url,
-			Seeders:  int(seeders),
-			Leechers: int(leechers),
-			Magnet:   magnet,
-			Views:    int(views),
-			Size:     size,
-			Title:    title,
-		})
-	}
-	return results, nil
-}
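Note: encodeFileResults/decodeFileResults rely on writeString and readString helpers defined elsewhere on the main branch; their definitions are not part of this diff. A plausible length-prefixed shape (uint16 length + raw bytes) is sketched below purely as an assumption, to make the wire format above readable — the real helpers may differ:

package main

import (
	"bytes"
	"encoding/binary"
	"fmt"
	"io"
)

// Assumed shape: write a big-endian uint16 length, then the bytes.
func writeString(buf *bytes.Buffer, s string) error {
	if err := binary.Write(buf, binary.BigEndian, uint16(len(s))); err != nil {
		return err
	}
	_, err := buf.WriteString(s)
	return err
}

// Assumed inverse: read the uint16 length, then exactly that many bytes.
func readString(r io.Reader) (string, error) {
	var n uint16
	if err := binary.Read(r, binary.BigEndian, &n); err != nil {
		return "", err
	}
	b := make([]byte, n)
	if _, err := io.ReadFull(r, b); err != nil {
		return "", err
	}
	return string(b), nil
}

func main() {
	buf := new(bytes.Buffer)
	if err := writeString(buf, "sample title"); err != nil {
		panic(err)
	}
	s, err := readString(bytes.NewReader(buf.Bytes()))
	fmt.Println(s, err) // sample title <nil>
}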
@@ -1,129 +1,100 @@
-//go:build experimental
-// +build experimental
-
 package main
 
 import (
-	"bytes"
-	"encoding/binary"
+	"encoding/json"
 	"fmt"
 	"time"
 )
 
 var forumResultsChan = make(chan []ForumSearchResult)
 
-func sendForumSearchRequestToNode(target, query, safe, lang string, page int, visitedNodes []string) ([]ForumSearchResult, error) {
-	payload, err := encodeSearchTextParams(query, safe, lang, page) // reuse text param encoding
-	if err != nil {
-		return nil, fmt.Errorf("encode error: %v", err)
+func tryOtherNodesForForumSearch(query, safe, lang string, page int) []ForumSearchResult {
+	for _, nodeAddr := range peers {
+		results, err := sendForumSearchRequestToNode(nodeAddr, query, safe, lang, page, []string{})
+		if err != nil {
+			printWarn("Error contacting node %s: %v", nodeAddr, err)
+			continue
+		}
+		if len(results) > 0 {
+			return results
+		}
+	}
+	return nil
+}
+
+func sendForumSearchRequestToNode(nodeAddr, query, safe, lang string, page int, visitedNodes []string) ([]ForumSearchResult, error) {
+	// Check if the current node has already been visited
+	for _, node := range visitedNodes {
+		if node == hostID {
+			return nil, fmt.Errorf("loop detected: this node (%s) has already been visited", hostID)
+		}
+	}
+
+	// Add current node to the list of visited nodes
+	visitedNodes = append(visitedNodes, hostID)
+
+	searchParams := struct {
+		Query        string   `json:"query"`
+		Safe         string   `json:"safe"`
+		Lang         string   `json:"lang"`
+		Page         int      `json:"page"`
+		ResponseAddr string   `json:"responseAddr"`
+		VisitedNodes []string `json:"visitedNodes"`
+	}{
+		Query:        query,
+		Safe:         safe,
+		Lang:         lang,
+		Page:         page,
+		ResponseAddr: fmt.Sprintf("http://localhost:%d/node", config.Port),
+		VisitedNodes: visitedNodes,
+	}
+
+	msgBytes, err := json.Marshal(searchParams)
+	if err != nil {
+		return nil, fmt.Errorf("failed to marshal search parameters: %v", err)
 	}
 
 	msg := Message{
-		ID:      generateMessageID(),
+		ID:      hostID,
-		Type:    MsgTypeSearchForumRequest,
+		Type:    "search-forum",
-		Content: payload,
+		Content: string(msgBytes),
-		Target:  target,
 	}
 
-	if err := sendMessage(msg); err != nil {
-		return nil, fmt.Errorf("send error: %v", err)
+	err = sendMessage(nodeAddr, msg)
+	if err != nil {
+		return nil, fmt.Errorf("failed to send search request to node %s: %v", nodeAddr, err)
 	}
 
+	// Wait for results
 	select {
 	case res := <-forumResultsChan:
 		return res, nil
 	case <-time.After(20 * time.Second):
-		return nil, fmt.Errorf("timeout waiting for results from node %s", target)
+		return nil, fmt.Errorf("timeout waiting for results from node %s", nodeAddr)
 	}
 }
 
 func handleForumResultsMessage(msg Message) {
-	results, err := decodeForumResults([]byte(msg.Content))
+	var results []ForumSearchResult
+	err := json.Unmarshal([]byte(msg.Content), &results)
 	if err != nil {
-		printWarn("Error decoding forum results: %v", err)
+		printWarn("Error unmarshalling forum results: %v", err)
 		return
 	}
-	printDebug("Received forum results: %+v", results)
 
+	printDebug("Received forum results: %+v", results)
+	// Send results to forumResultsChan
 	go func() {
 		forumResultsChan <- results
 	}()
 }
 
-func encodeForumResults(results []ForumSearchResult) ([]byte, error) {
-	buf := new(bytes.Buffer)
-
-	if err := binary.Write(buf, binary.BigEndian, uint16(len(results))); err != nil {
-		return nil, err
-	}
-
-	for _, r := range results {
-		if err := writeString(buf, r.URL); err != nil {
-			return nil, err
-		}
-		if err := writeString(buf, r.Header); err != nil {
-			return nil, err
-		}
-		if err := writeString(buf, r.Description); err != nil {
-			return nil, err
-		}
-		if err := binary.Write(buf, binary.BigEndian, r.PublishedDate.Unix()); err != nil {
-			return nil, err
-		}
-		if err := writeString(buf, r.ImgSrc); err != nil {
-			return nil, err
-		}
-		if err := writeString(buf, r.ThumbnailSrc); err != nil {
-			return nil, err
-		}
-	}
-
-	return buf.Bytes(), nil
-}
-
-func decodeForumResults(data []byte) ([]ForumSearchResult, error) {
-	buf := bytes.NewReader(data)
-
-	var count uint16
-	if err := binary.Read(buf, binary.BigEndian, &count); err != nil {
-		return nil, err
-	}
-
-	results := make([]ForumSearchResult, 0, count)
-	for i := 0; i < int(count); i++ {
-		url, err := readString(buf)
-		if err != nil {
-			return nil, err
-		}
-		header, err := readString(buf)
-		if err != nil {
-			return nil, err
-		}
-		desc, err := readString(buf)
-		if err != nil {
-			return nil, err
-		}
-		var unixTime int64
-		if err := binary.Read(buf, binary.BigEndian, &unixTime); err != nil {
-			return nil, err
-		}
-		imgSrc, err := readString(buf)
-		if err != nil {
-			return nil, err
-		}
-		thumbSrc, err := readString(buf)
-		if err != nil {
-			return nil, err
-		}
-
-		results = append(results, ForumSearchResult{
-			URL:           url,
-			Header:        header,
-			Description:   desc,
-			PublishedDate: time.Unix(unixTime, 0),
-			ImgSrc:        imgSrc,
-			ThumbnailSrc:  thumbSrc,
-		})
-	}
-	return results, nil
-}
+// Used only to answer requests
+func fetchForumResults(query, safe, lang string, page int) []ForumSearchResult {
+	results, err := PerformRedditSearch(query, safe, page)
+	if err != nil {
+		printWarn("Error fetching forum results: %v", err)
+		return nil
+	}
+	return results
+}
@@ -1,198 +1,84 @@
-//go:build experimental
-// +build experimental
-
 package main
 
 import (
-	"bytes"
-	"encoding/binary"
+	"encoding/json"
 	"fmt"
 	"time"
 )
 
-// const (
-// 	MessageTypeSearchImage         uint8 = 11
-// 	MessageTypeSearchImageThumb    uint8 = 111
-// 	MessageTypeSearchImageFull     uint8 = 112
-// 	MessageTypeSearchImageAllProxy uint8 = 113
-// 	MessageTypeImageResults        uint8 = 22
-// )
-
 var imageResultsChan = make(chan []ImageSearchResult)
 
-func sendImageSearchRequestToNode(target, query, safe, lang string, page int, mode uint8) ([]ImageSearchResult, error) {
-	payload, err := encodeSearchTextParams(query, safe, lang, page)
-	if err != nil {
-		return nil, fmt.Errorf("encode error: %v", err)
-	}
-
-	msg := Message{
-		ID:      generateMessageID(),
-		Type:    mode, // one of the image search types
-		Content: payload,
-		Target:  target,
-	}
-
-	if err := sendMessage(msg); err != nil {
-		return nil, fmt.Errorf("send error: %v", err)
-	}
-
-	select {
-	case res := <-imageResultsChan:
-		return res, nil
-	case <-time.After(30 * time.Second):
-		return nil, fmt.Errorf("timeout from %s", target)
-	}
-}
-
 func handleImageResultsMessage(msg Message) {
-	results, err := decodeImageResults([]byte(msg.Content))
+	var results []ImageSearchResult
+	err := json.Unmarshal([]byte(msg.Content), &results)
 	if err != nil {
-		printWarn("Error decoding image results: %v", err)
+		printWarn("Error unmarshalling image results: %v", err)
 		return
 	}
-	printDebug("Received image results: %+v", results)
 
+	printDebug("Received image results: %+v", results)
+	// Send results to imageResultsChan
 	go func() {
 		imageResultsChan <- results
 	}()
 }
 
-func encodeImageResults(results []ImageSearchResult) ([]byte, error) {
-	buf := new(bytes.Buffer)
-
-	if err := binary.Write(buf, binary.BigEndian, uint16(len(results))); err != nil {
-		return nil, err
-	}
-
-	for _, r := range results {
-		if err := writeString(buf, r.ID); err != nil {
-			return nil, err
-		}
-		if err := writeString(buf, r.Title); err != nil {
-			return nil, err
-		}
-		if err := writeString(buf, r.Full); err != nil {
-			return nil, err
-		}
-		if err := writeString(buf, r.Thumb); err != nil {
-			return nil, err
-		}
-		if err := writeString(buf, r.ProxyFull); err != nil {
-			return nil, err
-		}
-		if err := writeString(buf, r.ProxyThumb); err != nil {
-			return nil, err
-		}
-		if err := writeString(buf, r.Source); err != nil {
-			return nil, err
-		}
-		if err := binary.Write(buf, binary.BigEndian, int32(r.Width)); err != nil {
-			return nil, err
-		}
-		if err := binary.Write(buf, binary.BigEndian, int32(r.Height)); err != nil {
-			return nil, err
-		}
-	}
-
-	return buf.Bytes(), nil
-}
-
-func decodeImageResults(data []byte) ([]ImageSearchResult, error) {
-	buf := bytes.NewReader(data)
-
-	var count uint16
-	if err := binary.Read(buf, binary.BigEndian, &count); err != nil {
-		return nil, err
-	}
-
-	results := make([]ImageSearchResult, 0, count)
-	for i := 0; i < int(count); i++ {
-		id, _ := readString(buf)
-		title, _ := readString(buf)
-		full, _ := readString(buf)
-		thumb, _ := readString(buf)
-		proxyFull, _ := readString(buf)
-		proxyThumb, _ := readString(buf)
-		source, _ := readString(buf)
-
-		var width, height int32
-		if err := binary.Read(buf, binary.BigEndian, &width); err != nil {
-			return nil, err
-		}
-		if err := binary.Read(buf, binary.BigEndian, &height); err != nil {
-			return nil, err
-		}
-
-		results = append(results, ImageSearchResult{
-			ID:         id,
-			Title:      title,
-			Full:       full,
-			Thumb:      thumb,
-			ProxyFull:  proxyFull,
-			ProxyThumb: proxyThumb,
-			Source:     source,
-			Width:      int(width),
-			Height:     int(height),
-		})
-	}
-
-	return results, nil
-}
-
-func prepareProxiedImages(results []ImageSearchResult, mode uint8) []ImageSearchResult {
-	for i := range results {
-		switch mode {
-		case MsgTypeSearchImageThumbRequest:
-			results[i].ProxyThumb = "/image/" + results[i].ID + "_thumb.webp"
-		case MsgTypeSearchImageFullRequest:
-			results[i].ProxyFull = "/image/" + results[i].ID + "_full.webp"
-		case MsgTypeSearchImageAllRequest:
-			results[i].ProxyThumb = "/image/" + results[i].ID + "_thumb.webp"
-			results[i].ProxyFull = "/image/" + results[i].ID + "_full.webp"
-		}
-	}
-	return results
-}
-
-// func handleSearchImageMessage(msg Message) {
-// 	query, safe, lang, page, err := decodeSearchTextParams([]byte(msg.Content))
-// 	if err != nil {
-// 		printWarn("Error decoding image search parameters: %v", err)
-// 		return
-// 	}
-
-// 	results := fetchImageResults(query, safe, lang, page, true, true)
-
-// 	switch msg.Type {
-// 	case MsgTypeSearchImageRawRequest:
-// 		// No proxy URLs needed
-// 	case MsgTypeSearchImageThumbRequest:
-// 		results = prepareProxiedImages(results, MsgTypeSearchImageThumbRequest)
-// 	case MsgTypeSearchImageFullRequest:
-// 		results = prepareProxiedImages(results, MsgTypeSearchImageFullRequest)
-// 	case MsgTypeSearchImageAllRequest:
-// 		results = prepareProxiedImages(results, MsgTypeSearchImageAllRequest)
-// 	default:
-// 		printWarn("Unknown image request mode: %d", msg.Type)
-// 		return
-// 	}
-
-// 	payload, err := encodeImageResults(results)
-// 	if err != nil {
-// 		printWarn("Error encoding image search results: %v", err)
-// 		return
-// 	}
-
-// 	response := Message{
-// 		ID:      msg.ID,
-// 		Type:    MsgTypeSearchImageResponse,
-// 		Content: string(payload),
-// 		Target:  msg.Source, // Reply to sender
-// 	}
-
-// 	if err := sendMessage(response); err != nil {
-// 		printWarn("Error sending image search response: %v", err)
-// 	}
-// }
+func sendImageSearchRequestToNode(nodeAddr, query, safe, lang string, page int, visitedNodes []string) ([]ImageSearchResult, error) {
+	visitedNodes = append(visitedNodes, nodeAddr)
+	searchParams := struct {
+		Query        string   `json:"query"`
+		Safe         string   `json:"safe"`
+		Lang         string   `json:"lang"`
+		Page         int      `json:"page"`
+		ResponseAddr string   `json:"responseAddr"`
+		VisitedNodes []string `json:"visitedNodes"`
+	}{
+		Query:        query,
+		Safe:         safe,
+		Lang:         lang,
+		Page:         page,
+		ResponseAddr: fmt.Sprintf("http://localhost:%d/node", config.Port),
+		VisitedNodes: visitedNodes,
+	}
+
+	msgBytes, err := json.Marshal(searchParams)
+	if err != nil {
+		return nil, fmt.Errorf("failed to marshal search parameters: %v", err)
+	}
+
+	msg := Message{
+		ID:      hostID,
+		Type:    "search-image",
+		Content: string(msgBytes),
+	}
+
+	err = sendMessage(nodeAddr, msg)
+	if err != nil {
+		return nil, fmt.Errorf("failed to send search request to node %s: %v", nodeAddr, err)
+	}
+
+	// Wait for results
+	select {
+	case res := <-imageResultsChan:
+		return res, nil
+	case <-time.After(30 * time.Second):
+		return nil, fmt.Errorf("timeout waiting for results from node %s", nodeAddr)
+	}
+}
+
+func tryOtherNodesForImageSearch(query, safe, lang string, page int, visitedNodes []string) []ImageSearchResult {
+	for _, nodeAddr := range peers {
+		if contains(visitedNodes, nodeAddr) {
+			continue // Skip nodes already visited
+		}
+		results, err := sendImageSearchRequestToNode(nodeAddr, query, safe, lang, page, visitedNodes)
+		if err != nil {
+			printWarn("Error contacting node %s: %v", nodeAddr, err)
+			continue
+		}
+		if len(results) > 0 {
+			return results
+		}
+	}
+	return nil
+}
@@ -1,187 +0,0 @@
//go:build experimental
// +build experimental

package main

import (
	"bytes"
	"encoding/binary"
	"fmt"
	"time"
)

var musicResultsChan = make(chan []MusicResult)

func tryOtherNodesForMusicSearch(query, lang string, safe bool, page int) []MusicResult {
	safeStr := "inactive"
	if safe {
		safeStr = "active"
	}
	for _, nodeTarget := range sockets {
		results, err := sendMusicSearchRequestToNode(nodeTarget, query, safeStr, lang, page)
		if err != nil {
			printWarn("Error contacting node %s: %v", nodeTarget, err)
			continue
		}
		if len(results) > 0 {
			return results
		}
	}
	return nil
}

func sendMusicSearchRequestToNode(target, query, safe, lang string, page int) ([]MusicResult, error) {
	payload, err := encodeSearchTextParams(query, safe, lang, page)
	if err != nil {
		return nil, fmt.Errorf("encode error: %v", err)
	}

	msg := Message{
		ID:      generateMessageID(),
		Type:    MsgTypeSearchMusicRequest,
		Content: payload,
		Target:  target,
	}

	err = sendMessage(msg)
	if err != nil {
		return nil, fmt.Errorf("failed to send music request to node %s: %v", target, err)
	}

	select {
	case res := <-musicResultsChan:
		return res, nil
	case <-time.After(20 * time.Second):
		return nil, fmt.Errorf("timeout waiting for music results from node %s", target)
	}
}

func handleMusicResultsMessage(msg Message) {
	results, err := decodeMusicResults([]byte(msg.Content))
	if err != nil {
		printWarn("Error decoding music results: %v", err)
		return
	}
	printDebug("Received music results: %+v", results)

	go func() {
		musicResultsChan <- results
	}()
}

func encodeMusicResults(results []MusicResult) ([]byte, error) {
	buf := new(bytes.Buffer)

	if err := binary.Write(buf, binary.BigEndian, uint16(len(results))); err != nil {
		return nil, err
	}

	for _, r := range results {
		if err := writeString(buf, r.URL); err != nil {
			return nil, err
		}
		if err := writeString(buf, r.Title); err != nil {
			return nil, err
		}
		if err := writeString(buf, r.Artist); err != nil {
			return nil, err
		}
		if err := writeString(buf, r.Description); err != nil {
			return nil, err
		}
		if err := writeString(buf, r.PublishedDate); err != nil {
			return nil, err
		}
		if err := writeString(buf, r.Thumbnail); err != nil {
			return nil, err
		}
		if err := writeString(buf, r.Source); err != nil {
			return nil, err
		}
		if err := writeString(buf, r.Duration); err != nil {
			return nil, err
		}
	}

	return buf.Bytes(), nil
}

func decodeMusicResults(data []byte) ([]MusicResult, error) {
	buf := bytes.NewReader(data)

	var count uint16
	if err := binary.Read(buf, binary.BigEndian, &count); err != nil {
		return nil, err
	}

	results := make([]MusicResult, 0, count)
	for i := 0; i < int(count); i++ {
		url, err := readString(buf)
		if err != nil {
			return nil, err
		}
		title, err := readString(buf)
		if err != nil {
			return nil, err
		}
		artist, err := readString(buf)
		if err != nil {
			return nil, err
		}
		description, err := readString(buf)
		if err != nil {
			return nil, err
		}
		date, err := readString(buf)
		if err != nil {
			return nil, err
		}
		thumb, err := readString(buf)
		if err != nil {
			return nil, err
		}
		source, err := readString(buf)
		if err != nil {
			return nil, err
		}
		duration, err := readString(buf)
		if err != nil {
			return nil, err
		}

		results = append(results, MusicResult{
			URL:           url,
			Title:         title,
			Artist:        artist,
			Description:   description,
			PublishedDate: date,
			Thumbnail:     thumb,
			Source:        source,
			Duration:      duration,
		})
	}
	return results, nil
}

// func handleMusicSearchRequest(msg Message) {
// 	buf := bytes.NewReader([]byte(msg.Content))
// 	query, _ := readString(buf)

// 	var page uint16
// 	binary.Read(buf, binary.BigEndian, &page)

// 	results := fetchMusicResults(query, int(page))
// 	encoded, err := encodeMusicResults(results)
// 	if err != nil {
// 		printWarn("Encoding music results failed: %v", err)
// 		return
// 	}

// 	reply := Message{
// 		ID:      msg.ID,
// 		Type:    MsgTypeSearchMusicResponse,
// 		Content: string(encoded),
// 		Target:  msg.Target, // Send back to sender
// 	}
// 	sendMessage(reply)
// }
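Note: every node client in this diff shares the same request/response shape: fire a message, then block on a package-level channel guarded by a select timeout (20s here, 30s for images). Because those channels are package-level and not keyed by message ID, concurrent requests can receive each other's results — worth keeping in mind when reviewing. A generic sketch of the waiting half of that pattern (the names are placeholders, not repository API):

package main

import (
	"errors"
	"fmt"
	"time"
)

// awaitResults blocks until a result arrives on ch or the timeout elapses.
func awaitResults[T any](ch <-chan T, timeout time.Duration) (T, error) {
	var zero T
	select {
	case res := <-ch:
		return res, nil
	case <-time.After(timeout):
		return zero, errors.New("timeout waiting for results")
	}
}

func main() {
	ch := make(chan []string, 1)
	ch <- []string{"hit"} // stand-in for a handler delivering results
	res, err := awaitResults(ch, 2*time.Second)
	fmt.Println(res, err) // [hit] <nil>
}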
@ -1,23 +1,21 @@
|
||||||
//go:build experimental
|
|
||||||
// +build experimental
|
|
||||||
|
|
||||||
package main
|
package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
"encoding/json"
|
||||||
"encoding/binary"
|
|
||||||
"fmt"
|
"fmt"
|
||||||
"time"
|
"time"
|
||||||
)
|
)
|
||||||
|
|
||||||
var textResultsChan = make(chan []TextSearchResult)
|
var textResultsChan = make(chan []TextSearchResult)
|
||||||
|
|
||||||
// Try other nodes is not defined for every type
|
func tryOtherNodesForTextSearch(query, safe, lang string, page int, visitedNodes []string) []TextSearchResult {
|
||||||
func tryOtherNodesForTextSearch(query, safe, lang string, page int) []TextSearchResult {
|
for _, nodeAddr := range peers {
|
||||||
for _, nodeTarget := range sockets {
|
if contains(visitedNodes, nodeAddr) {
|
||||||
results, err := sendTextSearchRequestToNode(nodeTarget, query, safe, lang, page)
|
+			continue // Skip nodes already visited
+		}
+		results, err := sendTextSearchRequestToNode(nodeAddr, query, safe, lang, page, visitedNodes)
 		if err != nil {
-			printWarn("Error contacting node %s: %v", nodeTarget, err)
+			printWarn("Error contacting node %s: %v", nodeAddr, err)
 			continue
 		}
 		if len(results) > 0 {
@@ -27,102 +25,60 @@ func tryOtherNodesForTextSearch(query, safe, lang string, page int) []TextSearch
 	return nil
 }

-func sendTextSearchRequestToNode(target, query, safe, lang string, page int) ([]TextSearchResult, error) {
-	payload, err := encodeSearchTextParams(query, safe, lang, page)
+func sendTextSearchRequestToNode(nodeAddr, query, safe, lang string, page int, visitedNodes []string) ([]TextSearchResult, error) {
+	visitedNodes = append(visitedNodes, nodeAddr)
+	searchParams := struct {
+		Query        string   `json:"query"`
+		Safe         string   `json:"safe"`
+		Lang         string   `json:"lang"`
+		Page         int      `json:"page"`
+		ResponseAddr string   `json:"responseAddr"`
+		VisitedNodes []string `json:"visitedNodes"`
+	}{
+		Query:        query,
+		Safe:         safe,
+		Lang:         lang,
+		Page:         page,
+		ResponseAddr: fmt.Sprintf("http://localhost:%d/node", config.Port),
+		VisitedNodes: visitedNodes,
+	}
+
+	msgBytes, err := json.Marshal(searchParams)
 	if err != nil {
-		return nil, fmt.Errorf("encode error: %v", err)
+		return nil, fmt.Errorf("failed to marshal search parameters: %v", err)
 	}

 	msg := Message{
-		ID:      generateMessageID(), // assume function returns uint32
-		Type:    MsgTypeSearchTextRequest,
-		Content: payload,
-		Target:  target,
+		ID:      hostID,
+		Type:    "search-text",
+		Content: string(msgBytes),
 	}

-	err = sendMessage(msg)
+	err = sendMessage(nodeAddr, msg)
 	if err != nil {
-		return nil, fmt.Errorf("failed to send search request to node %s: %v", target, err)
+		return nil, fmt.Errorf("failed to send search request to node %s: %v", nodeAddr, err)
 	}

+	// Wait for results
 	select {
 	case res := <-textResultsChan:
 		return res, nil
 	case <-time.After(20 * time.Second):
-		return nil, fmt.Errorf("timeout waiting for results from node %s", target)
+		return nil, fmt.Errorf("timeout waiting for results from node %s", nodeAddr)
 	}
 }

 func handleTextResultsMessage(msg Message) {
-	results, err := decodeTextResults([]byte(msg.Content))
+	var results []TextSearchResult
+	err := json.Unmarshal([]byte(msg.Content), &results)
 	if err != nil {
-		printWarn("Error decoding text results: %v", err)
+		printWarn("Error unmarshalling text results: %v", err)
 		return
 	}
-	printDebug("Received text results: %+v", results)

+	printDebug("Received text results: %+v", results)
+	// Send results to textResultsChan
 	go func() {
 		textResultsChan <- results
 	}()
 }

-func encodeTextResults(results []TextSearchResult) ([]byte, error) {
-	buf := new(bytes.Buffer)
-
-	if err := binary.Write(buf, binary.BigEndian, uint16(len(results))); err != nil {
-		return nil, err
-	}
-
-	for _, r := range results {
-		if err := writeString(buf, r.URL); err != nil {
-			return nil, err
-		}
-		if err := writeString(buf, r.Header); err != nil {
-			return nil, err
-		}
-		if err := writeString(buf, r.Description); err != nil {
-			return nil, err
-		}
-		if err := writeString(buf, r.Source); err != nil {
-			return nil, err
-		}
-	}
-
-	return buf.Bytes(), nil
-}
-
-func decodeTextResults(data []byte) ([]TextSearchResult, error) {
-	buf := bytes.NewReader(data)
-
-	var count uint16
-	if err := binary.Read(buf, binary.BigEndian, &count); err != nil {
-		return nil, err
-	}
-
-	results := make([]TextSearchResult, 0, count)
-	for i := 0; i < int(count); i++ {
-		url, err := readString(buf)
-		if err != nil {
-			return nil, err
-		}
-		header, err := readString(buf)
-		if err != nil {
-			return nil, err
-		}
-		description, err := readString(buf)
-		if err != nil {
-			return nil, err
-		}
-		source, err := readString(buf)
-		if err != nil {
-			return nil, err
-		}
-		results = append(results, TextSearchResult{
-			URL:         url,
-			Header:      header,
-			Description: description,
-			Source:      source,
-		})
-	}
-	return results, nil
-}
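The new code gates forwarding on a contains helper that never appears in the hunks shown on this page. For reference, a minimal sketch of what it presumably looks like; the name comes from the call sites above, and the linear scan is an assumption:

// contains reports whether addr is already in the visited list.
// A linear scan is fine here: the list is bounded by how many
// peers a single query can traverse.
func contains(visited []string, addr string) bool {
    for _, v := range visited {
        if v == addr {
            return true
        }
    }
    return false
}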
@@ -1,22 +1,19 @@
-//go:build experimental
-// +build experimental
-
 package main

 import (
-	"bytes"
-	"encoding/binary"
+	"encoding/json"
 	"fmt"
 	"time"
 )

-var videoResultsChan = make(chan []VideoResult)
-
-func tryOtherNodesForVideoSearch(query, safe, lang string, page int) []VideoResult {
-	for _, node := range sockets {
-		results, err := sendVideoSearchRequestToNode(node, query, safe, lang, page)
+func tryOtherNodesForVideoSearch(query, safe, lang string, page int, visitedNodes []string) []VideoResult {
+	for _, nodeAddr := range peers {
+		if contains(visitedNodes, nodeAddr) {
+			continue // Skip nodes already visited
+		}
+		results, err := sendVideoSearchRequestToNode(nodeAddr, query, safe, lang, page, visitedNodes)
 		if err != nil {
-			printWarn("Error contacting node %s: %v", node, err)
+			printWarn("Error contacting node %s: %v", nodeAddr, err)
 			continue
 		}
 		if len(results) > 0 {
@@ -26,134 +23,60 @@ func tryOtherNodesForVideoSearch(query, safe, lang string, page int) []VideoResu
 	return nil
 }

-func sendVideoSearchRequestToNode(target, query, safe, lang string, page int) ([]VideoResult, error) {
-	payload, err := encodeSearchTextParams(query, safe, lang, page)
+func sendVideoSearchRequestToNode(nodeAddr, query, safe, lang string, page int, visitedNodes []string) ([]VideoResult, error) {
+	visitedNodes = append(visitedNodes, nodeAddr)
+	searchParams := struct {
+		Query        string   `json:"query"`
+		Safe         string   `json:"safe"`
+		Lang         string   `json:"lang"`
+		Page         int      `json:"page"`
+		ResponseAddr string   `json:"responseAddr"`
+		VisitedNodes []string `json:"visitedNodes"`
+	}{
+		Query:        query,
+		Safe:         safe,
+		Lang:         lang,
+		Page:         page,
+		ResponseAddr: fmt.Sprintf("http://localhost:%d/node", config.Port),
+		VisitedNodes: visitedNodes,
+	}
+
+	msgBytes, err := json.Marshal(searchParams)
 	if err != nil {
-		return nil, fmt.Errorf("encode error: %v", err)
+		return nil, fmt.Errorf("failed to marshal search parameters: %v", err)
 	}

 	msg := Message{
-		ID:      generateMessageID(),
-		Type:    MsgTypeSearchVideoRequest,
-		Content: payload,
-		Target:  target,
+		ID:      hostID,
+		Type:    "search-video",
+		Content: string(msgBytes),
 	}

-	if err := sendMessage(msg); err != nil {
-		return nil, fmt.Errorf("send error: %v", err)
+	err = sendMessage(nodeAddr, msg)
+	if err != nil {
+		return nil, fmt.Errorf("failed to send search request to node %s: %v", nodeAddr, err)
 	}

+	// Wait for results
 	select {
 	case res := <-videoResultsChan:
 		return res, nil
 	case <-time.After(20 * time.Second):
-		return nil, fmt.Errorf("timeout waiting for results from node %s", target)
+		return nil, fmt.Errorf("timeout waiting for results from node %s", nodeAddr)
 	}
 }

 func handleVideoResultsMessage(msg Message) {
-	results, err := decodeVideoResults([]byte(msg.Content))
+	var results []VideoResult
+	err := json.Unmarshal([]byte(msg.Content), &results)
 	if err != nil {
-		printWarn("Error decoding video results: %v", err)
+		printWarn("Error unmarshalling video results: %v", err)
 		return
 	}
-	printDebug("Received video results: %+v", results)

+	printDebug("Received video results: %+v", results)
+	// Send results to videoResultsChan
 	go func() {
 		videoResultsChan <- results
 	}()
 }

-func encodeVideoResults(results []VideoResult) ([]byte, error) {
-	buf := new(bytes.Buffer)
-
-	if err := binary.Write(buf, binary.BigEndian, uint16(len(results))); err != nil {
-		return nil, err
-	}
-
-	for _, r := range results {
-		if err := writeString(buf, r.Href); err != nil {
-			return nil, err
-		}
-		if err := writeString(buf, r.Title); err != nil {
-			return nil, err
-		}
-		if err := writeString(buf, r.Date); err != nil {
-			return nil, err
-		}
-		if err := writeString(buf, r.Views); err != nil {
-			return nil, err
-		}
-		if err := writeString(buf, r.Creator); err != nil {
-			return nil, err
-		}
-		if err := writeString(buf, r.Publisher); err != nil {
-			return nil, err
-		}
-		if err := writeString(buf, r.Image); err != nil {
-			return nil, err
-		}
-		if err := writeString(buf, r.Duration); err != nil {
-			return nil, err
-		}
-	}
-
-	return buf.Bytes(), nil
-}
-
-func decodeVideoResults(data []byte) ([]VideoResult, error) {
-	buf := bytes.NewReader(data)
-
-	var count uint16
-	if err := binary.Read(buf, binary.BigEndian, &count); err != nil {
-		return nil, err
-	}
-
-	results := make([]VideoResult, 0, count)
-	for i := 0; i < int(count); i++ {
-		href, err := readString(buf)
-		if err != nil {
-			return nil, err
-		}
-		title, err := readString(buf)
-		if err != nil {
-			return nil, err
-		}
-		date, err := readString(buf)
-		if err != nil {
-			return nil, err
-		}
-		views, err := readString(buf)
-		if err != nil {
-			return nil, err
-		}
-		creator, err := readString(buf)
-		if err != nil {
-			return nil, err
-		}
-		publisher, err := readString(buf)
-		if err != nil {
-			return nil, err
-		}
-		image, err := readString(buf)
-		if err != nil {
-			return nil, err
-		}
-		duration, err := readString(buf)
-		if err != nil {
-			return nil, err
-		}
-
-		results = append(results, VideoResult{
-			Href:      href,
-			Title:     title,
-			Date:      date,
-			Views:     views,
-			Creator:   creator,
-			Publisher: publisher,
-			Image:     image,
-			Duration:  duration,
-		})
-	}
-	return results, nil
-}
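Note that the text and video senders declare the same anonymous searchParams struct field for field. If that duplication is ever worth removing, the payload could live in one named type shared by every "search-*" message; a sketch mirroring the JSON tags used above (the type name searchRequest is hypothetical, not part of this changeset):

// searchRequest is the JSON body shared by the "search-*" messages.
// Field set and tags match the anonymous structs in the diffs above.
type searchRequest struct {
    Query        string   `json:"query"`
    Safe         string   `json:"safe"`
    Lang         string   `json:"lang"`
    Page         int      `json:"page"`
    ResponseAddr string   `json:"responseAddr"`
    VisitedNodes []string `json:"visitedNodes"`
}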
node-update.go (new file, 28 lines)
@@ -0,0 +1,28 @@
+package main
+
+import (
+	"fmt"
+	"log"
+	"time"
+)
+
+// Function to sync updates across all nodes
+func nodeUpdateSync() {
+	fmt.Println("Syncing updates across all nodes...")
+	for _, peerAddr := range peers {
+		fmt.Printf("Notifying node %s about update...\n", peerAddr)
+		msg := Message{
+			ID:      hostID,
+			Type:    "update",
+			Content: "Start update process",
+		}
+		err := sendMessage(peerAddr, msg)
+		if err != nil {
+			log.Printf("Failed to notify node %s: %v\n", peerAddr, err)
+			continue
+		}
+		fmt.Printf("Node %s notified. Waiting for it to update...\n", peerAddr)
+		time.Sleep(30 * time.Second) // Adjust sleep time as needed to allow for updates
+	}
+	fmt.Println("All nodes have been updated.")
+}
node.go (350 changes)
@@ -1,152 +1,75 @@
-//go:build experimental
-// +build experimental
-
 package main

 import (
 	"bytes"
-	"encoding/binary"
+	"crypto/rand"
 	"encoding/json"
-	"errors"
 	"fmt"
-	"io"
-	"net"
+	"io/ioutil"
 	"net/http"
-	"os"
 	"time"
 )

 var (
-	sockets   []string
+	authCode string
+	peers    []string
 	hostID    string
-	socketDir string
 )

 type Message struct {
-	ID      uint32
-	Type    uint8
-	Content []byte
-	Target  string
+	ID           string   `json:"id"`
+	Type         string   `json:"type"`
+	Content      string   `json:"content"`
+	VisitedNodes []string `json:"visitedNodes"`
 }

-const (
-	MsgTypeNone uint8 = 0
-	MsgTypeTest uint8 = 1
-
-	// Request types (10–99)
-	MsgTypeSearchTextRequest       uint8 = 10
-	MsgTypeSearchImageRawRequest   uint8 = 11
-	MsgTypeSearchImageThumbRequest uint8 = 12
-	MsgTypeSearchImageFullRequest  uint8 = 13
-	MsgTypeSearchImageAllRequest   uint8 = 14
-	MsgTypeSearchVideoRequest      uint8 = 15
-	MsgTypeSearchFileRequest       uint8 = 16
-	MsgTypeSearchForumRequest      uint8 = 17
-	MsgTypeSearchMusicRequest      uint8 = 18
-
-	// Response types (110–199)
-	MsgTypeSearchTextResponse  uint8 = 110
-	MsgTypeSearchImageResponse uint8 = 111
-	MsgTypeSearchVideoResponse uint8 = 112
-	MsgTypeSearchFileResponse  uint8 = 113
-	MsgTypeSearchForumResponse uint8 = 114
-	MsgTypeSearchMusicResponse uint8 = 115
-)
-
 func loadNodeConfig() {
-	sockets = config.Nodes
-	socketDir = "/tmp/" // Directory where sockets are stored, for now fixed tmp dir, can be changed later
+	authCode = config.AuthCode
+	peers = config.Peers
 }

-var messageIDCounter uint32 = 0
-
-func generateMessageID() uint32 {
-	if messageIDCounter == ^uint32(0) { // 0xFFFFFFFF
-		messageIDCounter = 1
-	} else {
-		messageIDCounter++
-	}
-	return messageIDCounter
-}
-
-func encodeSearchTextParams(query, safe, lang string, page int) ([]byte, error) {
-	buf := new(bytes.Buffer)
-
-	if err := writeString(buf, query); err != nil {
-		return nil, err
-	}
-	if err := writeString(buf, safe); err != nil {
-		return nil, err
-	}
-	if err := writeString(buf, lang); err != nil {
-		return nil, err
-	}
-	if err := binary.Write(buf, binary.BigEndian, uint16(page)); err != nil {
-		return nil, err
-	}
-
-	return buf.Bytes(), nil
-}
-
-func sendMessage(msg Message) error {
-	socketPath := socketDir + msg.Target + ".sock"
-
-	conn, err := net.Dial("unix", socketPath)
-	if err != nil {
-		return fmt.Errorf("failed to connect to socket %s: %v", socketPath, err)
-	}
-	defer conn.Close()
-
-	msgBytes, err := serializeMessage(msg)
-	if err != nil {
-		return fmt.Errorf("serialization error: %v", err)
-	}
-
-	_, err = conn.Write(msgBytes)
-	return err
-}
-
-func startUnixSocketServer(socketName string) {
-	socketPath := socketDir + socketName + ".sock"
-
-	if _, err := os.Stat(socketPath); err == nil {
-		os.Remove(socketPath)
-	}
-
-	listener, err := net.Listen("unix", socketPath)
-	if err != nil {
-		panic(fmt.Sprintf("Failed to listen on %s: %v", socketPath, err))
-	}
-	defer listener.Close()
-	os.Chmod(socketPath, 0666)
-
-	printInfo("Listening on UNIX socket: %s", socketPath)
-
-	for {
-		conn, err := listener.Accept()
-		if err != nil {
-			printWarn("Accept error: %v", err)
-			continue
-		}
-
-		go func(c net.Conn) {
-			defer c.Close()
-			buf, err := io.ReadAll(c)
-			if err != nil {
-				printWarn("Read error: %v", err)
-				return
-			}
-
-			msg, err := deserializeMessage(buf)
-			if err != nil {
-				printWarn("Deserialization error: %v", err)
-				return
-			}
-
-			printDebug("Received binary message: %+v", msg)
-			interpretMessage(msg)
-		}(conn)
-	}
-}
+func generateHostID() (string, error) {
+	bytes := make([]byte, 16)
+	_, err := rand.Read(bytes)
+	if err != nil {
+		return "", fmt.Errorf("failed to generate host ID: %v", err)
+	}
+	return fmt.Sprintf("%x", bytes), nil
+}
+
+func sendMessage(serverAddr string, msg Message) error {
+	if serverAddr == "" {
+		return fmt.Errorf("server address is empty")
+	}
+
+	msgBytes, err := json.Marshal(msg)
+	if err != nil {
+		return fmt.Errorf("failed to marshal message: %v", err)
+	}
+
+	req, err := http.NewRequest("POST", serverAddr, bytes.NewBuffer(msgBytes))
+	if err != nil {
+		return fmt.Errorf("failed to create request: %v", err)
+	}
+	req.Header.Set("Content-Type", "application/json")
+	req.Header.Set("Authorization", authCode)
+
+	client := &http.Client{
+		Timeout: time.Second * 10,
+	}
+
+	resp, err := client.Do(req)
+	if err != nil {
+		return fmt.Errorf("failed to send request: %v", err)
+	}
+	defer resp.Body.Close()
+
+	if resp.StatusCode != http.StatusOK {
+		body, _ := ioutil.ReadAll(resp.Body)
+		return fmt.Errorf("server error: %s", body)
+	}
+
+	return nil
+}

 func handleNodeRequest(w http.ResponseWriter, r *http.Request) {
@@ -155,6 +78,12 @@ func handleNodeRequest(w http.ResponseWriter, r *http.Request) {
 		return
 	}

+	auth := r.Header.Get("Authorization")
+	if auth != authCode {
+		http.Error(w, "Unauthorized", http.StatusUnauthorized)
+		return
+	}
+
 	var msg Message
 	err := json.NewDecoder(r.Body).Decode(&msg)
 	if err != nil {
@@ -163,177 +92,64 @@ func handleNodeRequest(w http.ResponseWriter, r *http.Request) {
 	}
 	defer r.Body.Close()

-	printDebug("Received HTTP message: %+v", msg)
+	printDebug("Received message: %+v\n", msg)
+	w.Write([]byte("Message received"))

 	interpretMessage(msg)
 }

-func startNodeClientUnix() {
-	var idCounter uint32 = 0
-
-	for {
-		msg := Message{
-			ID:      idCounter,
-			Type:    MsgTypeTest,
-			Content: []byte("This is a test message via UNIX socket"),
-			Target:  "node2", ///!!!
-		}
-		idCounter++
-
-		if err := sendMessage(msg); err != nil {
-			printWarn("Send error: %v", err)
-		}
-		time.Sleep(10 * time.Second)
-	}
-}
+func startNodeClient() {
+	for {
+		for _, peerAddr := range peers {
+			msg := Message{
+				ID:      hostID,
+				Type:    "test",
+				Content: "This is a test message from the client node",
+			}
+
+			err := sendMessage(peerAddr, msg)
+			if err != nil {
+				printWarn("Error sending message to %s: %v", peerAddr, err)
+			} else {
+				printInfo("Message sent successfully to: %s", peerAddr)
+			}
+		}
+		time.Sleep(10 * time.Second)
+	}
+}

 func interpretMessage(msg Message) {
-	printDebug("Received message: %s", msg.Content)
-
 	switch msg.Type {
-	case MsgTypeTest:
-		handleTestMessage(msg)
-	case MsgTypeSearchTextRequest:
+	case "test":
+		printDebug("Received test message: %v", msg.Content)
+	case "update":
+		printDebug("Received update message: %v", msg.Content)
+		go update()
+	case "heartbeat":
+		handleHeartbeat(msg.Content)
+	case "election":
+		handleElection(msg.Content)
+	case "search-text":
 		handleSearchTextMessage(msg)
-	case MsgTypeSearchImageRawRequest, MsgTypeSearchImageThumbRequest, MsgTypeSearchImageFullRequest, MsgTypeSearchImageAllRequest:
+	case "search-image":
 		handleSearchImageMessage(msg)
-	case MsgTypeSearchVideoRequest:
+	case "search-video":
 		handleSearchVideoMessage(msg)
-	case MsgTypeSearchFileRequest:
+	case "search-file":
 		handleSearchFileMessage(msg)
-	case MsgTypeSearchForumRequest:
+	case "search-forum":
 		handleSearchForumMessage(msg)
-	case MsgTypeSearchMusicRequest:
-		handleSearchMusicMessage(msg)
-
-	case MsgTypeSearchTextResponse:
-		handleTextResultsMessage(msg)
-	case MsgTypeSearchImageResponse:
-		handleImageResultsMessage(msg)
-	case MsgTypeSearchVideoResponse:
-		handleVideoResultsMessage(msg)
-	case MsgTypeSearchFileResponse:
-		handleFileResultsMessage(msg)
-	case MsgTypeSearchForumResponse:
+	case "forum-results":
 		handleForumResultsMessage(msg)
-	case MsgTypeSearchMusicResponse:
-		handleMusicResultsMessage(msg)
+	case "text-results":
+		handleTextResultsMessage(msg)
+	case "image-results":
+		handleImageResultsMessage(msg)
+	case "video-results":
+		handleVideoResultsMessage(msg)
+	case "file-results":
+		handleFileResultsMessage(msg)
 	default:
-		printWarn("Unknown message type: %d", msg.Type)
+		printWarn("Received unknown message type: %v", msg.Type)
 	}
 }

-// Serialize Message to binary
-func serializeMessage(msg Message) ([]byte, error) {
-	buf := new(bytes.Buffer)
-
-	if err := binary.Write(buf, binary.BigEndian, msg.ID); err != nil {
-		return nil, err
-	}
-	if err := binary.Write(buf, binary.BigEndian, msg.Type); err != nil {
-		return nil, err
-	}
-
-	// Content
-	contentBytes := []byte(msg.Content)
-	if len(contentBytes) > 65535 {
-		return nil, errors.New("content too long")
-	}
-	if err := binary.Write(buf, binary.BigEndian, uint16(len(contentBytes))); err != nil {
-		return nil, err
-	}
-	buf.Write(contentBytes)
-
-	// Target
-	targetBytes := []byte(msg.Target)
-	if len(targetBytes) > 255 {
-		return nil, errors.New("target name too long")
-	}
-	if err := buf.WriteByte(uint8(len(targetBytes))); err != nil {
-		return nil, err
-	}
-	buf.Write(targetBytes)
-
-	return buf.Bytes(), nil
-}
-
-// Deserialize binary to Message
-func deserializeMessage(data []byte) (Message, error) {
-	buf := bytes.NewReader(data)
-	var msg Message
-
-	if err := binary.Read(buf, binary.BigEndian, &msg.ID); err != nil {
-		return msg, err
-	}
-	if err := binary.Read(buf, binary.BigEndian, &msg.Type); err != nil {
-		return msg, err
-	}
-
-	var contentLen uint16
-	if err := binary.Read(buf, binary.BigEndian, &contentLen); err != nil {
-		return msg, err
-	}
-	content := make([]byte, contentLen)
-	if _, err := io.ReadFull(buf, content); err != nil {
-		return msg, err
-	}
-	msg.Content = content
-
-	var targetLen uint8
-	if err := binary.Read(buf, binary.BigEndian, &targetLen); err != nil {
-		return msg, err
-	}
-	target := make([]byte, targetLen)
-	if _, err := io.ReadFull(buf, target); err != nil {
-		return msg, err
-	}
-	msg.Target = string(target)
-
-	return msg, nil
-}
-
-func writeString(buf *bytes.Buffer, s string) error {
-	if err := binary.Write(buf, binary.BigEndian, uint16(len(s))); err != nil {
-		return err
-	}
-	_, err := buf.Write([]byte(s))
-	return err
-}
-
-func readString(buf *bytes.Reader) (string, error) {
-	var length uint16
-	if err := binary.Read(buf, binary.BigEndian, &length); err != nil {
-		return "", err
-	}
-	strBytes := make([]byte, length)
-	if _, err := io.ReadFull(buf, strBytes); err != nil {
-		return "", err
-	}
-	return string(strBytes), nil
-}
-
-type testPayload struct {
-	Message      string `json:"message"`
-	ResponseAddr string `json:"ResponseAddr"`
-}
-
-func handleTestMessage(msg Message) {
-	var payload testPayload
-	if err := json.Unmarshal([]byte(msg.Content), &payload); err != nil {
-		printWarn("Failed to parse test payload: %v", err)
-		return
-	}
-	printDebug("Received message: %s", payload.Message)
-	printInfo("Received TEST message: %s", payload.Message)
-
-	reply := Message{
-		ID:      msg.ID,
-		Type:    MsgTypeTest,
-		Content: []byte("hello test"),
-		Target:  payload.ResponseAddr,
-	}
-
-	if err := sendMessage(reply); err != nil {
-		printWarn("Failed to send test response: %v", err)
-	}
-}
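To make the new transport concrete: after this change, every sendMessage call is one authenticated HTTP POST carrying one JSON-encoded Message, and handleNodeRequest is the receiving end. A minimal, self-contained sketch of the same exchange; the peer address http://localhost:5000/node and the auth code example-auth-code are placeholders, not values from this changeset:

package main

import (
    "bytes"
    "encoding/json"
    "fmt"
    "net/http"
)

// Message mirrors the struct defined in node.go above.
type Message struct {
    ID           string   `json:"id"`
    Type         string   `json:"type"`
    Content      string   `json:"content"`
    VisitedNodes []string `json:"visitedNodes"`
}

func main() {
    // One search request, framed exactly as sendMessage frames it:
    // the inner search parameters are themselves JSON, carried as a string.
    msg := Message{
        ID:      "0123456789abcdef0123456789abcdef", // shape of a generateHostID value
        Type:    "search-text",
        Content: `{"query":"cats","safe":"off","lang":"en","page":1}`,
    }
    body, err := json.Marshal(msg)
    if err != nil {
        panic(err)
    }

    req, err := http.NewRequest("POST", "http://localhost:5000/node", bytes.NewBuffer(body))
    if err != nil {
        panic(err)
    }
    req.Header.Set("Content-Type", "application/json")
    req.Header.Set("Authorization", "example-auth-code") // must equal the peer's config.AuthCode

    resp, err := http.DefaultClient.Do(req)
    if err != nil {
        fmt.Println("send failed:", err)
        return
    }
    defer resp.Body.Close()
    fmt.Println("peer answered:", resp.Status) // handleNodeRequest writes "Message received"
}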
@@ -10,57 +10,28 @@ type OpenSearchDescription struct {
 	XMLName xml.Name `xml:"OpenSearchDescription"`
 	Xmlns   string   `xml:"xmlns,attr"`
 	ShortName string `xml:"ShortName"`
-	LongName  string `xml:"LongName"`
 	Description string `xml:"Description"`
-	Tags        string `xml:"Tags,omitempty"`
-	InputEncoding  string  `xml:"InputEncoding"`
-	OutputEncoding string  `xml:"OutputEncoding"`
-	Images         []Image `xml:"Image"`
+	Tags        string `xml:"Tags"`
 	URLs []URL `xml:"Url"`
 }

 type URL struct {
 	Type     string `xml:"type,attr"`
-	Method   string `xml:"method,attr,omitempty"`
 	Template string `xml:"template,attr"`
 }

-type Image struct {
-	Height int    `xml:"height,attr"`
-	Width  int    `xml:"width,attr"`
-	Type   string `xml:"type,attr"`
-	URL    string `xml:",chardata"`
-}
-
 func generateOpenSearchXML(config Config) {
-	baseURL := addProtocol(config.Domain)
+	// Ensure that language is initialized in `main` before calling this function
+
+	baseURL := addProtocol(config.Domain)
 	opensearch := OpenSearchDescription{
 		Xmlns:       "http://a9.com/-/spec/opensearch/1.1/",
 		ShortName:   Translate("site_name"),
-		LongName:    Translate("site_name") + " Search",
 		Description: Translate("site_description"),
 		Tags:        Translate("site_tags"),
-		InputEncoding:  "UTF-8",
-		OutputEncoding: "UTF-8",
-		Images: []Image{
-			{
-				Height: 512,
-				Width:  512,
-				Type:   "image/svg+xml",
-				URL:    fmt.Sprintf("%s/static/images/icon-512.svg", baseURL),
-			},
-			{
-				Height: 16,
-				Width:  16,
-				Type:   "image/png",
-				URL:    "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAACXBIWXMAAABYAAAAWAF42ktiAAAAGXRFWHRTb2Z0d2FyZQB3d3cuaW5rc2NhcGUub3Jnm+48GgAAA2pJREFUOI1lk11oW3UYxp//ycnhpHqapmlssrZLkzRbu9qPrVYWxhTpsLPD4fRiDBmj6ubwZhfChCHSgjBB59B5McEboepWVkSLdQ432rVZ2s3B0jQtga7LcnKa9CNtlrT56EnO64XbqPrCe/c8P3hfnocRETZPMpm0hyPK52VGaa8gCCUMTMtkMwsg9CWTK1+2tbVlNuvZJgAfnA5d2eZ2HvQHguza9WHcvx+GXs+jtaUJBw90UonBsL6ykjjhdDp/ekogIhCR7sGDh1MLi0v01pFuKjHX0rPPOana3UqiqYYM5VvJaHPT+a8vkqqqxVAodPyx7x9AIDjTt5xYoVZPB4mmGhLLqqn5hZfo+o0R+uhMLz1TYadDh4/R4aMn6OPes5TP5wt+v383EYFlMpkqnuflD06dZn2XBmAyGZFYTuDke8cQmAqiproKjTsacPG77yEaRIQjCoZ+/oHcLoe/asuWndxcOPxhVJlnlwd+Ba/jsLt9F3gdh6bGevjGb0OZn4f31jgURcHcXBikaTj7xQVms1pbxsbGtnGCIHZ5fbehFgpQVRW/Df2BN15/DaNeH9SNHNwuB4auXntyLwDCqHcC2VyOLBZLB8frOCkiK2CPn9q1fx9cDjsWFuIg0rDdXQcdx2CzVYJAYGAACPH4kiYIQhUHsKLRWAqDKAIA9r/agc/OfQWtSPi09xOAAUVNQywWBwDs2tmMlqZGGI0SCoVCikulUtONDduRzeUAxqCqKgCGib/uQpJKsaEWwBiHfa+8DMY4+APTiMUXYS436dLp9AxXYbZ8s8fzIpwOO2zWSmSzeeyor0dLczNGb40DYGCcDge6OsEYQ7FYxPvvHkUqlUqPjIz8yYgI0WgsNhcOW/suDWDM68Px7rfBcTqo6gaICJJUCq9vAld+GUKdy4E7N69qsiyfc7vdp0FEmJ2dfX5tfb3w4+UBkipdJJpqSLLUkslWR2WVLjKUb6U3j3RT+95OisjRoizLgf7+fuFpEokIwVCoYzWZzAVnQvTOyVNUaq0jg9lOBrOdahva6fyFbym9tqY9jMiBwcHBin9F+clOTk6a7vknf08+epTP5zdImY/R0nJC0zSNolFl0R8InOnp6eE3e9h/6wwAw8PDvNVq3aPX6x2qquZXV1enPB7PFID/if8GRa7Q/nLxcNoAAAAASUVORK5CYII=",
-			},
-		},
 		URLs: []URL{
 			{
 				Type:     "text/html",
-				Method:   "get",
 				Template: fmt.Sprintf("%s/search?q={searchTerms}", baseURL),
 			},
 			{
@@ -77,8 +48,6 @@ func generateOpenSearchXML(config Config) {
 	}
 	defer file.Close()

-	file.WriteString(`<?xml version="1.0" encoding="UTF-8"?>` + "\n")
-
 	enc := xml.NewEncoder(file)
 	enc.Indent(" ", " ")
 	if err := enc.Encode(opensearch); err != nil {
proxy.go (file deleted, 270 lines)
@@ -1,270 +0,0 @@
-package main
-
-import (
-	"fmt"
-	"net/http"
-	"strings"
-	"sync"
-	"time"
-
-	"golang.org/x/net/proxy"
-)
-
-// ProxyConfig holds configuration for a single proxy.
-type ProxyConfig struct {
-	Address  string
-	Username string
-	Password string
-}
-
-// ProxyClient provides an HTTP client pool for proxies.
-type ProxyClient struct {
-	clients []*http.Client
-	lock    sync.Mutex
-	index   int
-}
-
-// Package-level proxy clients
-var (
-	metaProxyClient    *ProxyClient
-	crawlerProxyClient *ProxyClient
-)
-
-// NewProxyClientPool creates a pool of HTTP clients with SOCKS5 proxies.
-func NewProxyClientPool(proxies []ProxyConfig, timeout time.Duration) (*ProxyClient, error) {
-	if len(proxies) == 0 {
-		return nil, fmt.Errorf("no proxies provided")
-	}
-
-	clients := make([]*http.Client, len(proxies))
-
-	for i, pc := range proxies {
-		var auth *proxy.Auth
-		if pc.Username != "" || pc.Password != "" {
-			auth = &proxy.Auth{
-				User:     pc.Username,
-				Password: pc.Password,
-			}
-		}
-		dialer, err := proxy.SOCKS5("tcp", pc.Address, auth, proxy.Direct)
-		if err != nil {
-			return nil, fmt.Errorf("failed to create SOCKS5 dialer for %s: %w", pc.Address, err)
-		}
-
-		transport := &http.Transport{Dial: dialer.Dial}
-		clients[i] = &http.Client{
-			Transport: transport,
-			Timeout:   timeout,
-		}
-	}
-
-	return &ProxyClient{clients: clients}, nil
-}
-
-// Do sends an HTTP request using the next proxy in the pool.
-func (p *ProxyClient) Do(req *http.Request) (*http.Response, error) {
-	p.lock.Lock()
-	client := p.clients[p.index]
-	p.index = (p.index + 1) % len(p.clients)
-	p.lock.Unlock()
-	return client.Do(req)
-}
-
-func (p *ProxyClient) GetProxy() string {
-	p.lock.Lock()
-	defer p.lock.Unlock()
-
-	if len(p.clients) == 0 {
-		return ""
-	}
-
-	// Round-robin proxy retrieval
-	client := p.clients[p.index]
-	p.index = (p.index + 1) % len(p.clients)
-
-	// Assume each client has a proxy string saved
-	// Example implementation depends on how your proxies are configured
-	proxyTransport, ok := client.Transport.(*http.Transport)
-	if ok && proxyTransport.Proxy != nil {
-		proxyURL, _ := proxyTransport.Proxy(nil)
-		if proxyURL != nil {
-			return proxyURL.String()
-		}
-	}
-
-	return ""
-}
-
-// ParseProxies parses the proxy strings in the format ADDRESS:PORT or ADDRESS:PORT:USER:PASSWORD.
-func ParseProxies(proxyStrings []string) []ProxyConfig {
-	var proxies []ProxyConfig
-	for _, proxyStr := range proxyStrings {
-		parts := strings.Split(proxyStr, ":")
-		switch len(parts) {
-		case 2: // ADDRESS:PORT
-			proxies = append(proxies, ProxyConfig{
-				Address: fmt.Sprintf("%s:%s", parts[0], parts[1]),
-			})
-		case 4: // ADDRESS:PORT:USER:PASSWORD
-			proxies = append(proxies, ProxyConfig{
-				Address:  fmt.Sprintf("%s:%s", parts[0], parts[1]),
-				Username: parts[2],
-				Password: parts[3],
-			})
-		default:
-			fmt.Printf("Invalid proxy format: %s\n", proxyStr)
-		}
-	}
-	return proxies
-}
-
-// InitProxies initializes the proxy clients for Meta and Crawler proxies.
-func InitProxies() {
-	// Initialize Meta Proxy Client
-	if config.MetaProxyEnabled {
-		metaProxies := ParseProxies(config.MetaProxies)
-		client, err := NewProxyClientPool(metaProxies, 30*time.Second)
-		if err != nil {
-			if config.MetaProxyStrict {
-				panic(fmt.Sprintf("Failed to initialize Meta proxies: %v", err))
-			}
-			fmt.Printf("Warning: Meta proxy initialization failed: %v\n", err)
-		}
-		metaProxyClient = client
-	}
-
-	// Initialize Crawler Proxy Client
-	if config.CrawlerProxyEnabled {
-		crawlerProxies := ParseProxies(config.CrawlerProxies)
-		client, err := NewProxyClientPool(crawlerProxies, 30*time.Second)
-		if err != nil {
-			if config.CrawlerProxyStrict {
-				panic(fmt.Sprintf("Failed to initialize Crawler proxies: %v", err))
-			}
-			fmt.Printf("Warning: Crawler proxy initialization failed: %v\n", err)
-		}
-		crawlerProxyClient = client
-	}
-}
-
-// Doer is an interface so we can accept *http.Client or *ProxyClient for requests.
-type Doer interface {
-	Do(*http.Request) (*http.Response, error)
-}
-
-// DoProxyRequest handles “try direct, then proxy if needed,” with retries if proxy is used.
-//
-//   - strict: if true, always try proxy first if enabled; if not available, do one direct attempt
-//   - enabled: whether this type of proxy is turned on
-//   - retryCount: how many times to retry with the proxy
-//   - proxyClient: the pool of proxy connections
-func DoProxyRequest(req *http.Request, strict bool, enabled bool, retryCount int, proxyClient *ProxyClient) (*http.Response, error) {
-	// 1) If !strict => try direct once first
-	if !strict {
-		resp, err := tryRequestOnce(req, http.DefaultClient)
-		if isSuccessful(resp, err) {
-			return resp, nil
-		}
-		// If direct fails => if proxy is enabled, retry
-		if enabled && proxyClient != nil {
-			resp, err = tryRequestWithRetry(req, proxyClient, retryCount)
-			if isSuccessful(resp, err) {
-				return resp, nil
-			}
-			return nil, fmt.Errorf("failed after direct & proxy attempts: %v", err)
-		}
-		return nil, fmt.Errorf("request failed direct, no valid proxy: %v", err)
-	}
-
-	// 2) If strict => if proxy is enabled, try it up to “retryCount”
-	if enabled && proxyClient != nil {
-		resp, err := tryRequestWithRetry(req, proxyClient, retryCount)
-		if isSuccessful(resp, err) {
-			return resp, nil
-		}
-		return nil, fmt.Errorf("failed after %d proxy attempts: %v", retryCount, err)
-	}
-
-	// If strict but no proxy => direct once
-	resp, err := tryRequestOnce(req, http.DefaultClient)
-	if isSuccessful(resp, err) {
-		return resp, nil
-	}
-	return nil, fmt.Errorf("direct request failed in strict mode, no proxy: %v", err)
-}
-
-// Helper Wrapper functions for DoProxyRequest()
-func DoMetaProxyRequest(req *http.Request) (*http.Response, error) {
-	return DoProxyRequest(
-		req,
-		config.MetaProxyStrict,
-		config.MetaProxyEnabled,
-		config.MetaProxyRetry,
-		metaProxyClient,
-	)
-}
-func DoCrawlerProxyRequest(req *http.Request) (*http.Response, error) {
-	return DoProxyRequest(
-		req,
-		config.CrawlerProxyStrict,
-		config.CrawlerProxyEnabled,
-		config.CrawlerProxyRetry,
-		metaProxyClient,
-	)
-}
-
-// tryRequestWithRetry tries the request up to "retries" times, waiting 200ms between attempts.
-func tryRequestWithRetry(req *http.Request, client Doer, retries int) (*http.Response, error) {
-	var resp *http.Response
-	var err error
-	for i := 1; i <= retries; i++ {
-		if resp != nil {
-			resp.Body.Close()
-		}
-		printDebug("Attempt %d of %d with proxy/client...", i, retries)
-		resp, err = tryRequestOnce(req, client)
-		if isSuccessful(resp, err) {
-			return resp, nil
-		}
-		time.Sleep(200 * time.Millisecond)
-	}
-	return resp, err
-}
-
-// tryRequestOnce sends a single request with the given client. If client is nil, uses default client.
-func tryRequestOnce(req *http.Request, client Doer) (*http.Response, error) {
-	if client == nil {
-		client = http.DefaultClient
-	}
-	resp, err := client.Do(req)
-	return resp, err
-}
-
-// isSuccessful checks if err==nil & resp != nil & resp.StatusCode in [200..299].
-func isSuccessful(resp *http.Response, err error) bool {
-	if err != nil || resp == nil {
-		return false
-	}
-	return resp.StatusCode >= 200 && resp.StatusCode < 300
-}
-
-// func main() {
-// 	config := loadConfig()
-
-// 	// Initialize proxies if enabled
-// 	if config.CrawlerProxyEnabled || config.MetaProxyEnabled {
-// 		InitProxies()
-// 	}
-
-// 	// Example usage
-// 	if metaProxyClient != nil {
-// 		req, _ := http.NewRequest("GET", "https://example.com", nil)
-// 		resp, err := metaProxyClient.Do(req)
-// 		if err != nil {
-// 			fmt.Printf("Error using MetaProxyClient: %v\n", err)
-// 		} else {
-// 			fmt.Printf("Meta Proxy Response Status: %s\n", resp.Status)
-// 			resp.Body.Close()
-// 		}
-// 	}
-// }
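The deleted pool had exactly three entry points a caller ever touched: ParseProxies, NewProxyClientPool, and Do. A short sketch of how it was meant to be driven, adapted from the commented-out example at the bottom of the file itself; it assumes proxy.go is still in the package, and the proxy addresses are placeholders:

package main

import (
    "fmt"
    "net/http"
    "time"
)

func main() {
    // Both accepted formats from ParseProxies.
    proxies := ParseProxies([]string{
        "127.0.0.1:9050",             // ADDRESS:PORT
        "127.0.0.1:9051:user:secret", // ADDRESS:PORT:USER:PASSWORD
    })

    pool, err := NewProxyClientPool(proxies, 30*time.Second)
    if err != nil {
        panic(err)
    }

    req, err := http.NewRequest("GET", "https://example.com", nil)
    if err != nil {
        panic(err)
    }

    // Do rotates through the pool round-robin under the mutex,
    // so concurrent callers spread load across all SOCKS5 endpoints.
    resp, err := pool.Do(req)
    if err != nil {
        fmt.Printf("request failed: %v\n", err)
        return
    }
    defer resp.Body.Close()
    fmt.Println("status:", resp.Status)
}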
run.bat (42 changes)
@@ -5,7 +5,7 @@ rem Initialize variables
 set SKIP_CONFIG=""
 set PORT=""
 set DOMAIN=""
-set CONFIG_FILE=""
+set BUILD_MODE=false
 set BUILD_OUTPUT=qgato.exe

 rem Parse arguments
@@ -23,14 +23,13 @@ if "%~1"=="--domain" (
     shift
     goto parse_args
 )
-if "%~1"=="--config" (
-    set CONFIG_FILE=%~2
-    shift
+if "%~1"=="--skip-config-check" (
+    set SKIP_CONFIG=--skip-config-check
     shift
     goto parse_args
 )
-if "%~1"=="--skip-config-check" (
-    set SKIP_CONFIG=--skip-config-check
+if "%~1"=="--build" (
+    set BUILD_MODE=true
     shift
     goto parse_args
 )
@@ -42,29 +41,46 @@ exit /b 1
 rem Use the current directory where the script is executed
 pushd %~dp0

-rem Always delete and rebuild the binary
-echo Cleaning previous build...
-if exist "%BUILD_OUTPUT%" del "%BUILD_OUTPUT%"
-
-echo Building application...
-go build -ldflags="-s -w" -o "%BUILD_OUTPUT%" .
-if errorlevel 1 (
-    echo Build failed!
-    exit /b 1
-)
-echo Build successful! Output: %CD%\%BUILD_OUTPUT%
+rem Collect all .go files in the current directory excluding *_test.go
+set GO_FILES=
+for %%f in (*.go) do (
+    echo %%f | findstr "_test.go" >nul
+    if errorlevel 1 (
+        set GO_FILES=!GO_FILES! %%f
+    )
+)
+
+if "%BUILD_MODE%"=="true" (
+    rem Build mode
+    echo Building application...
+    go build -o "%BUILD_OUTPUT%" !GO_FILES!
+    if errorlevel 1 (
+        echo Build failed!
+        exit /b 1
+    )
+    echo Build successful! Output: %CD%\%BUILD_OUTPUT%
+) else (
+    rem Check if the executable exists
+    if not exist "%BUILD_OUTPUT%" (
+        echo Executable not found. Building it first...
+        go build -o "%BUILD_OUTPUT%" !GO_FILES!
+        if errorlevel 1 (
+            echo Build failed! Unable to run the application.
+            exit /b 1
+        )
+    )

-rem Construct the command
-set CMD=%BUILD_OUTPUT% !SKIP_CONFIG!
-if not "%PORT%"=="" set CMD=!CMD! --port %PORT%
-if not "%DOMAIN%"=="" set CMD=!CMD! --domain %DOMAIN%
-if not "%CONFIG_FILE%"=="" set CMD=!CMD! --config %CONFIG_FILE%
+    rem Construct the command
+    set CMD="%BUILD_OUTPUT% !SKIP_CONFIG!"
+    if not "%PORT%"=="" set CMD=!CMD! --port %PORT%
+    if not "%DOMAIN%"=="" set CMD=!CMD! --domain %DOMAIN%

-rem Informative output
-echo Starting application with command: !CMD!
+    rem Informative output
+    echo Starting application with command: !CMD!

-rem Run the built executable
-call !CMD!
+    rem Run the application
+    call !CMD!
+)

 rem Return to the original directory
 popd
run.sh (64 changes)
@@ -4,9 +4,7 @@
 SKIP_CONFIG=""
 PORT=""
 DOMAIN=""
-CONFIG_FILE=""
-BUILD_ONLY=0
-PLATFORM="linux"
+BUILD_MODE=false
 BUILD_OUTPUT="qgato"

 # Parse arguments
@@ -20,22 +18,14 @@ while [ $# -gt 0 ]; do
         DOMAIN=$2
         shift 2
         ;;
-    --config)
-        CONFIG_FILE=$2
-        shift 2
-        ;;
-    --platform)
-        PLATFORM=$2
-        shift 2
-        ;;
-    --build-only)
-        BUILD_ONLY=1
-        shift
-        ;;
     --skip-config-check)
         SKIP_CONFIG="--skip-config-check"
         shift
         ;;
+    --build)
+        BUILD_MODE=true
+        shift
+        ;;
     *)
         echo "Unknown argument: $1"
         exit 1
@@ -46,40 +36,36 @@ done
 # Get the directory of the script
 SCRIPT_DIR=$(dirname "$0")

-# Set GOOS and output filename
-if [ "$PLATFORM" = "windows" ]; then
-    GOOS=windows
-    BUILD_OUTPUT="qgato.exe"
-else
-    GOOS=linux
-    BUILD_OUTPUT="qgato"
-fi
-
-# Clean and build
-echo "Cleaning previous build..."
-rm -f "$SCRIPT_DIR/$BUILD_OUTPUT"
-
-echo "Building application for $PLATFORM..."
-GOOS=$GOOS go build -ldflags="-s -w" -o "$SCRIPT_DIR/$BUILD_OUTPUT" .
-if [ $? -eq 0 ]; then
-    echo "Build successful! Output: $SCRIPT_DIR/$BUILD_OUTPUT"
-else
-    echo "Build failed!"
-    exit 1
-fi
-
-# Skip execution if build-only
-if [ "$BUILD_ONLY" -eq 1 ]; then
-    exit 0
-fi
-
-# Construct the run command
-CMD="$SCRIPT_DIR/$BUILD_OUTPUT $SKIP_CONFIG"
-[ -n "$PORT" ] && CMD="$CMD --port $PORT"
-[ -n "$DOMAIN" ] && CMD="$CMD --domain $DOMAIN"
-[ -n "$CONFIG_FILE" ] && CMD="$CMD --config $CONFIG_FILE"
+# List all Go files in the script directory (excluding test files)
+GO_FILES=$(find "$SCRIPT_DIR" -name '*.go' ! -name '*_test.go' -print)

-echo "Starting application with command: $CMD"
+if $BUILD_MODE; then
+    # Build mode
+    echo "Building application..."
+    go build -o "$SCRIPT_DIR/$BUILD_OUTPUT" $GO_FILES
+    if [ $? -eq 0 ]; then
+        echo "Build successful! Output: $SCRIPT_DIR/$BUILD_OUTPUT"
+    else
+        echo "Build failed!"
+        exit 1
+    fi
+else
+    # Run mode
+    CMD="./$BUILD_OUTPUT $SKIP_CONFIG"
+    [ -n "$PORT" ] && CMD="$CMD --port $PORT"
+    [ -n "$DOMAIN" ] && CMD="$CMD --domain $DOMAIN"

-# Run the built executable
-eval $CMD
+    if [ ! -f "$SCRIPT_DIR/$BUILD_OUTPUT" ]; then
+        echo "Executable not found. Building it first..."
+        go build -o "$SCRIPT_DIR/$BUILD_OUTPUT" $GO_FILES
+        if [ $? -ne 0 ]; then
+            echo "Build failed! Unable to run the application."
+            exit 1
+        fi
+    fi
+
+    echo "Starting application with command: $CMD"
+
+    # Run the executable
+    eval $CMD
+fi
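In practice the new script interface reduces to two invocations: ./run.sh --build compiles qgato and exits, while ./run.sh --port 5000 --domain example.com (port and domain values here are placeholders) builds the binary only if it is missing and then runs it with any of --port, --domain, and --skip-config-check forwarded. run.bat mirrors the same two modes on Windows.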
Some files were not shown because too many files have changed in this diff.