added caching of images to disk
commit 3d47c80446
parent 48994ee32d
11 changed files with 451 additions and 33 deletions

.gitignore (vendored): 1 change
@@ -1,3 +1,4 @@
config.json
opensearch.xml
config.ini
image_cache/

cache-images.go (new file): 223 additions
@@ -0,0 +1,223 @@
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"image"
	"image/gif"
	"image/jpeg"
	"image/png"
	"io"
	"net/http"
	"os"
	"path/filepath"
	"strings"
	"sync"

	"github.com/chai2010/webp"
	"golang.org/x/image/bmp"
	"golang.org/x/image/tiff"
)

var (
	cachingImages    = make(map[string]*sync.Mutex)
	cachingImagesMu  sync.Mutex
	cachingSemaphore = make(chan struct{}, 10) // Limit to 10 concurrent downloads
)

func cacheImage(imageURL, filename string) (string, error) {
	cacheDir := "image_cache"
	cachedImagePath := filepath.Join(cacheDir, filename)

	// Check if the image is already cached
	if _, err := os.Stat(cachedImagePath); err == nil {
		return cachedImagePath, nil
	}

	// Ensure only one goroutine caches the same image
	cachingImagesMu.Lock()
	if _, exists := cachingImages[imageURL]; !exists {
		cachingImages[imageURL] = &sync.Mutex{}
	}
	mu := cachingImages[imageURL]
	cachingImagesMu.Unlock()

	mu.Lock()
	defer mu.Unlock()

	// Double-check if the image was cached while waiting
	if _, err := os.Stat(cachedImagePath); err == nil {
		return cachedImagePath, nil
	}

	cachingSemaphore <- struct{}{}        // Acquire a token
	defer func() { <-cachingSemaphore }() // Release the token

	// Download the image
	resp, err := http.Get(imageURL)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()

	// Read the image data into a byte slice
	data, err := io.ReadAll(resp.Body)
	if err != nil {
		return "", err
	}

	// Detect the content type
	contentType := http.DetectContentType(data)

	// If content type is HTML, skip caching
	if strings.HasPrefix(contentType, "text/html") {
		return "", fmt.Errorf("URL returned HTML content instead of an image: %s", imageURL)
	}

	// Handle SVG files directly
	if contentType == "image/svg+xml" {
		// Ensure the cache directory exists
		if _, err := os.Stat(cacheDir); os.IsNotExist(err) {
			os.Mkdir(cacheDir, os.ModePerm)
		}

		// Save the SVG file as-is
		err = os.WriteFile(cachedImagePath, data, 0644)
		if err != nil {
			return "", err
		}

		// Clean up mutex
		cachingImagesMu.Lock()
		delete(cachingImages, imageURL)
		cachingImagesMu.Unlock()

		return cachedImagePath, nil
	}

	// Decode the image based on the content type
	var img image.Image
	switch contentType {
	case "image/jpeg":
		img, err = jpeg.Decode(bytes.NewReader(data))
	case "image/png":
		img, err = png.Decode(bytes.NewReader(data))
	case "image/gif":
		img, err = gif.Decode(bytes.NewReader(data))
	case "image/webp":
		img, err = webp.Decode(bytes.NewReader(data))
	case "image/bmp":
		img, err = bmp.Decode(bytes.NewReader(data))
	case "image/tiff":
		img, err = tiff.Decode(bytes.NewReader(data))
	default:
		return "", fmt.Errorf("unsupported image type: %s", contentType)
	}

	if err != nil {
		return "", err
	}

	// Ensure the cache directory exists
	if _, err := os.Stat(cacheDir); os.IsNotExist(err) {
		os.Mkdir(cacheDir, os.ModePerm)
	}

	// Open the cached file for writing
	outFile, err := os.Create(cachedImagePath)
	if err != nil {
		return "", err
	}
	defer outFile.Close()

	// Encode the image to WebP and save
	options := &webp.Options{Lossless: false, Quality: 80}
	err = webp.Encode(outFile, img, options)
	if err != nil {
		return "", err
	}

	// Clean up mutex
	cachingImagesMu.Lock()
	delete(cachingImages, imageURL)
	cachingImagesMu.Unlock()

	return cachedImagePath, nil
}

func handleCachedImages(w http.ResponseWriter, r *http.Request) {
	imageName := filepath.Base(r.URL.Path)
	cacheDir := "image_cache"
	cachedImagePath := filepath.Join(cacheDir, imageName)

	if _, err := os.Stat(cachedImagePath); os.IsNotExist(err) {
		// Serve placeholder image with no-store headers
		placeholderPath := "static/images/placeholder.webp"
		placeholderContentType := "image/webp"

		// You can also check for SVG placeholder if needed
		if strings.HasSuffix(imageName, ".svg") {
			placeholderPath = "static/images/placeholder.svg"
			placeholderContentType = "image/svg+xml"
		}

		w.Header().Set("Content-Type", placeholderContentType)
		w.Header().Set("Cache-Control", "no-store, must-revalidate")
		w.Header().Set("Pragma", "no-cache")
		w.Header().Set("Expires", "0")
		http.ServeFile(w, r, placeholderPath)
		return
	}

	// Determine the content type based on the file extension
	extension := strings.ToLower(filepath.Ext(cachedImagePath))
	var contentType string
	switch extension {
	case ".svg":
		contentType = "image/svg+xml"
	case ".jpg", ".jpeg":
		contentType = "image/jpeg"
	case ".png":
		contentType = "image/png"
	case ".gif":
		contentType = "image/gif"
	case ".webp":
		contentType = "image/webp"
	default:
		// Default to binary stream if unknown
		contentType = "application/octet-stream"
	}

	w.Header().Set("Content-Type", contentType)
	w.Header().Set("Cache-Control", "public, max-age=31536000") // Cache the image for 1 year
	http.ServeFile(w, r, cachedImagePath)
}

func handleImageStatus(w http.ResponseWriter, r *http.Request) {
	imageIDs := r.URL.Query().Get("image_ids")
	ids := strings.Split(imageIDs, ",")

	statusMap := make(map[string]string)

	cacheDir := "image_cache"

	printDebug("Received image status request for IDs: %v", ids)
	printDebug("Status map: %v", statusMap)

	for _, id := range ids {
		filename := id + ".webp"
		cachedImagePath := filepath.Join(cacheDir, filename)

		if _, err := os.Stat(cachedImagePath); err == nil {
			// Image is cached and ready
			statusMap[id] = "/image_cache/" + filename
		} else {
			// Image is not ready
			statusMap[id] = ""
		}
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(statusMap)
}
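
For orientation, here is a minimal sketch (not part of the diff) of how cacheImage is meant to be driven; it mirrors the pattern fetchImageResults uses in images.go further down: the cache filename is the MD5 hex of the original media URL, the download runs in a background goroutine, and the cached file is later served through the /image_cache/ route registered in main.go. The URL below is a made-up example, and the snippet assumes it lives in the same package as cache-images.go.

package main

import (
	"crypto/md5"
	"encoding/hex"
	"log"
)

// cacheExample is a hypothetical caller placed next to cache-images.go;
// exampleURL is an assumed value, not taken from the commit.
func cacheExample() {
	exampleURL := "https://example.com/picture.jpg"

	// Derive the cache key the same way fetchImageResults does:
	// MD5 hex of the original media URL plus a ".webp" suffix.
	hasher := md5.New()
	hasher.Write([]byte(exampleURL))
	filename := hex.EncodeToString(hasher.Sum(nil)) + ".webp"

	// The UI can point at /image_cache/<hash>.webp right away; the
	// download and WebP conversion happen in the background, and
	// handleCachedImages serves a placeholder until the file exists.
	go func() {
		if _, err := cacheImage(exampleURL, filename); err != nil {
			log.Printf("failed to cache %s: %v", exampleURL, err)
		}
	}()
}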

cache.go: 1 change
@@ -25,6 +25,7 @@ type TextSearchResult struct {
}

type ImageSearchResult struct {
	ID        string
	Thumbnail string
	Title     string
	Media     string

config.go: 18 changes
@@ -7,6 +7,7 @@ import (
	"strconv"
	"strings"
	"sync"
	"time"

	"github.com/fsnotify/fsnotify"
	"gopkg.in/ini.v1"
@@ -78,6 +79,7 @@ func saveConfig(config Config) {
	sec.Key("CrawlerEnabled").SetValue(strconv.FormatBool(config.CrawlerEnabled))
	sec.Key("WebsiteEnabled").SetValue(strconv.FormatBool(config.WebsiteEnabled))
	sec.Key("LogLevel").SetValue(strconv.Itoa(config.LogLevel))
	sec.Key("HardCacheDuration").SetValue(config.HardCacheDuration.String())

	err := cfg.SaveTo(configFilePath)
	if err != nil {
@@ -130,6 +132,21 @@ func loadConfig() Config {
		logLevel = 1
	}

	// Read HardCacheDuration
	hardCacheStr := cfg.Section("").Key("HardCacheDuration").String()
	var hardCacheDuration time.Duration
	if hardCacheStr != "" {
		duration, err := time.ParseDuration(hardCacheStr)
		if err != nil {
			printWarn("Invalid HardCacheDuration format, defaulting to 0: %v", err)
			hardCacheDuration = 0
		} else {
			hardCacheDuration = duration
		}
	} else {
		hardCacheDuration = 0 // Default to 0 if not set
	}

	config = Config{
		Port:              port,
		AuthCode:          cfg.Section("").Key("AuthCode").String(),
@@ -140,6 +157,7 @@ func loadConfig() Config {
		CrawlerEnabled:    crawlerEnabled,
		WebsiteEnabled:    websiteEnabled,
		LogLevel:          logLevel,
		HardCacheDuration: hardCacheDuration,
	}

	return config
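
Because the new HardCacheDuration option is parsed with time.ParseDuration, any Go duration string works in config.ini ("30m", "12h", "168h", and so on); an empty or invalid value falls back to 0, which leaves image caching disabled. A tiny sketch of that parsing, with "72h" as an assumed example value:

package main

import (
	"fmt"
	"time"
)

func main() {
	// "72h" is only an example; anything time.ParseDuration understands
	// can be used as the HardCacheDuration value in config.ini.
	d, err := time.ParseDuration("72h")
	if err != nil {
		// loadConfig logs a warning and falls back to 0 in this case.
		fmt.Println("invalid HardCacheDuration:", err)
		return
	}
	fmt.Println(d) // 72h0m0s
}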

go.mod: 2 changes
@@ -27,6 +27,6 @@ require (
	github.com/disintegration/imaging v1.6.2 // indirect
	github.com/fsnotify/fsnotify v1.7.0 // indirect
	github.com/leonelquinteros/gotext v1.7.0 // indirect
	golang.org/x/image v0.20.0 // indirect
	golang.org/x/image v0.21.0 // indirect
	gopkg.in/ini.v1 v1.67.0 // indirect
)

go.sum: 2 changes
@@ -41,6 +41,8 @@ golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8 h1:hVwzHzIUGRjiF7EcUjqNxk3
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.20.0 h1:7cVCUjQwfL18gyBJOmYvptfSHS8Fb3YUDtfLIZ7Nbpw=
golang.org/x/image v0.20.0/go.mod h1:0a88To4CYVBAHp5FXJm8o7QbUl37Vd85ply1vyD8auM=
golang.org/x/image v0.21.0 h1:c5qV36ajHpdj4Qi0GnE0jUc/yuo33OLFaa0d+crTD5s=
golang.org/x/image v0.21.0/go.mod h1:vUbsLavqK/W303ZroQQVKQ+Af3Yl6Uz1Ppu5J/cLz78=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=

images.go: 33 changes
@@ -1,6 +1,8 @@
package main

import (
	"crypto/md5"
	"encoding/hex"
	"fmt"
	"net/http"
	"time"
@@ -96,16 +98,41 @@ func fetchImageResults(query, safe, lang string, page int) []ImageSearchResult {
		}

		for _, result := range searchResults {
			results = append(results, result.(ImageSearchResult))
			imageResult := result.(ImageSearchResult)
			if config.HardCacheDuration > 0 {
				// Save the original Media URL before overwriting
				originalMediaURL := imageResult.Media

				// Generate hash from the original media URL
				hasher := md5.New()
				hasher.Write([]byte(originalMediaURL))
				hash := hex.EncodeToString(hasher.Sum(nil))
				filename := hash + ".webp"

				// Set the Media URL to point to the cached image path
				cacheURL := "/image_cache/" + filename
				imageResult.Media = cacheURL
				imageResult.ThumbProxy = cacheURL

				// Assign the ID
				imageResult.ID = hash

				// Start caching in the background
				go func(originalURL, filename string) {
					_, err := cacheImage(originalURL, filename)
					if err != nil {
						printWarn("Failed to cache image %s: %v", originalURL, err)
					}
				}(originalMediaURL, filename)
			}
			results = append(results, imageResult)
		}

		// If results are found, break out of the loop
		if len(results) > 0 {
			break
		}
	}

	// If no results found after trying all engines
	if len(results) == 0 {
		printWarn("No image results found for query: %s, trying other nodes", query)
		results = tryOtherNodesForImageSearch(query, safe, lang, page, []string{hostID})

init.go: 2 changes
@@ -14,6 +14,7 @@ type Config struct {
	CrawlerEnabled    bool
	WebsiteEnabled    bool
	LogLevel          int
	HardCacheDuration time.Duration
}

var defaultConfig = Config{
@@ -25,6 +26,7 @@ var defaultConfig = Config{
	CrawlerEnabled:    true,
	WebsiteEnabled:    true,
	LogLevel:          1,
	HardCacheDuration: 0,
}

const configFilePath = "config.ini"

main.go: 2 changes
@@ -204,6 +204,8 @@ func runServer() {
	http.HandleFunc("/node", handleNodeRequest)
	http.HandleFunc("/settings", handleSettings)
	http.HandleFunc("/save-settings", handleSaveSettings)
	http.HandleFunc("/image_cache/", handleCachedImages)
	http.HandleFunc("/image_status", handleImageStatus)
	http.HandleFunc("/opensearch.xml", func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/opensearchdescription+xml")
		http.ServeFile(w, r, "static/opensearch.xml")

static/images/placeholder.svg (new file, 1.5 KiB): 17 additions
@@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
<svg width="800px" height="800px" viewBox="0 0 32 32" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:sketch="http://www.bohemiancoding.com/sketch/ns">

    <title>image-picture</title>
    <desc>Created with Sketch Beta.</desc>
    <defs>

    </defs>
    <g id="Page-1" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd" sketch:type="MSPage">
        <g id="Icon-Set-Filled" sketch:type="MSLayerGroup" transform="translate(-362.000000, -101.000000)" fill="#000000">
            <path d="M392,129 C392,130.104 391.104,131 390,131 L384.832,131 L377.464,123.535 L386,114.999 L392,120.999 L392,129 L392,129 Z M366,131 C364.896,131 364,130.104 364,129 L364,128.061 L371.945,120.945 L382.001,131 L366,131 L366,131 Z M370,105 C372.209,105 374,106.791 374,109 C374,111.209 372.209,113 370,113 C367.791,113 366,111.209 366,109 C366,106.791 367.791,105 370,105 L370,105 Z M390,101 L366,101 C363.791,101 362,102.791 362,105 L362,129 C362,131.209 363.791,133 366,133 L390,133 C392.209,133 394,131.209 394,129 L394,105 C394,102.791 392.209,101 390,101 L390,101 Z M370,111 C371.104,111 372,110.104 372,109 C372,107.896 371.104,107 370,107 C368.896,107 368,107.896 368,109 C368,110.104 368.896,111 370,111 L370,111 Z" id="image-picture" sketch:type="MSShapeGroup">

            </path>
        </g>
    </g>
</svg>
@ -74,7 +74,13 @@
|
|||
<!-- Images Grid -->
|
||||
{{ range $index, $result := .Results }}
|
||||
<div class="image">
|
||||
<img src="{{ .ThumbProxy }}" alt="{{ .Title }}" data-media="{{ .Media }}" class="clickable">
|
||||
<img
|
||||
src="/static/images/placeholder.svg"
|
||||
data-id="{{ $result.ID }}"
|
||||
alt="{{ .Title }}"
|
||||
data-media="{{ .Media }}"
|
||||
class="clickable"
|
||||
>
|
||||
<div class="resolution">{{ .Width }} × {{ .Height }}</div>
|
||||
<div class="details">
|
||||
<span class="img_title clickable">{{ .Title }}</span>
|
||||
|
@ -216,6 +222,125 @@
|
|||
});
|
||||
});
|
||||
</script>
|
||||
<!-- JavaScript to Load Images -->
|
||||
<script>
|
||||
document.addEventListener("DOMContentLoaded", function() {
|
||||
let imageMap = {}; // Map of image IDs to img elements
|
||||
let loadedImageIDs = new Set(); // Keep track of loaded image IDs
|
||||
let pollingInterval = 2000; // Initial polling interval in milliseconds
|
||||
let polling = false;
|
||||
|
||||
function initializeImages() {
|
||||
const images = document.querySelectorAll("img[data-id]");
|
||||
images.forEach((img) => {
|
||||
const id = img.dataset.id;
|
||||
if (!imageMap[id]) {
|
||||
imageMap[id] = img;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Initialize with images present at page load
|
||||
initializeImages();
|
||||
|
||||
// Set up MutationObserver to detect new images added to the DOM
|
||||
const observer = new MutationObserver((mutationsList) => {
|
||||
for (let mutation of mutationsList) {
|
||||
if (mutation.type === 'childList') {
|
||||
mutation.addedNodes.forEach((node) => {
|
||||
if (node.nodeType === Node.ELEMENT_NODE) {
|
||||
if (node.matches && node.matches('img[data-id]')) {
|
||||
const img = node;
|
||||
const id = img.dataset.id;
|
||||
if (!imageMap[id]) {
|
||||
imageMap[id] = img;
|
||||
console.log('New image added:', id);
|
||||
if (!polling) {
|
||||
checkImageStatus(); // Start polling if not already started
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Check for nested images within added nodes
|
||||
const nestedImages = node.querySelectorAll && node.querySelectorAll('img[data-id]');
|
||||
if (nestedImages && nestedImages.length > 0) {
|
||||
nestedImages.forEach((img) => {
|
||||
const id = img.dataset.id;
|
||||
if (!imageMap[id]) {
|
||||
imageMap[id] = img;
|
||||
console.log('New nested image added:', id);
|
||||
if (!polling) {
|
||||
checkImageStatus(); // Start polling if not already started
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Start observing the document body for added nodes
|
||||
observer.observe(document.body, { childList: true, subtree: true });
|
||||
|
||||
function checkImageStatus() {
|
||||
polling = true;
|
||||
const imageIDs = Object.keys(imageMap).filter(id => !loadedImageIDs.has(id));
|
||||
if (imageIDs.length === 0) {
|
||||
polling = false;
|
||||
console.log('All images loaded.');
|
||||
return;
|
||||
}
|
||||
|
||||
console.log('Checking status for images:', imageIDs); // Debugging
|
||||
|
||||
fetch('/image_status?image_ids=' + imageIDs.join(','))
|
||||
.then(response => response.json())
|
||||
.then(statusMap => {
|
||||
console.log('Status map:', statusMap); // Debugging
|
||||
let imagesStillLoading = false;
|
||||
|
||||
for (const [id, url] of Object.entries(statusMap)) {
|
||||
const img = imageMap[id];
|
||||
if (url) {
|
||||
// Append cache-busting query parameter
|
||||
const cacheBustingUrl = url + '?t=' + new Date().getTime();
|
||||
if (img.src !== cacheBustingUrl) {
|
||||
img.src = cacheBustingUrl;
|
||||
img.onload = function() {
|
||||
// Image loaded successfully
|
||||
img.classList.add('loaded');
|
||||
loadedImageIDs.add(id);
|
||||
};
|
||||
img.onerror = function() {
|
||||
console.error('Failed to load image:', url);
|
||||
};
|
||||
}
|
||||
} else {
|
||||
imagesStillLoading = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (imagesStillLoading) {
|
||||
// Poll again after a delay
|
||||
setTimeout(checkImageStatus, pollingInterval);
|
||||
} else {
|
||||
polling = false;
|
||||
console.log('All images loaded.');
|
||||
}
|
||||
})
|
||||
.catch(error => {
|
||||
console.error('Error checking image status:', error);
|
||||
// Retry after a delay in case of error
|
||||
setTimeout(checkImageStatus, pollingInterval * 2);
|
||||
});
|
||||
}
|
||||
|
||||
// Start polling
|
||||
checkImageStatus();
|
||||
});
|
||||
</script>
|
||||
<script>
|
||||
// Check if JavaScript is enabled and modify the DOM accordingly
|
||||
document.getElementById('content').classList.remove('js-enabled');
|
||||
|
|
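
The polling script above relies on the shape of the /image_status response produced by handleImageStatus: a JSON object keyed by image ID, where a ready image maps to its /image_cache/ path and a pending one maps to an empty string. A small sketch of that payload, using made-up IDs:

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Made-up IDs; the real keys are MD5 hashes of the original media URLs.
	statusMap := map[string]string{
		"0f343b0931126a20f133d67c2b018a3b": "/image_cache/0f343b0931126a20f133d67c2b018a3b.webp", // cached and ready
		"9e107d9d372bb6826bd81d3542a419d6": "",                                                    // still downloading
	}
	out, _ := json.Marshal(statusMap)
	fmt.Println(string(out))
}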