changed to golang
This commit is contained in:
parent
f904f731f3
commit
89687eecd6
12 changed files with 867 additions and 484 deletions
3
.gitignore
vendored
3
.gitignore
vendored
|
@ -2,3 +2,6 @@
|
||||||
/patches
|
/patches
|
||||||
/packages.json
|
/packages.json
|
||||||
/packages_temp.json
|
/packages_temp.json
|
||||||
|
/APKINDEX
|
||||||
|
/APPINDEX
|
||||||
|
/browser-amd64-nightly-linux.tar.gz
|
|
@ -1,18 +0,0 @@
|
||||||
C:c87e614f4e4716fa4c68a7dfc62bf77f10dfc4bd
|
|
||||||
P:=mypackage
|
|
||||||
V:=1.0.0
|
|
||||||
A:=x86_64
|
|
||||||
S:=123456
|
|
||||||
I:=123456
|
|
||||||
T:=My package description
|
|
||||||
U:=https://example.com
|
|
||||||
L:=GPL-2.0-only
|
|
||||||
o:=mypackag5e
|
|
||||||
m:=Maintain <maintainer@example.com>
|
|
||||||
t:1722785057
|
|
||||||
c:c87e614f4e4716fa4c68a7dfc62bf77f10dfc4bd
|
|
||||||
D:=dependency1>=1.3.0
|
|
||||||
p:=mypackage-=1.0.0.apk
|
|
||||||
q:
|
|
||||||
Z:c87e614f4e4716fa4c68a7dfc62bf77f10dfc4bd
|
|
||||||
|
|
195
builder.sh
195
builder.sh
|
@ -1,195 +0,0 @@
|
||||||
#!/bin/bash
|
|
||||||
|
|
||||||
# Define source paths
|
|
||||||
SOURCE_PATH="./mozilla-central"
|
|
||||||
PATCHES_DIR="$SOURCE_PATH/Spitfire"
|
|
||||||
SOURCE_REPO="https://hg.mozilla.org/mozilla-central"
|
|
||||||
PATCHES_REPO="https://weforgecode.xyz/Spitfire/Browser.git"
|
|
||||||
|
|
||||||
SOURCE_PATH=$(realpath "$SOURCE_PATH") || { echo "Failed to resolve SOURCE_PATH. Exiting."; exit 1; }
|
|
||||||
PATCHES_DIR=$(realpath "$PATCHES_DIR") || { echo "Failed to resolve PATCHES_DIR. Exiting."; exit 1; }
|
|
||||||
|
|
||||||
# Array to store errors
|
|
||||||
errors=()
|
|
||||||
|
|
||||||
# Function to download Mozilla source if not present
|
|
||||||
download_source() {
|
|
||||||
if [ ! -d "$SOURCE_PATH" ]; then
|
|
||||||
echo "Mozilla source not found. Cloning from repository..."
|
|
||||||
hg clone "$SOURCE_REPO" "$SOURCE_PATH" || errors+=("Failed to clone Mozilla repository.")
|
|
||||||
else
|
|
||||||
echo "Mozilla source already exists."
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
# Function to discard uncommitted changes
|
|
||||||
discard_changes() {
|
|
||||||
echo "Discarding uncommitted changes..."
|
|
||||||
hg revert --all --no-backup -R "$SOURCE_PATH" || errors+=("Failed to revert changes in Mozilla repository.")
|
|
||||||
}
|
|
||||||
|
|
||||||
# Function to clean build
|
|
||||||
clean_build() {
|
|
||||||
echo "Cleaning build..."
|
|
||||||
cd "$SOURCE_PATH" || { errors+=("Failed to navigate to source directory."); return; }
|
|
||||||
hg revert --all --no-backup || errors+=("Failed to revert changes in Mozilla repository.")
|
|
||||||
./mach clobber || errors+=("Failed to clean build.")
|
|
||||||
}
|
|
||||||
|
|
||||||
# Function to update Mozilla repository
|
|
||||||
update_repo() {
|
|
||||||
echo "Updating Mozilla repository..."
|
|
||||||
cd "$SOURCE_PATH" || { errors+=("Failed to navigate to source directory."); return; }
|
|
||||||
hg pull -u || errors+=("Failed to update Mozilla repository.")
|
|
||||||
}
|
|
||||||
|
|
||||||
# Function to update patches
|
|
||||||
update_patches() {
|
|
||||||
echo "Updating patches..."
|
|
||||||
if [ -d "$PATCHES_DIR" ]; then
|
|
||||||
echo "Patches directory already exists. Cleaning and pulling updates..."
|
|
||||||
cd "$PATCHES_DIR" || { errors+=("Failed to navigate to patches directory."); return; }
|
|
||||||
git clean -xdf || errors+=("Failed to clean patches directory.")
|
|
||||||
|
|
||||||
git stash push --include-untracked || echo "No local changes to save."
|
|
||||||
|
|
||||||
git fetch || errors+=("Failed to fetch updates from patches repository.")
|
|
||||||
|
|
||||||
if git show-ref --verify --quiet refs/heads/main; then
|
|
||||||
git rebase origin/main || errors+=("Failed to rebase updates from main branch.")
|
|
||||||
elif git show-ref --verify --quiet refs/heads/master; then
|
|
||||||
git rebase origin/master || errors+=("Failed to rebase updates from master branch.")
|
|
||||||
else
|
|
||||||
errors+=("No valid branch (main or master) found in patches repository.")
|
|
||||||
return
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Check if there are any stashes before popping
|
|
||||||
if git stash list | grep -q 'stash@{0}'; then
|
|
||||||
git stash pop || errors+=("Failed to apply and drop stashed changes.")
|
|
||||||
else
|
|
||||||
echo "No stash entries found, skipping pop."
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
echo "Patches directory does not exist. Cloning repository..."
|
|
||||||
git clone "$PATCHES_REPO" "$PATCHES_DIR" || errors+=("Failed to clone patches repository.")
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo "Copying files from patches directory to Firefox source directory..."
|
|
||||||
rsync -av --exclude='.git' "$PATCHES_DIR/" "$SOURCE_PATH/" || errors+=("Failed to copy files.")
|
|
||||||
}
|
|
||||||
|
|
||||||
# Function to configure Spitfire
|
|
||||||
configure() {
|
|
||||||
echo "Configuring Spitfire..."
|
|
||||||
cd "$SOURCE_PATH" || { errors+=("Failed to navigate to source directory."); return; }
|
|
||||||
./mach configure || errors+=("Configuration failed.")
|
|
||||||
}
|
|
||||||
|
|
||||||
# Function to build Spitfire
|
|
||||||
build() {
|
|
||||||
echo "Building Spitfire..."
|
|
||||||
cd "$SOURCE_PATH" || { errors+=("Failed to navigate to source directory."); return; }
|
|
||||||
./mach build || errors+=("Build failed.")
|
|
||||||
}
|
|
||||||
|
|
||||||
# Function to run the project after build
|
|
||||||
run_project() {
|
|
||||||
echo "Running the project..."
|
|
||||||
cd "$SOURCE_PATH" || { errors+=("Failed to navigate to browser directory."); return; }
|
|
||||||
./mach run || errors+=("Failed to run the project.")
|
|
||||||
}
|
|
||||||
|
|
||||||
# Function to print usage instructions
|
|
||||||
print_help() {
|
|
||||||
echo "Usage: ./builder.sh [options]"
|
|
||||||
echo "Options:"
|
|
||||||
echo " -a, --all : Perform all steps (build, clean, update)"
|
|
||||||
echo " -b, --build : Build Spitfire"
|
|
||||||
echo " -c, --clean : Clean build"
|
|
||||||
echo " -u, --update : Update Mozilla repository"
|
|
||||||
echo " -p, --patches : Update patches"
|
|
||||||
echo " -r, --run : Run the project after build using mach run in the browser directory"
|
|
||||||
echo " -h, --help : Display this help message"
|
|
||||||
exit 0
|
|
||||||
}
|
|
||||||
|
|
||||||
# Parse command line arguments
|
|
||||||
while [[ $# -gt 0 ]]; do
|
|
||||||
key="$1"
|
|
||||||
case $key in
|
|
||||||
-a|--all)
|
|
||||||
all=true
|
|
||||||
;;
|
|
||||||
-b|--build)
|
|
||||||
build=true
|
|
||||||
;;
|
|
||||||
-c|--clean)
|
|
||||||
clean=true
|
|
||||||
;;
|
|
||||||
-u|--update)
|
|
||||||
update=true
|
|
||||||
;;
|
|
||||||
-p|--patches)
|
|
||||||
patches=true
|
|
||||||
;;
|
|
||||||
-r|--run)
|
|
||||||
run=true
|
|
||||||
;;
|
|
||||||
-h|--help)
|
|
||||||
print_help
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
echo "Invalid option: $key"
|
|
||||||
print_help
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
shift
|
|
||||||
done
|
|
||||||
|
|
||||||
# Main script execution based on flags
|
|
||||||
if [ "$all" = true ]; then
|
|
||||||
download_source
|
|
||||||
discard_changes
|
|
||||||
clean_build
|
|
||||||
update_repo
|
|
||||||
update_patches
|
|
||||||
configure
|
|
||||||
build
|
|
||||||
if [ "$run" = true ]; then
|
|
||||||
run_project
|
|
||||||
fi
|
|
||||||
echo "Spitfire build completed successfully."
|
|
||||||
elif [ "$build" = true ]; then
|
|
||||||
configure
|
|
||||||
build
|
|
||||||
if [ "$run" = true ]; then
|
|
||||||
run_project
|
|
||||||
fi
|
|
||||||
echo "Spitfire build completed successfully."
|
|
||||||
elif [ "$clean" = true ]; then
|
|
||||||
clean_build
|
|
||||||
echo "Cleaned Firefox build."
|
|
||||||
elif [ "$update" = true ]; then
|
|
||||||
download_source
|
|
||||||
update_repo
|
|
||||||
echo "Mozilla repository updated."
|
|
||||||
elif [ "$patches" = true ]; then
|
|
||||||
download_source
|
|
||||||
update_patches
|
|
||||||
echo "Patches updated."
|
|
||||||
elif [ "$run" = true ]; then
|
|
||||||
run_project
|
|
||||||
else
|
|
||||||
print_help
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Print all collected errors at the end
|
|
||||||
if [ ${#errors[@]} -ne 0 ]; then
|
|
||||||
echo "The following errors occurred during execution:"
|
|
||||||
for error in "${errors[@]}"; do
|
|
||||||
echo "- $error"
|
|
||||||
done
|
|
||||||
fi
|
|
||||||
|
|
||||||
exit 0
|
|
3
go.mod
Normal file
3
go.mod
Normal file
|
@ -0,0 +1,3 @@
|
||||||
|
module spitfire
|
||||||
|
|
||||||
|
go 1.18
|
260
main.go
Normal file
260
main.go
Normal file
|
@ -0,0 +1,260 @@
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"runtime" // for detecting system architecture and platform
|
||||||
|
"time"
|
||||||
|
"spitfire/spitfire"
|
||||||
|
//"errors"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
// Define all flags as package-level variables
|
||||||
|
all bool
|
||||||
|
buildFlag bool
|
||||||
|
clean bool
|
||||||
|
update bool
|
||||||
|
patches bool
|
||||||
|
run bool
|
||||||
|
compress bool
|
||||||
|
buildPath string
|
||||||
|
target string
|
||||||
|
version string
|
||||||
|
component string
|
||||||
|
arch string
|
||||||
|
release string
|
||||||
|
platform string
|
||||||
|
upload bool
|
||||||
|
uploadPath string
|
||||||
|
sourceRepo = "https://hg.mozilla.org/mozilla-central"
|
||||||
|
patchesRepo = "https://weforgecode.xyz/Spitfire/Browser.git"
|
||||||
|
url = "https://spitfirebrowser.com/"
|
||||||
|
licence = "AGPL-3.0"
|
||||||
|
name = "Spitfire"
|
||||||
|
maintainer = "Internet Addict"
|
||||||
|
initialDir string
|
||||||
|
)
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
flag.StringVar(&buildPath, "p", "", "Path to the build directory")
|
||||||
|
flag.StringVar(&target, "t", "", "Target location format: component-arch-release-platform")
|
||||||
|
flag.BoolVar(&compress, "c", false, "Compress the build directory into a tar.gz file before uploading")
|
||||||
|
flag.StringVar(&version, "v", "", "Specify version for the package. For nightly, use current date if not specified.")
|
||||||
|
flag.StringVar(&component, "component", "browser", "Component name (default: browser)")
|
||||||
|
flag.StringVar(&arch, "arch", runtime.GOARCH, "Architecture (default: system architecture)")
|
||||||
|
flag.StringVar(&release, "release", "nightly", "Release type (default: nightly)")
|
||||||
|
flag.StringVar(&platform, "platform", runtime.GOOS, "Platform (default: system platform)")
|
||||||
|
flag.BoolVar(&all, "a", false, "Perform all steps (build, clean, update)")
|
||||||
|
flag.BoolVar(&buildFlag, "b", false, "Build Spitfire")
|
||||||
|
flag.BoolVar(&clean, "clean", false, "Clean build")
|
||||||
|
flag.BoolVar(&update, "u", false, "Update Mozilla repository")
|
||||||
|
flag.BoolVar(&patches, "patches", false, "Update patches")
|
||||||
|
flag.BoolVar(&run, "r", false, "Run the project after build")
|
||||||
|
flag.BoolVar(&upload, "upload", false, "Upload the compressed build file to SourceForge")
|
||||||
|
flag.StringVar(&uploadPath, "upload-path", "", "Path to the file to upload if no build present")
|
||||||
|
flag.Bool("h", false, "Display help message")
|
||||||
|
}
|
||||||
|
|
||||||
|
func printHelp() {
|
||||||
|
fmt.Println("Usage: ./main -p=<path-to-build> -t=<target> [-c|--compress] [-v|--version=<version>] [-component=<component>] [-arch=<architecture>] [-release=<release>] [-platform=<platform>]")
|
||||||
|
flag.PrintDefaults()
|
||||||
|
fmt.Println("Example: go run . --upload -c --upload-path=./mozilla-central/obj-x86_64-pc-linux-gnu/dist/bin -a")
|
||||||
|
os.Exit(0)
|
||||||
|
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
flag.Parse()
|
||||||
|
|
||||||
|
if flag.Lookup("h").Value.(flag.Getter).Get().(bool) {
|
||||||
|
printHelp()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set version to current date if it's empty and release is nightly
|
||||||
|
if version == "" && release == "nightly" {
|
||||||
|
version = time.Now().Format("2006.01.02") // Set version to current date if nightly
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save the initial directory
|
||||||
|
var err error
|
||||||
|
initialDir, err = os.Getwd()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Failed to get current working directory: %v", err)
|
||||||
|
}
|
||||||
|
fmt.Printf("Initial working directory: %s\n", initialDir)
|
||||||
|
|
||||||
|
if all || buildFlag {
|
||||||
|
BuildProcess()
|
||||||
|
}
|
||||||
|
|
||||||
|
if compress || upload {
|
||||||
|
PackageAndUploadProcess()
|
||||||
|
}
|
||||||
|
|
||||||
|
spitfire.PrintErrors()
|
||||||
|
}
|
||||||
|
|
||||||
|
// BuildProcess handles the build process: downloading, cleaning, configuring, and building the project.
|
||||||
|
func BuildProcess() {
|
||||||
|
sourcePath, err := spitfire.ResolvePath("./mozilla-central")
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Error resolving source path: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
patchesDir, err := spitfire.ResolvePath(filepath.Join(sourcePath, "Spitfire"))
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Error resolving patches directory: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if all {
|
||||||
|
spitfire.DownloadSource(sourcePath, sourceRepo)
|
||||||
|
spitfire.DiscardChanges(sourcePath)
|
||||||
|
spitfire.CleanBuild(sourcePath)
|
||||||
|
spitfire.UpdateRepo(sourcePath)
|
||||||
|
spitfire.UpdatePatches(patchesDir, patchesRepo, sourcePath)
|
||||||
|
spitfire.Configure(sourcePath)
|
||||||
|
spitfire.Build(sourcePath)
|
||||||
|
if run {
|
||||||
|
spitfire.RunProject(sourcePath)
|
||||||
|
}
|
||||||
|
fmt.Println("Spitfire build completed successfully.")
|
||||||
|
} else if clean {
|
||||||
|
spitfire.CleanBuild(sourcePath)
|
||||||
|
fmt.Println("Cleaned Firefox build.")
|
||||||
|
} else if update {
|
||||||
|
spitfire.DownloadSource(sourcePath, sourceRepo)
|
||||||
|
spitfire.UpdateRepo(sourcePath)
|
||||||
|
fmt.Println("Mozilla repository updated.")
|
||||||
|
} else if patches {
|
||||||
|
spitfire.DownloadSource(sourcePath, sourceRepo)
|
||||||
|
spitfire.UpdatePatches(patchesDir, patchesRepo, sourcePath)
|
||||||
|
fmt.Println("Patches updated.")
|
||||||
|
} else if buildFlag {
|
||||||
|
spitfire.Configure(sourcePath)
|
||||||
|
spitfire.Build(sourcePath)
|
||||||
|
if run {
|
||||||
|
spitfire.RunProject(sourcePath)
|
||||||
|
}
|
||||||
|
fmt.Println("Spitfire build completed successfully.")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// PackageAndUploadProcess handles compressing, packaging, and uploading the build to SourceForge.
|
||||||
|
func PackageAndUploadProcess() {
|
||||||
|
|
||||||
|
// Restore working directory before performing SourceForge operations
|
||||||
|
restoreWorkingDirectory()
|
||||||
|
|
||||||
|
pathToUse := buildPath
|
||||||
|
if upload && uploadPath != "" {
|
||||||
|
pathToUse = uploadPath
|
||||||
|
}
|
||||||
|
|
||||||
|
if pathToUse == "" {
|
||||||
|
log.Fatalf("Error: no valid build or upload path provided.")
|
||||||
|
}
|
||||||
|
|
||||||
|
// // This is stupid, it wait for the path to exist (up to a maximum wait time)
|
||||||
|
// err := waitForPath(pathToUse, 60, 5) // Max 60 seconds, checking every 5 seconds
|
||||||
|
// if err != nil {
|
||||||
|
// log.Fatalf("Error: Build path or upload path not found: %v", err)
|
||||||
|
// }
|
||||||
|
|
||||||
|
uncompressedSize, err := spitfire.GetDirectorySize(pathToUse)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Failed to calculate uncompressed size: %v", err)
|
||||||
|
}
|
||||||
|
fmt.Printf("Uncompressed directory size: %d bytes\n", uncompressedSize)
|
||||||
|
|
||||||
|
outputCompressedFile := filepath.Join(".", fmt.Sprintf("%s-%s-%s-%s.tar.gz", component, arch, release, platform))
|
||||||
|
if compress {
|
||||||
|
err := spitfire.CompressDirectory(pathToUse, outputCompressedFile)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Failed to compress build directory: %v", err)
|
||||||
|
}
|
||||||
|
fmt.Printf("Build directory compressed to: %s\n", outputCompressedFile)
|
||||||
|
}
|
||||||
|
|
||||||
|
compressedSize, err := spitfire.GetFileSize(outputCompressedFile)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Failed to get compressed file size: %v", err)
|
||||||
|
}
|
||||||
|
fmt.Printf("Compressed file size: %d bytes\n", compressedSize)
|
||||||
|
|
||||||
|
if upload {
|
||||||
|
config, err := spitfire.LoadConfig()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Failed to load SourceForge config: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, err := os.Stat(outputCompressedFile); err == nil {
|
||||||
|
err = spitfire.Upload(config, outputCompressedFile, "/home/frs/project/spitfire-browser/"+component+"/"+arch+"/"+release+"/"+version+"/")
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Failed to upload compressed file: %v", err)
|
||||||
|
}
|
||||||
|
fmt.Println("Compressed file uploaded successfully.")
|
||||||
|
} else {
|
||||||
|
log.Fatalf("No compressed file found to upload.")
|
||||||
|
}
|
||||||
|
|
||||||
|
err = spitfire.DownloadAPPINDEX(config, "/home/frs/project/spitfire-browser/")
|
||||||
|
if err != nil {
|
||||||
|
fmt.Println("Failed to download APPINDEX. A new APPINDEX will be created and uploaded.")
|
||||||
|
}
|
||||||
|
|
||||||
|
err = spitfire.PackageAPPINDEX(
|
||||||
|
name, release, version, arch,
|
||||||
|
fmt.Sprintf("%d", compressedSize),
|
||||||
|
fmt.Sprintf("%d", uncompressedSize),
|
||||||
|
"Spitfire build", url, licence, component, maintainer, "", platform,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Failed to update APPINDEX: %v", err)
|
||||||
|
}
|
||||||
|
fmt.Println("APPINDEX updated successfully.")
|
||||||
|
|
||||||
|
if err := spitfire.CleanAppIndex(); err != nil {
|
||||||
|
log.Fatalf("Failed to clean APPINDEX: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
err = spitfire.UploadAPPINDEX(config, "/home/frs/project/spitfire-browser/")
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Failed to upload updated APPINDEX: %v", err)
|
||||||
|
}
|
||||||
|
fmt.Println("APPINDEX uploaded successfully.")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// // waitForPath checks if a path exists, waiting for up to maxWait seconds and retrying every interval seconds.
|
||||||
|
// func waitForPath(path string, maxWait int, interval int) error {
|
||||||
|
// waited := 0
|
||||||
|
// for {
|
||||||
|
// if PathExists(path) {
|
||||||
|
// return nil // Path exists
|
||||||
|
// }
|
||||||
|
// if waited >= maxWait {
|
||||||
|
// return errors.New("path does not exist after waiting")
|
||||||
|
// }
|
||||||
|
// fmt.Printf("Waiting for path %s to exist...\n", path)
|
||||||
|
// time.Sleep(time.Duration(interval) * time.Second)
|
||||||
|
// waited += interval
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
|
||||||
|
// // PathExists checks if the path exists
|
||||||
|
// func PathExists(path string) bool {
|
||||||
|
// _, err := os.Stat(path)
|
||||||
|
// return !os.IsNotExist(err)
|
||||||
|
// }
|
||||||
|
|
||||||
|
// restoreWorkingDirectory restores the initial working directory after any operation that might change it.
|
||||||
|
func restoreWorkingDirectory() {
|
||||||
|
err := os.Chdir(initialDir)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Failed to restore the working directory: %v", err)
|
||||||
|
}
|
||||||
|
fmt.Printf("Restored working directory to: %s\n", initialDir)
|
||||||
|
}
|
100
package.sh
100
package.sh
|
@ -1,100 +0,0 @@
|
||||||
#!/bin/bash
|
|
||||||
|
|
||||||
# Output APKINDEX file
|
|
||||||
OUTPUT_FILE="./APKINDEX"
|
|
||||||
|
|
||||||
# Function to calculate a mock checksum
|
|
||||||
calc_checksum() {
|
|
||||||
echo -n "$1" | sha1sum | awk '{ print $1 }'
|
|
||||||
}
|
|
||||||
|
|
||||||
# Parse command-line arguments
|
|
||||||
while getopts "P:V:A:S:I:T:U:L:o:m:D:" opt; do
|
|
||||||
case ${opt} in
|
|
||||||
P) name="$OPTARG" ;;
|
|
||||||
V) version="$OPTARG" ;;
|
|
||||||
A) arch="$OPTARG" ;;
|
|
||||||
S) size="$OPTARG" ;;
|
|
||||||
I) installed_size="$OPTARG" ;;
|
|
||||||
T) description="$OPTARG" ;;
|
|
||||||
U) url="$OPTARG" ;;
|
|
||||||
L) license="$OPTARG" ;;
|
|
||||||
o) origin="$OPTARG" ;;
|
|
||||||
m) maintainer="$OPTARG" ;;
|
|
||||||
D) dependencies="$OPTARG" ;;
|
|
||||||
\?) echo "Invalid option: $OPTARG" 1>&2; exit 1 ;;
|
|
||||||
:) echo "Invalid option: $OPTARG requires an argument" 1>&2; exit 1 ;;
|
|
||||||
esac
|
|
||||||
done
|
|
||||||
|
|
||||||
# If no parameters are provided, prompt for input
|
|
||||||
if [ -z "$name" ]; then
|
|
||||||
read -p "Enter package name: " name
|
|
||||||
fi
|
|
||||||
if [ -z "$version" ]; then
|
|
||||||
read -p "Enter package version: " version
|
|
||||||
fi
|
|
||||||
if [ -z "$arch" ]; then
|
|
||||||
arch="x86_64"
|
|
||||||
fi
|
|
||||||
if [ -z "$size" ]; then
|
|
||||||
read -p "Enter package size: " size
|
|
||||||
fi
|
|
||||||
if [ -z "$installed_size" ]; then
|
|
||||||
read -p "Enter installed size: " installed_size
|
|
||||||
fi
|
|
||||||
if [ -z "$description" ]; then
|
|
||||||
read -p "Enter package description: " description
|
|
||||||
fi
|
|
||||||
if [ -z "$url" ]; then
|
|
||||||
read -p "Enter package URL: " url
|
|
||||||
fi
|
|
||||||
if [ -z "$license" ]; then
|
|
||||||
read -p "Enter license: " license
|
|
||||||
fi
|
|
||||||
if [ -z "$origin" ]; then
|
|
||||||
read -p "Enter origin: " origin
|
|
||||||
fi
|
|
||||||
if [ -z "$maintainer" ]; then
|
|
||||||
read -p "Enter maintainer: " maintainer
|
|
||||||
fi
|
|
||||||
if [ -z "$dependencies" ]; then
|
|
||||||
read -p "Enter dependencies: " dependencies
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Mock package file name
|
|
||||||
pkg_file="$name-$version.apk"
|
|
||||||
|
|
||||||
# Calculate checksums based on package details
|
|
||||||
checksum=$(calc_checksum "$pkg_file")
|
|
||||||
content_checksum=$(calc_checksum "$pkg_file")
|
|
||||||
|
|
||||||
timestamp=$(date +%s)
|
|
||||||
|
|
||||||
# Remove existing entry if present
|
|
||||||
sed -i "/^P:$name$/,/^$/d" "$OUTPUT_FILE" # Ensures the removal from 'P:$name' to the first empty line.
|
|
||||||
sed -i "/^C:$checksum/d" "$OUTPUT_FILE" # Additionally, ensures all occurrences of 'C:$checksum' are removed.
|
|
||||||
|
|
||||||
# Append new entry
|
|
||||||
cat >> "$OUTPUT_FILE" << EOF
|
|
||||||
C:$checksum
|
|
||||||
P:$name
|
|
||||||
V:$version
|
|
||||||
A:$arch
|
|
||||||
S:$size
|
|
||||||
I:$installed_size
|
|
||||||
T:$description
|
|
||||||
U:$url
|
|
||||||
L:$license
|
|
||||||
o:$origin
|
|
||||||
m:$maintainer
|
|
||||||
t:$timestamp
|
|
||||||
c:$content_checksum
|
|
||||||
D:$dependencies
|
|
||||||
p:$pkg_file
|
|
||||||
q:
|
|
||||||
Z:$checksum
|
|
||||||
|
|
||||||
EOF
|
|
||||||
|
|
||||||
echo "APKINDEX has been created/updated successfully."
|
|
7
sourceforge_config.json
Normal file
7
sourceforge_config.json
Normal file
|
@ -0,0 +1,7 @@
|
||||||
|
{
|
||||||
|
"SFKeyPath": "~/.ssh/id_rsa.pub",
|
||||||
|
"SFUser": "internet-addict",
|
||||||
|
"SFHost": "web.sourceforge.net",
|
||||||
|
"SFProject": "spitfire-browser"
|
||||||
|
}
|
||||||
|
|
|
@ -1,5 +0,0 @@
|
||||||
# sourceforge_config.sh
|
|
||||||
SF_USER="internet-addict"
|
|
||||||
SF_PROJECT="spitfire-browser"
|
|
||||||
SF_HOST="frs.sourceforge.net"
|
|
||||||
SF_KEY_PATH="$HOME/.ssh/id_rsa" # Path to your SSH private key for SourceForge
|
|
181
spitfire/appindex.go
Normal file
181
spitfire/appindex.go
Normal file
|
@ -0,0 +1,181 @@
|
||||||
|
package spitfire
|
||||||
|
|
||||||
|
import (
|
||||||
|
"crypto/sha1"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Package the APPINDEX update process
|
||||||
|
func PackageAPPINDEX(name, release, version, arch, size, installedSize, description, url, license, origin, maintainer, dependencies, platform string) error {
|
||||||
|
// Mock package file name
|
||||||
|
pkgFile := fmt.Sprintf("%s-%s", name, version)
|
||||||
|
|
||||||
|
// Calculate checksums
|
||||||
|
checksum := calcChecksum(pkgFile)
|
||||||
|
contentChecksum := calcChecksum(pkgFile)
|
||||||
|
|
||||||
|
// Timestamp
|
||||||
|
timestamp := time.Now().Unix()
|
||||||
|
|
||||||
|
// Remove existing entry based on P, R, A, and o fields
|
||||||
|
removeExistingEntry(name, release, arch, origin)
|
||||||
|
|
||||||
|
// Open or create the APPINDEX file for appending
|
||||||
|
file, err := os.OpenFile("./APPINDEX", os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Failed to open APPINDEX file: %v", err)
|
||||||
|
}
|
||||||
|
defer file.Close()
|
||||||
|
|
||||||
|
// Write the new entry
|
||||||
|
entry := fmt.Sprintf(`
|
||||||
|
C:%s
|
||||||
|
P:%s
|
||||||
|
R:%s
|
||||||
|
V:%s
|
||||||
|
A:%s
|
||||||
|
S:%s
|
||||||
|
I:%s
|
||||||
|
T:%s
|
||||||
|
U:%s
|
||||||
|
L:%s
|
||||||
|
o:%s
|
||||||
|
m:%s
|
||||||
|
t:%d
|
||||||
|
c:%s
|
||||||
|
D:%s
|
||||||
|
p:%s
|
||||||
|
q:
|
||||||
|
Z:%s
|
||||||
|
|
||||||
|
`, checksum, name, release, version, arch, size, installedSize, description, url, license, origin, maintainer, timestamp, contentChecksum, dependencies, platform, checksum)
|
||||||
|
|
||||||
|
if _, err := file.WriteString(entry); err != nil {
|
||||||
|
log.Fatalf("Failed to write to APPINDEX file: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Println("APPINDEX has been updated successfully.")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// calcChecksum calculates a checksum (SHA-1) for a given input string
|
||||||
|
func calcChecksum(input string) string {
|
||||||
|
h := sha1.New()
|
||||||
|
_, _ = io.WriteString(h, input)
|
||||||
|
return fmt.Sprintf("%x", h.Sum(nil))
|
||||||
|
}
|
||||||
|
|
||||||
|
// removeExistingEntry removes an existing entry from APPINDEX based on P, R, A, and o fields
|
||||||
|
func removeExistingEntry(name, release, arch, origin string) {
|
||||||
|
// Read file contents
|
||||||
|
content, err := os.ReadFile("./APPINDEX")
|
||||||
|
if err != nil {
|
||||||
|
if os.IsNotExist(err) {
|
||||||
|
return // If file does not exist, no need to remove anything
|
||||||
|
}
|
||||||
|
log.Fatalf("Failed to read APPINDEX: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Remove lines matching the package with the same P, R, A, and o fields
|
||||||
|
lines := strings.Split(string(content), "\n")
|
||||||
|
var newLines []string
|
||||||
|
remove := false
|
||||||
|
for _, line := range lines {
|
||||||
|
// Detect start of an entry by matching on P, R, A, and o
|
||||||
|
if strings.HasPrefix(line, "P:"+name) {
|
||||||
|
remove = true
|
||||||
|
}
|
||||||
|
if remove && strings.HasPrefix(line, "R:"+release) {
|
||||||
|
remove = true
|
||||||
|
}
|
||||||
|
if remove && strings.HasPrefix(line, "A:"+arch) {
|
||||||
|
remove = true
|
||||||
|
}
|
||||||
|
if remove && strings.HasPrefix(line, "o:"+origin) {
|
||||||
|
remove = true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Stop removal at the end of an entry
|
||||||
|
if remove && line == "" {
|
||||||
|
remove = false
|
||||||
|
continue // Skip the line
|
||||||
|
}
|
||||||
|
|
||||||
|
// Append lines that are not part of the matching entry
|
||||||
|
if !remove {
|
||||||
|
newLines = append(newLines, line)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Write the updated contents back to the file
|
||||||
|
err = os.WriteFile("./APPINDEX", []byte(strings.Join(newLines, "\n")), 0644)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Failed to update APPINDEX: %v", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// CleanAppIndex cleans up any orphaned "C:" entries and collapses excessive newlines
|
||||||
|
func CleanAppIndex() error {
|
||||||
|
// Read file contents
|
||||||
|
content, err := os.ReadFile("./APPINDEX")
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to read APPINDEX: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Split the file content into lines
|
||||||
|
lines := strings.Split(string(content), "\n")
|
||||||
|
var newLines []string
|
||||||
|
var currentEntry []string
|
||||||
|
inEntry := false
|
||||||
|
|
||||||
|
for _, line := range lines {
|
||||||
|
line = strings.TrimSpace(line)
|
||||||
|
|
||||||
|
// Start of an entry when we encounter a checksum
|
||||||
|
if strings.HasPrefix(line, "C:") {
|
||||||
|
// If we already have a valid entry, add it to newLines
|
||||||
|
if inEntry && len(currentEntry) > 1 {
|
||||||
|
newLines = append(newLines, currentEntry...)
|
||||||
|
}
|
||||||
|
currentEntry = []string{line}
|
||||||
|
inEntry = true
|
||||||
|
} else if inEntry && line == "" {
|
||||||
|
// End of an entry
|
||||||
|
if len(currentEntry) > 1 {
|
||||||
|
newLines = append(newLines, currentEntry...)
|
||||||
|
newLines = append(newLines, "") // Add a blank line to separate entries
|
||||||
|
}
|
||||||
|
currentEntry = nil
|
||||||
|
inEntry = false
|
||||||
|
} else if inEntry {
|
||||||
|
// Continue adding lines to the current entry
|
||||||
|
currentEntry = append(currentEntry, line)
|
||||||
|
} else if line != "" {
|
||||||
|
// Add non-entry lines (for extra safety)
|
||||||
|
newLines = append(newLines, line)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// In case the last entry was valid
|
||||||
|
if inEntry && len(currentEntry) > 1 {
|
||||||
|
newLines = append(newLines, currentEntry...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Collapse consecutive blank lines
|
||||||
|
cleanedContent := strings.Join(newLines, "\n")
|
||||||
|
cleanedContent = strings.ReplaceAll(cleanedContent, "\n\n\n", "\n\n")
|
||||||
|
|
||||||
|
// Write the cleaned content back to the file
|
||||||
|
err = os.WriteFile("./APPINDEX", []byte(cleanedContent), 0644)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to write cleaned APPINDEX: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Println("APPINDEX cleaned successfully.")
|
||||||
|
return nil
|
||||||
|
}
|
166
spitfire/build.go
Normal file
166
spitfire/build.go
Normal file
|
@ -0,0 +1,166 @@
|
||||||
|
package spitfire
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"os/exec"
|
||||||
|
"path/filepath"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Array to store errors
|
||||||
|
var errors []string
|
||||||
|
|
||||||
|
// Run an external command like scp or rsync
|
||||||
|
func runCommand(command string, args ...string) error {
|
||||||
|
cmd := exec.Command(command, args...)
|
||||||
|
cmd.Stdout = os.Stdout
|
||||||
|
cmd.Stderr = os.Stderr
|
||||||
|
return cmd.Run()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Function to resolve paths using absolute path
|
||||||
|
func ResolvePath(path string) (string, error) {
|
||||||
|
absPath, err := filepath.Abs(path)
|
||||||
|
if err != nil {
|
||||||
|
return "", fmt.Errorf("failed to resolve path: %s", path)
|
||||||
|
}
|
||||||
|
return absPath, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Function to download Mozilla source if not present
|
||||||
|
func DownloadSource(sourcePath string, sourceRepo string) {
|
||||||
|
if _, err := os.Stat(sourcePath); os.IsNotExist(err) {
|
||||||
|
fmt.Println("Mozilla source not found. Cloning from repository...")
|
||||||
|
if err := runCommand("hg", "clone", sourceRepo, sourcePath); err != nil {
|
||||||
|
errors = append(errors, "Failed to clone Mozilla repository.")
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
fmt.Println("Mozilla source already exists.")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Function to discard uncommitted changes
|
||||||
|
func DiscardChanges(sourcePath string) {
|
||||||
|
fmt.Println("Discarding uncommitted changes...")
|
||||||
|
if err := runCommand("hg", "revert", "--all", "--no-backup", "-R", sourcePath); err != nil {
|
||||||
|
errors = append(errors, "Failed to revert changes in Mozilla repository.")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// CleanBuild reverts local changes and removes build artifacts in the
// checkout at sourcePath by running "hg revert" and "./mach clobber".
// Failures are recorded in the package-level errors slice.
// NOTE(review): os.Chdir changes the working directory of the entire
// process and is never restored; later relative operations (including
// "./mach") depend on it — confirm callers expect this side effect.
func CleanBuild(sourcePath string) {
	fmt.Println("Cleaning build...")
	if err := os.Chdir(sourcePath); err != nil {
		errors = append(errors, "Failed to navigate to source directory.")
		return
	}
	// Unlike DiscardChanges, this revert relies on the CWD instead of -R.
	if err := runCommand("hg", "revert", "--all", "--no-backup"); err != nil {
		errors = append(errors, "Failed to revert changes in Mozilla repository.")
	}
	// "./mach clobber" deletes the object directory for a from-scratch build.
	if err := runCommand("./mach", "clobber"); err != nil {
		errors = append(errors, "Failed to clean build.")
	}
}
|
||||||
|
|
||||||
|
// Function to update Mozilla repository
|
||||||
|
func UpdateRepo(sourcePath string) {
|
||||||
|
fmt.Println("Updating Mozilla repository...")
|
||||||
|
if err := os.Chdir(sourcePath); err != nil {
|
||||||
|
errors = append(errors, "Failed to navigate to source directory.")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if err := runCommand("hg", "pull", "-u"); err != nil {
|
||||||
|
errors = append(errors, "Failed to update Mozilla repository.")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Function to update patches
|
||||||
|
func UpdatePatches(patchesDir, patchesRepo, sourcePath string) {
|
||||||
|
fmt.Println("Updating patches...")
|
||||||
|
if _, err := os.Stat(patchesDir); err == nil {
|
||||||
|
fmt.Println("Patches directory already exists. Cleaning and pulling updates...")
|
||||||
|
if err := os.Chdir(patchesDir); err != nil {
|
||||||
|
errors = append(errors, "Failed to navigate to patches directory.")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if err := runCommand("git", "clean", "-xdf"); err != nil {
|
||||||
|
errors = append(errors, "Failed to clean patches directory.")
|
||||||
|
}
|
||||||
|
_ = runCommand("git", "stash", "push", "--include-untracked")
|
||||||
|
if err := runCommand("git", "fetch"); err != nil {
|
||||||
|
errors = append(errors, "Failed to fetch updates from patches repository.")
|
||||||
|
}
|
||||||
|
if runCommand("git", "show-ref", "--verify", "--quiet", "refs/heads/main") == nil {
|
||||||
|
if err := runCommand("git", "rebase", "origin/main"); err != nil {
|
||||||
|
errors = append(errors, "Failed to rebase updates from main branch.")
|
||||||
|
}
|
||||||
|
} else if runCommand("git", "show-ref", "--verify", "--quiet", "refs/heads/master") == nil {
|
||||||
|
if err := runCommand("git", "rebase", "origin/master"); err != nil {
|
||||||
|
errors = append(errors, "Failed to rebase updates from master branch.")
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
errors = append(errors, "No valid branch (main or master) found in patches repository.")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if runCommand("git", "stash", "list") == nil {
|
||||||
|
_ = runCommand("git", "stash", "pop")
|
||||||
|
} else {
|
||||||
|
fmt.Println("No stash entries found, skipping pop.")
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
fmt.Println("Patches directory does not exist. Cloning repository...")
|
||||||
|
if err := runCommand("git", "clone", patchesRepo, patchesDir); err != nil {
|
||||||
|
errors = append(errors, "Failed to clone patches repository.")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fmt.Println("Copying files from patches directory to Firefox source directory...")
|
||||||
|
if err := runCommand("rsync", "-av", "--exclude=.git", patchesDir+"/", sourcePath+"/"); err != nil {
|
||||||
|
errors = append(errors, "Failed to copy files.")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Function to configure Spitfire
|
||||||
|
func Configure(sourcePath string) {
|
||||||
|
fmt.Println("Configuring Spitfire...")
|
||||||
|
if err := os.Chdir(sourcePath); err != nil {
|
||||||
|
errors = append(errors, "Failed to navigate to source directory.")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if err := runCommand("./mach", "configure"); err != nil {
|
||||||
|
errors = append(errors, "Configuration failed.")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Function to build Spitfire
|
||||||
|
func Build(sourcePath string) {
|
||||||
|
fmt.Println("Building Spitfire...")
|
||||||
|
if err := os.Chdir(sourcePath); err != nil {
|
||||||
|
errors = append(errors, "Failed to navigate to source directory.")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if err := runCommand("./mach", "build"); err != nil {
|
||||||
|
errors = append(errors, "Build failed.")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Function to run the project after build
|
||||||
|
func RunProject(sourcePath string) {
|
||||||
|
fmt.Println("Running the project...")
|
||||||
|
if err := os.Chdir(sourcePath); err != nil {
|
||||||
|
errors = append(errors, "Failed to navigate to source directory.")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if err := runCommand("./mach", "run"); err != nil {
|
||||||
|
errors = append(errors, "Failed to run the project.")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Function to print collected errors
|
||||||
|
func PrintErrors() {
|
||||||
|
if len(errors) > 0 {
|
||||||
|
fmt.Println("The following errors occurred during execution:")
|
||||||
|
for _, err := range errors {
|
||||||
|
fmt.Printf("- %s\n", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
246
spitfire/upload.go
Normal file
246
spitfire/upload.go
Normal file
|
@ -0,0 +1,246 @@
|
||||||
|
package spitfire
|
||||||
|
|
||||||
|
import (
	"archive/tar"
	"bytes"
	"compress/gzip"
	"crypto/rand"
	"encoding/hex"
	"encoding/json"
	"fmt"
	"io"
	"os"
	"os/exec"
	"path/filepath"
	"strings"
)
|
||||||
|
|
||||||
|
// Config struct to hold SourceForge configurations
|
||||||
|
type Config struct {
|
||||||
|
SFKeyPath string
|
||||||
|
SFUser string
|
||||||
|
SFHost string
|
||||||
|
SFProject string
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load the SourceForge configuration from a file
|
||||||
|
func LoadConfig() (*Config, error) {
|
||||||
|
file, err := os.Open("sourceforge_config.json") // Assuming a JSON config file
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer file.Close()
|
||||||
|
|
||||||
|
config := &Config{}
|
||||||
|
if err := json.NewDecoder(file).Decode(config); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return config, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// CompressDirectory compresses the build directory to a tar.gz file using PAX format for large file support
|
||||||
|
func CompressDirectory(srcDir, dstFile string) error {
|
||||||
|
// Create the destination file
|
||||||
|
f, err := os.Create(dstFile)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("could not create file %s: %v", dstFile, err)
|
||||||
|
}
|
||||||
|
defer f.Close()
|
||||||
|
|
||||||
|
// Create a new gzip writer
|
||||||
|
gw := gzip.NewWriter(f)
|
||||||
|
defer gw.Close()
|
||||||
|
|
||||||
|
// Create a new tar writer with PAX format for large file support
|
||||||
|
tw := tar.NewWriter(gw)
|
||||||
|
defer tw.Close()
|
||||||
|
|
||||||
|
// Walk through the source directory and add files to the tar archive
|
||||||
|
err = filepath.Walk(srcDir, func(file string, fi os.FileInfo, err error) error {
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create tar header using PAX format
|
||||||
|
header, err := tar.FileInfoHeader(fi, "")
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set the correct header name, preserving the relative directory structure
|
||||||
|
relPath, err := filepath.Rel(srcDir, file)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
header.Name = relPath
|
||||||
|
|
||||||
|
// Explicitly set the type flag for directories
|
||||||
|
if fi.IsDir() {
|
||||||
|
header.Typeflag = tar.TypeDir
|
||||||
|
} else if fi.Mode()&os.ModeSymlink != 0 {
|
||||||
|
// Handle symlinks
|
||||||
|
linkTarget, err := os.Readlink(file)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
header.Linkname = linkTarget
|
||||||
|
}
|
||||||
|
|
||||||
|
// Write the header to the tarball
|
||||||
|
if err := tw.WriteHeader(header); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// If it's a directory or symlink, skip writing its contents
|
||||||
|
if fi.IsDir() || fi.Mode()&os.ModeSymlink != 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Open the file for reading
|
||||||
|
f, err := os.Open(file)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer f.Close()
|
||||||
|
|
||||||
|
// Copy the file content to the tar writer
|
||||||
|
if _, err := io.Copy(tw, f); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("error walking the source directory %s: %v", srcDir, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Upload the file to SourceForge, ensuring the local directory structure is created and uploaded
|
||||||
|
func Upload(config *Config, buildPath, remoteDir string) error {
|
||||||
|
// Generate a random hash for the temp directory name
|
||||||
|
randomHash, err := generateRandomHash(8) // 8 bytes = 16 hex characters
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to generate random hash: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create a temporary directory with the random hash appended
|
||||||
|
tmpDir, err := os.MkdirTemp("", "spitfire-upload-"+randomHash)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to create temporary directory: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create the required local directory structure inside the temporary directory
|
||||||
|
localDir := filepath.Join(tmpDir, remoteDir)
|
||||||
|
err = os.MkdirAll(localDir, os.ModePerm)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to create local directory structure: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Move the build file to the local directory structure
|
||||||
|
destinationFile := filepath.Join(localDir, filepath.Base(buildPath))
|
||||||
|
err = copyFile(buildPath, destinationFile)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to copy file to local directory structure: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Upload the entire local directory structure to the remote directory
|
||||||
|
fmt.Printf("Uploading file %s to %s on SourceForge...\n", buildPath, remoteDir)
|
||||||
|
scpCmd := exec.Command("scp", "-i", config.SFKeyPath, "-r", tmpDir+"/.", fmt.Sprintf("%s@%s:%s", config.SFUser, config.SFHost, "/"))
|
||||||
|
scpCmd.Stdout = os.Stdout
|
||||||
|
scpCmd.Stderr = os.Stderr
|
||||||
|
return scpCmd.Run()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Helper function to generate a random hash
|
||||||
|
func generateRandomHash(length int) (string, error) {
|
||||||
|
bytes := make([]byte, length)
|
||||||
|
_, err := rand.Read(bytes)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return hex.EncodeToString(bytes), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Helper function to copy a file from src to dst
|
||||||
|
func copyFile(src, dst string) error {
|
||||||
|
sourceFile, err := os.Open(src)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer sourceFile.Close()
|
||||||
|
|
||||||
|
destFile, err := os.Create(dst)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer destFile.Close()
|
||||||
|
|
||||||
|
_, err = io.Copy(destFile, sourceFile)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return destFile.Sync() // Ensure all writes to the file are flushed
|
||||||
|
}
|
||||||
|
|
||||||
|
// Download the APPINDEX file from SourceForge
|
||||||
|
func DownloadAPPINDEX(config *Config, remoteDir string) error {
|
||||||
|
fmt.Println("Downloading APPINDEX from SourceForge...")
|
||||||
|
|
||||||
|
// Construct the correct path without double slashes
|
||||||
|
remoteAPPINDEXPath := filepath.Join(remoteDir, "APPINDEX")
|
||||||
|
|
||||||
|
// Run the SCP command to download the APPINDEX file
|
||||||
|
cmd := exec.Command("scp", "-i", config.SFKeyPath, fmt.Sprintf("%s@%s:%s", config.SFUser, config.SFHost, remoteAPPINDEXPath), "./APPINDEX")
|
||||||
|
cmd.Stdout = os.Stdout
|
||||||
|
cmd.Stderr = os.Stderr
|
||||||
|
|
||||||
|
err := cmd.Run()
|
||||||
|
if err != nil {
|
||||||
|
// Check if the error is due to the file not existing
|
||||||
|
if strings.Contains(err.Error(), "No such file or directory") {
|
||||||
|
fmt.Println("APPINDEX file not found on the server. A new one will be created.")
|
||||||
|
return nil // Continue without failing if the APPINDEX is missing
|
||||||
|
}
|
||||||
|
return fmt.Errorf("failed to download APPINDEX: %v", err) // Fail for other types of errors
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Println("APPINDEX downloaded successfully.")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Upload the updated APPINDEX file to SourceForge
|
||||||
|
func UploadAPPINDEX(config *Config, remoteDir string) error {
|
||||||
|
fmt.Println("Uploading updated APPINDEX to SourceForge...")
|
||||||
|
cmd := exec.Command("scp", "-i", config.SFKeyPath, "./APPINDEX", fmt.Sprintf("%s@%s:%s", config.SFUser, config.SFHost, remoteDir))
|
||||||
|
cmd.Stdout = os.Stdout
|
||||||
|
cmd.Stderr = os.Stderr
|
||||||
|
return cmd.Run()
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetDirectorySize calculates the total size of all files in a directory
|
||||||
|
func GetDirectorySize(path string) (int64, error) {
|
||||||
|
var size int64
|
||||||
|
err := filepath.Walk(path, func(_ string, info os.FileInfo, err error) error {
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if !info.IsDir() {
|
||||||
|
size += info.Size()
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
return size, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetFileSize returns the size of a file in bytes
|
||||||
|
func GetFileSize(filePath string) (int64, error) {
|
||||||
|
fileInfo, err := os.Stat(filePath)
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
return fileInfo.Size(), nil
|
||||||
|
}
|
165
upload.sh
165
upload.sh
|
@ -1,165 +0,0 @@
|
||||||
#!/bin/bash
# upload.sh — package a build directory and publish it to SourceForge,
# maintaining a packages.json version index at the project root.
# NOTE(review): this script appears to be superseded by the Go
# implementation (spitfire/upload.go) introduced in the same commit.

# Load SourceForge configuration
# (provides SF_KEY_PATH, SF_USER, SF_HOST, SF_PROJECT — TODO confirm).
source sourceforge_config.sh

# Function to print usage instructions
print_help() {
  echo "Usage: ./upload.sh -p=<path-to-build> -t=<target> [-c|--compress] [-v|--version=<version>]"
  echo ""
  echo "Options:"
  echo " -p, --path : Path to the build directory"
  echo " -t, --target : Target location format: component-arch-release-platform"
  echo " -c, --compress : Compress the build directory into a tar.gz file before uploading"
  echo " -v, --version : Specify version for the package. For nightly, use current date if not specified. For stable, increment version if not specified."
  echo " -h, --help : Display this help message"
  echo ""
  echo "Example use:"
  echo " # Without compression"
  echo " ./upload.sh -p=./mozilla-central/obj-x86_64-pc-linux-gnu/dist/bin -t=browser-x86_64-stable-linux -v=1.0"
  echo ""
  echo " # With compression"
  echo " ./upload.sh -p=./mozilla-central/obj-x86_64-pc-linux-gnu/dist/bin -t=browser-x86_64-stable-linux -c -v=1.0"
  echo ""
  echo " # Nightly build without specifying version"
  echo " ./upload.sh -p=./mozilla-central/obj-x86_64-pc-linux-gnu/dist/bin -t=browser-x86_64-nightly-linux -c"

  exit 0
}

COMPRESS=false
VERSION=""

# Parse command line arguments (all value options use the -x=value form).
while [[ $# -gt 0 ]]; do
  case $1 in
    -p=*|--path=*)
      BUILD_PATH="${1#*=}"
      shift
      ;;
    -t=*|--target=*)
      TARGET="${1#*=}"
      shift
      ;;
    -c|--compress)
      COMPRESS=true
      shift
      ;;
    -v=*|--version=*)
      VERSION="${1#*=}"
      shift
      ;;
    -h|--help)
      print_help
      ;;
    *)
      echo "Invalid option: $1"
      print_help
      ;;
  esac
done

# Check if both required arguments are provided
if [ -z "$BUILD_PATH" ] || [ -z "$TARGET" ]; then
  echo "Error: Both path and target must be specified."
  print_help
fi

# Split the target into its components
# (e.g. "browser-x86_64-nightly-linux" -> COMPONENT/ARCH/RELEASE/PLATFORM).
IFS='-' read -r COMPONENT ARCH RELEASE PLATFORM <<< "$TARGET"

# Download the existing packages.json
echo "Downloading existing packages.json from SourceForge..."
scp -i "$SF_KEY_PATH" "$SF_USER@$SF_HOST:/home/frs/project/$SF_PROJECT/packages.json" packages.json 2>/dev/null || { echo "Failed to download packages.json. Creating a new one."; echo "{}" > packages.json; }

# Check if packages.json is a valid JSON object
if ! jq empty packages.json >/dev/null 2>&1; then
  echo "Invalid packages.json format. Resetting to an empty JSON object."
  echo "{}" > packages.json
fi

# Handle versioning: nightlies default to today's date; stable releases
# bump the minor part of the highest existing MAJOR.MINOR version.
if [ -z "$VERSION" ]; then
  if [[ "$RELEASE" == "nightly" ]]; then
    VERSION=$(date +"%Y-%m-%d")
  elif [[ "$RELEASE" == "stable" ]]; then
    CURRENT_VERSION=$(jq -r --arg comp "$COMPONENT" --arg arch "$ARCH" --arg rel "$RELEASE" --arg plat "$PLATFORM" '.[$comp][$arch][$rel][$plat] | keys | map(select(test("^[0-9]+\\.[0-9]+$"))) | max' packages.json)
    if [ -n "$CURRENT_VERSION" ]; then
      MAJOR_VERSION=$(echo $CURRENT_VERSION | cut -d. -f1)
      MINOR_VERSION=$(echo $CURRENT_VERSION | cut -d. -f2)
      MINOR_VERSION=$((MINOR_VERSION + 1))
      VERSION="${MAJOR_VERSION}.${MINOR_VERSION}"
    else
      VERSION="1.0"
    fi
  fi
fi

# Determine the upload directory based on the target and version.
# Today's nightly goes to ".../latest" so clients have a stable URL.
if [[ "$RELEASE" == "nightly" ]]; then
  if [[ "$VERSION" == $(date +"%Y-%m-%d") ]]; then
    UPLOAD_DIR="$COMPONENT/$ARCH/$RELEASE/latest"
  else
    UPLOAD_DIR="$COMPONENT/$ARCH/$RELEASE/$VERSION"
  fi
elif [[ "$RELEASE" == "stable" ]]; then
  if [[ "$VERSION" =~ ^[0-9]+\.[0-9]+$ ]]; then
    UPLOAD_DIR="$COMPONENT/$ARCH/$RELEASE/$VERSION"
  else
    UPLOAD_DIR="$COMPONENT/$ARCH/$RELEASE/latest"
  fi
fi

# Construct the remote directory path
REMOTE_DIR="/home/frs/project/$SF_PROJECT/$UPLOAD_DIR"

# Handle compression if specified
if [ "$COMPRESS" = true ]; then
  COMPRESSED_FILE="/tmp/${TARGET}.tar.gz"
  echo "Compressing $BUILD_PATH into $COMPRESSED_FILE..."
  tar -czf "$COMPRESSED_FILE" -C "$BUILD_PATH" .
  BUILD_PATH="$COMPRESSED_FILE"
fi

# Upload the files to SourceForge
echo "Uploading files from $BUILD_PATH to $REMOTE_DIR on SourceForge..."
scp -i "$SF_KEY_PATH" "$BUILD_PATH" "$SF_USER@$SF_HOST:$REMOTE_DIR/" 2>/dev/null
UPLOAD_STATUS=$?

# On failure the remote directory probably does not exist yet: mirror an
# empty local tree over rsync to create it, then retry the scp.
if [ $UPLOAD_STATUS -ne 0 ]; then
  echo "Failed to upload files directly. Creating local directory structure and uploading..."

  # Create the local directory structure
  TEMP_DIR=$(mktemp -d)
  mkdir -p "$TEMP_DIR/$UPLOAD_DIR"

  # Upload the directory structure
  rsync -av --omit-dir-times --no-perms -e "ssh -i $SF_KEY_PATH" "$TEMP_DIR/" "$SF_USER@$SF_HOST:/home/frs/project/$SF_PROJECT/" || { echo "Failed to upload directory structure. Exiting."; rm -rf "$TEMP_DIR"; exit 1; }

  # Clean up the temporary directory
  rm -rf "$TEMP_DIR"

  # Retry uploading the files
  scp -i "$SF_KEY_PATH" "$BUILD_PATH" "$SF_USER@$SF_HOST:$REMOTE_DIR/" || { echo "Failed to upload files after creating directory structure. Exiting."; exit 1; }
fi

# Update packages.json with the new information,
# creating each nesting level on demand before writing the version key.
jq --arg comp "$COMPONENT" --arg arch "$ARCH" --arg rel "$RELEASE" --arg plat "$PLATFORM" --arg ver "$VERSION" \
'if .[$comp] == null then .[$comp] = {} else .[$comp] end |
if .[$comp][$arch] == null then .[$comp][$arch] = {} else .[$comp][$arch] end |
if .[$comp][$arch][$rel] == null then .[$comp][$arch][$rel] = {} else .[$comp][$arch][$rel] end |
if .[$comp][$arch][$rel][$plat] == null then .[$comp][$arch][$rel][$plat] = {} else .[$comp][$arch][$rel][$plat] end |
.[$comp][$arch][$rel][$plat][$ver] = $ver' packages.json > packages_temp.json && mv packages_temp.json packages.json

# Upload the updated packages.json to the root directory
echo "Uploading packages.json to the root directory on SourceForge..."
scp -i "$SF_KEY_PATH" packages.json "$SF_USER@$SF_HOST:/home/frs/project/$SF_PROJECT/" || { echo "Failed to upload packages.json. Exiting."; exit 1; }

echo "Upload completed successfully."

# Clean up compressed file if it was created
if [ "$COMPRESS" = true ]; then
  rm "$COMPRESSED_FILE"
fi

exit 0
|
|
Loading…
Add table
Add a link
Reference in a new issue