Files
server-manager/build.py
chrome-storm-c442 9b0e4c76a3 v1.9.0: S3 server type — bucket/object browser, drag-and-drop upload, resilient transfers
New server type: S3 (MinIO, AWS, any S3-compatible storage)
- core/s3_client.py: boto3 client with auto-reconnect, 10 retries, exponential backoff, multipart upload/download, tcp_keepalive
- gui/tabs/s3_tab.py: object browser (Treeview), bucket selector, folder navigation, drag-and-drop upload from Explorer (windnd), progress bar with %, multi-file upload
- CLI: --s3-buckets, --s3-ls, --s3-upload, --s3-download, --s3-delete with retry
- ServerDialog: access_key, secret_key, bucket fields
- Registration: server_store, connection_factory, status_checker, icons, app, i18n (EN/RU/ZH)
- Fix: build.py cleanup_old_releases now sorts by semver (was lexicographic, broke v1.8.100+)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-03 06:32:03 -05:00

319 lines
9.6 KiB
Python

#!/usr/bin/env python3
"""
Build script — creates platform-specific executables via PyInstaller.
Run on the target OS to build for that platform.
Usage:
python build.py # build for current platform
python build.py --clean # clean build artifacts first
"""
import base64
import json
import os
import platform
import re
import shutil
import subprocess
import sys
import urllib.error
import urllib.parse
import urllib.request
# Add project root to sys.path so project-local modules (e.g. version.py)
# are importable no matter where the script is launched from.
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, PROJECT_DIR)
def auto_bump_version() -> str:
    """Increment the patch component of ``__version__`` in version.py.

    Reads version.py, bumps X.Y.Z to X.Y.(Z+1), writes the file back,
    and returns the new version string. Exits the process if the
    version assignment cannot be found.
    """
    path = os.path.join(PROJECT_DIR, "version.py")
    with open(path, "r", encoding="utf-8") as fh:
        text = fh.read()
    found = re.search(r'__version__\s*=\s*"(\d+)\.(\d+)\.(\d+)"', text)
    if found is None:
        print("ERROR: Cannot parse version from version.py")
        sys.exit(1)
    major, minor, patch = (int(g) for g in found.groups())
    bumped = f"{major}.{minor}.{patch + 1}"
    text = re.sub(
        r'__version__\s*=\s*"[\d.]+"',
        f'__version__ = "{bumped}"',
        text,
    )
    with open(path, "w", encoding="utf-8") as fh:
        fh.write(text)
    print(f"Version bumped: {major}.{minor}.{patch} -> {bumped}")
    return bumped
# Auto-bump unless --no-bump flag is passed
if "--no-bump" not in sys.argv:
    _version = auto_bump_version()
else:
    # Strip the flag so later consumers of sys.argv never see it.
    sys.argv.remove("--no-bump")
    _version = None
# NOTE: imported AFTER the bump so __version__ reflects the value just written.
from version import __version__, __app_name__
# Build output locations, all rooted at the project directory.
DIST_DIR = os.path.join(PROJECT_DIR, "dist")
BUILD_DIR = os.path.join(PROJECT_DIR, "build")
RELEASES_DIR = os.path.join(PROJECT_DIR, "releases")
def get_platform_tag() -> str:
    """Return a short '<os>-<arch>' tag for this machine, e.g. 'win-x64'."""
    raw_os = platform.system().lower()
    raw_arch = platform.machine().lower()
    # Collapse the many spellings of common architectures into one tag;
    # unknown values pass through unchanged.
    arch = {
        "x86_64": "x64", "amd64": "x64",
        "x86": "x32", "i686": "x32", "i386": "x32",
        "aarch64": "arm64", "arm64": "arm64",
        "armv7l": "arm",
    }.get(raw_arch, raw_arch)
    os_tag = {"windows": "win", "linux": "linux", "darwin": "mac"}.get(raw_os, raw_os)
    return f"{os_tag}-{arch}"
def clean():
    """Delete PyInstaller output directories and any stray .spec files."""
    for artifact_dir in (DIST_DIR, BUILD_DIR):
        if os.path.exists(artifact_dir):
            shutil.rmtree(artifact_dir)
    spec_files = [name for name in os.listdir(PROJECT_DIR) if name.endswith(".spec")]
    for name in spec_files:
        os.remove(os.path.join(PROJECT_DIR, name))
    print("Cleaned build artifacts")
def build():
    """Build the one-file executable for the current platform and post-process it.

    Steps:
      1. Run PyInstaller (onefile, windowed, bundled data + hidden imports).
      2. Copy the artifact into RELEASES_DIR with a versioned, platform-tagged name.
      3. Prune old releases, re-deploy shared helper files, publish to Gitea.

    Exits the process with status 1 on build failure or missing output.
    """
    tag = get_platform_tag()
    print(f"Building {__app_name__} v{__version__} for {tag}...")
    system = platform.system().lower()
    # PyInstaller command
    cmd_parts = [
        sys.executable, "-m", "PyInstaller",
        "--onefile",
        "--windowed",
        f"--name={__app_name__}",
        "--add-data", f"config/servers.example.json{os.pathsep}config",
        "--add-data", f"tools/ssh.py{os.pathsep}tools",
        "--add-data", f"tools/skill-ssh.md{os.pathsep}tools",
        "--add-data", f"core/encryption.py{os.pathsep}core",
    ]
    # Optional application icon (Windows .ico works on all PyInstaller targets).
    icon_path = os.path.join(PROJECT_DIR, "assets", "icon.ico")
    if os.path.exists(icon_path):
        cmd_parts.extend(["--icon", icon_path])
    # Hidden imports for customtkinter and connection libraries
    cmd_parts.extend([
        "--hidden-import", "customtkinter",
        "--hidden-import", "PIL",
        "--hidden-import", "pyotp",
        "--hidden-import", "pyte",
        "--hidden-import", "psutil",
        "--hidden-import", "pymysql",
        "--hidden-import", "psycopg2",
        "--hidden-import", "pymssql",
        "--hidden-import", "redis",
        "--hidden-import", "requests",
        "--hidden-import", "winrm",
        "--hidden-import", "telnetlib3",
        "--collect-all", "customtkinter",
    ])
    cmd_parts.append("main.py")
    os.chdir(PROJECT_DIR)
    # FIX: run the argument list directly instead of os.system on a hand-quoted
    # string — the old quoting broke on arguments containing quotes and went
    # through the shell unnecessarily. subprocess.run(list) needs no escaping.
    ret = subprocess.run(cmd_parts).returncode
    if ret != 0:
        print(f"Build failed with code {ret}")
        sys.exit(1)
    # Move to releases
    os.makedirs(RELEASES_DIR, exist_ok=True)
    if system == "windows":
        src = os.path.join(DIST_DIR, f"{__app_name__}.exe")
        dst = os.path.join(RELEASES_DIR, f"{__app_name__}-v{__version__}-{tag}.exe")
    else:
        # macOS and Linux both produce an extension-less binary (branches merged).
        src = os.path.join(DIST_DIR, __app_name__)
        dst = os.path.join(RELEASES_DIR, f"{__app_name__}-v{__version__}-{tag}")
    if os.path.exists(src):
        shutil.copy2(src, dst)
        size_mb = os.path.getsize(dst) / (1024 * 1024)
        print(f"\nBuild complete: {dst} ({size_mb:.1f} MB)")
    else:
        print(f"Build output not found: {src}")
        sys.exit(1)
    # Auto-cleanup: keep first release + last 5 (per CLAUDE.md policy)
    cleanup_old_releases()
    # Auto-deploy: sync shared files so Claude Code always has the latest
    deploy_shared_files()
    # Publish release to Gitea
    publish_gitea_release(dst)
def _get_gitea_auth() -> dict:
"""Get Gitea auth headers from git remote 'sensey'."""
try:
_flags = subprocess.CREATE_NO_WINDOW if sys.platform == "win32" else 0
r = subprocess.run(
["git", "remote", "get-url", "sensey"],
capture_output=True, text=True, cwd=PROJECT_DIR, creationflags=_flags,
)
m = re.match(r"https://([^:]+):([^@]+)@", r.stdout.strip())
if m:
user, pw = m.groups()
token = base64.b64encode(f"{user}:{pw}".encode()).decode()
return {"Authorization": f"Basic {token}"}
except Exception:
pass
return {}
# Base URL of the Gitea REST API for this project's repository.
_GITEA_API = "https://git.sensey24.ru/api/v1/repos/aibot777/server-manager"
def publish_gitea_release(exe_path: str):
    """Create a Gitea release for the current version and attach *exe_path* as asset.

    Best-effort: every failure is printed and swallowed so a publishing
    problem never fails the build. A 409 (tag already released) skips
    publishing entirely.
    """
    auth = _get_gitea_auth()
    if not auth:
        print("Gitea publish skipped: no auth (git remote 'sensey' not found)")
        return
    tag = f"v{__version__}"
    filename = os.path.basename(exe_path)
    # Create release
    try:
        data = json.dumps({
            "tag_name": tag,
            "name": tag,
            "body": f"Release {tag}",
        }).encode()
        req = urllib.request.Request(
            f"{_GITEA_API}/releases",
            data=data,
            headers={**auth, "Content-Type": "application/json"},
            method="POST",
        )
        resp = urllib.request.urlopen(req, timeout=30)
        release = json.loads(resp.read())
        release_id = release["id"]
    except urllib.error.HTTPError as e:
        if e.code == 409:
            # Tag already released (e.g. rebuild without a version bump).
            print(f"Gitea release {tag} already exists, skipping")
        else:
            print(f"Gitea release creation failed: {e}")
        return
    except Exception as e:
        print(f"Gitea release creation failed: {e}")
        return
    # Upload asset.
    # FIX: the asset name was a literal "(unknown)" placeholder and the computed
    # `filename` was never used — name the asset after the artifact, URL-encoded
    # since it travels in a query-string parameter.
    try:
        with open(exe_path, "rb") as f:
            file_data = f.read()
        req = urllib.request.Request(
            f"{_GITEA_API}/releases/{release_id}/assets?name={urllib.parse.quote(filename)}",
            data=file_data,
            headers={**auth, "Content-Type": "application/octet-stream"},
            method="POST",
        )
        resp = urllib.request.urlopen(req, timeout=180)
        asset = json.loads(resp.read())
        size_mb = asset["size"] / (1024 * 1024)
        print(f"Gitea release published: {tag} ({filename}, {size_mb:.1f} MB)")
    except Exception as e:
        print(f"Gitea asset upload failed: {e}")
def _version_key(path: str):
"""Extract (major, minor, patch) tuple for semver sorting."""
m = re.search(r'v(\d+)\.(\d+)\.(\d+)', os.path.basename(path))
if m:
return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
return (0, 0, 0)
def cleanup_old_releases():
    """Prune RELEASES_DIR, keeping the very first release plus the 5 newest."""
    import glob
    candidates = sorted(
        glob.glob(os.path.join(RELEASES_DIR, f"{__app_name__}-v*")),
        key=_version_key,
    )
    # first + 5 newest = 6 files; with 6 or fewer there is nothing to delete.
    if len(candidates) <= 6:
        return
    # Sorted by semver, so index 0 is the oldest (first-ever) release.
    keep = {candidates[0], *candidates[-5:]}
    removed = []
    for path in candidates:
        if path in keep:
            continue
        os.remove(path)
        removed.append(os.path.basename(path))
    if removed:
        print(f"Cleaned {len(removed)} old releases: {', '.join(removed)}")
def deploy_shared_files():
    """Copy ssh.py, encryption.py and the skill file into their shared locations.

    Claude Code's /ssh skill runs the copies under ~/.server-connections and
    ~/.claude/commands directly, so they must be refreshed on every build —
    otherwise editing tools/ssh.py updates the exe but NOT the live
    ~/.server-connections/ssh.py that Claude Code actually calls.
    """
    shared_dir = os.path.expanduser("~/.server-connections")
    skill_dir = os.path.expanduser("~/.claude/commands")
    targets = (
        (os.path.join(PROJECT_DIR, "tools", "ssh.py"),
         os.path.join(shared_dir, "ssh.py")),
        (os.path.join(PROJECT_DIR, "core", "encryption.py"),
         os.path.join(shared_dir, "encryption.py")),
        (os.path.join(PROJECT_DIR, "tools", "skill-ssh.md"),
         os.path.join(skill_dir, "ssh.md")),
    )
    copied = []
    for src, dst in targets:
        if os.path.exists(src):
            os.makedirs(os.path.dirname(dst), exist_ok=True)
            shutil.copy2(src, dst)
            copied.append(os.path.basename(dst))
    if copied:
        print(f"Auto-deployed to local: {', '.join(copied)}")
if __name__ == "__main__":
if "--clean" in sys.argv:
clean()
build()