392 lines
12 KiB
Python
392 lines
12 KiB
Python
#!/usr/bin/env python3
|
|
"""
|
|
Build script — creates platform-specific executables via PyInstaller.
|
|
Run on the target OS to build for that platform.
|
|
|
|
Usage:
|
|
python build.py # build for current platform
|
|
python build.py --clean # clean build artifacts first
|
|
"""
|
|
|
|
import base64
|
|
import json
|
|
import os
|
|
import re
|
|
import sys
|
|
import shutil
|
|
import platform
|
|
import subprocess
|
|
import urllib.error
|
|
import urllib.request
|
|
|
|
# Add project root to sys.path so sibling modules (version, core, ...) import.
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, PROJECT_DIR)
|
|
|
|
|
|
def auto_bump_version() -> str:
    """Increment the patch component of ``__version__`` in version.py.

    The patch number grows up to 99; past that the minor version is
    bumped and the patch restarts at 1. Exits the process with status 1
    when version.py cannot be parsed. Returns the new version string.
    """
    ver_file = os.path.join(PROJECT_DIR, "version.py")
    with open(ver_file, "r", encoding="utf-8") as fh:
        text = fh.read()

    found = re.search(r'__version__\s*=\s*"(\d+)\.(\d+)\.(\d+)"', text)
    if found is None:
        print("ERROR: Cannot parse version from version.py")
        sys.exit(1)

    major, minor, patch = (int(part) for part in found.groups())

    # Roll over: patch caps at 99, then minor advances and patch restarts at 1.
    if patch >= 99:
        minor += 1
        next_patch = 1
    else:
        next_patch = patch + 1
    bumped = f"{major}.{minor}.{next_patch}"

    text = re.sub(
        r'__version__\s*=\s*"[\d.]+"',
        f'__version__ = "{bumped}"',
        text,
    )
    with open(ver_file, "w", encoding="utf-8") as fh:
        fh.write(text)

    print(f"Version bumped: {major}.{minor}.{patch} -> {bumped}")
    return bumped
|
|
|
|
|
|
# Auto-bump unless --no-bump flag is passed
if "--no-bump" not in sys.argv:
    _version = auto_bump_version()
else:
    # Strip the flag so later argv checks (e.g. --clean) are unaffected.
    sys.argv.remove("--no-bump")
    _version = None

# Imported AFTER the bump so __version__ reflects the freshly written number.
from version import __version__, __app_name__
|
|
|
|
DIST_DIR = os.path.join(PROJECT_DIR, "dist")          # PyInstaller output dir
BUILD_DIR = os.path.join(PROJECT_DIR, "build")        # PyInstaller working dir
RELEASES_DIR = os.path.join(PROJECT_DIR, "releases")  # versioned binaries kept in-repo
|
|
|
|
|
|
def get_platform_tag() -> str:
|
|
system = platform.system().lower()
|
|
machine = platform.machine().lower()
|
|
|
|
arch_map = {
|
|
"x86_64": "x64", "amd64": "x64",
|
|
"x86": "x32", "i686": "x32", "i386": "x32",
|
|
"aarch64": "arm64", "arm64": "arm64",
|
|
"armv7l": "arm",
|
|
}
|
|
arch = arch_map.get(machine, machine)
|
|
|
|
os_map = {"windows": "win", "linux": "linux", "darwin": "mac"}
|
|
os_tag = os_map.get(system, system)
|
|
|
|
return f"{os_tag}-{arch}"
|
|
|
|
|
|
def clean():
    """Remove dist/ and build/ directories plus any stray .spec files."""
    for directory in (DIST_DIR, BUILD_DIR):
        if os.path.exists(directory):
            shutil.rmtree(directory)
    # PyInstaller drops a .spec file next to the script; sweep those too.
    for name in os.listdir(PROJECT_DIR):
        if name.endswith(".spec"):
            os.remove(os.path.join(PROJECT_DIR, name))
    print("Cleaned build artifacts")
|
|
|
|
|
|
def build():
    """Run PyInstaller for the current platform and publish the result.

    Builds a single-file executable, copies it into RELEASES_DIR as
    ``<app>-v<version>-<os>-<arch>[.exe]``, prunes old releases, deploys
    shared files locally, and uploads the release to Gitea.
    Exits the process with status 1 if the build or the copy fails.
    """
    tag = get_platform_tag()
    print(f"Building {__app_name__} v{__version__} for {tag}...")

    system = platform.system().lower()

    # PyInstaller command; --add-data source paths are relative to PROJECT_DIR.
    cmd_parts = [
        sys.executable, "-m", "PyInstaller",
        "--onefile",
        "--windowed",
        f"--name={__app_name__}",
        "--add-data", f"config/servers.example.json{os.pathsep}config",
        "--add-data", f"tools/ssh.py{os.pathsep}tools",
        "--add-data", f"tools/skill-ssh.md{os.pathsep}tools",
        "--add-data", f"tools/install_ai_integrations.py{os.pathsep}tools",
        "--add-data", f"core/encryption.py{os.pathsep}core",
        "--add-data", f".codex/skills/server-manager{os.pathsep}.codex/skills/server-manager",
        "--add-data", f".gemini/skills/server-manager{os.pathsep}.gemini/skills/server-manager",
        "--add-data", f".gemini/settings.json{os.pathsep}.gemini",
        "--add-data", f"GEMINI.md{os.pathsep}.",
    ]

    # PNG icons for GUI (Material Design) — optional, warn if missing.
    icons_dir = os.path.join(PROJECT_DIR, "assets", "icons")
    if os.path.isdir(icons_dir):
        cmd_parts.extend(["--add-data", f"assets/icons{os.pathsep}assets/icons"])
    else:
        print("WARNING: assets/icons/ not found, building without PNG icons")

    # Executable icon — optional.
    icon_path = os.path.join(PROJECT_DIR, "assets", "icon.ico")
    if os.path.exists(icon_path):
        cmd_parts.extend(["--icon", icon_path])

    # Hidden imports for customtkinter and connection libraries that
    # PyInstaller's static analysis cannot discover on its own.
    cmd_parts.extend([
        "--hidden-import", "customtkinter",
        "--hidden-import", "PIL",
        "--hidden-import", "PIL._tkinter_finder",
        "--hidden-import", "pyotp",
        "--hidden-import", "pyte",
        "--hidden-import", "psutil",
        "--hidden-import", "pymysql",
        "--hidden-import", "psycopg2",
        "--hidden-import", "pymssql",
        "--hidden-import", "redis",
        "--hidden-import", "requests",
        "--hidden-import", "winrm",
        "--hidden-import", "telnetlib3",
        "--collect-all", "customtkinter",
    ])

    cmd_parts.append("main.py")

    os.chdir(PROJECT_DIR)
    # Fix: run the argument list directly instead of os.system(" ".join(...)).
    # The old string form broke on paths containing quotes and spawned an
    # unnecessary shell; subprocess passes each argument verbatim.
    ret = subprocess.run(cmd_parts, cwd=PROJECT_DIR).returncode

    if ret != 0:
        print(f"Build failed with code {ret}")
        sys.exit(1)

    # Copy the binary into releases/ under a versioned, platform-tagged name.
    os.makedirs(RELEASES_DIR, exist_ok=True)

    if system == "windows":
        src = os.path.join(DIST_DIR, f"{__app_name__}.exe")
        dst = os.path.join(RELEASES_DIR, f"{__app_name__}-v{__version__}-{tag}.exe")
    else:
        # macOS and Linux binaries carry no extension (branches were identical).
        src = os.path.join(DIST_DIR, __app_name__)
        dst = os.path.join(RELEASES_DIR, f"{__app_name__}-v{__version__}-{tag}")

    if os.path.exists(src):
        shutil.copy2(src, dst)
        size_mb = os.path.getsize(dst) / (1024 * 1024)
        print(f"\nBuild complete: {dst} ({size_mb:.1f} MB)")
    else:
        print(f"Build output not found: {src}")
        sys.exit(1)

    # Auto-cleanup: keep first release + last 5 (per CLAUDE.md policy)
    cleanup_old_releases()

    # Auto-deploy: sync shared files so Claude Code always has the latest
    deploy_shared_files()

    # Publish release to Gitea
    publish_gitea_release(dst)
|
|
|
|
|
|
def _get_gitea_auth() -> dict:
|
|
"""Get Gitea auth headers from git remote 'sensey'."""
|
|
try:
|
|
_flags = subprocess.CREATE_NO_WINDOW if sys.platform == "win32" else 0
|
|
r = subprocess.run(
|
|
["git", "remote", "get-url", "sensey"],
|
|
capture_output=True, text=True, cwd=PROJECT_DIR, creationflags=_flags,
|
|
)
|
|
m = re.match(r"https://([^:]+):([^@]+)@", r.stdout.strip())
|
|
if m:
|
|
user, pw = m.groups()
|
|
token = base64.b64encode(f"{user}:{pw}".encode()).decode()
|
|
return {"Authorization": f"Basic {token}"}
|
|
except Exception:
|
|
pass
|
|
return {}
|
|
|
|
|
|
# Base REST endpoint for the project's Gitea repository (releases API).
_GITEA_API = "https://git.sensey24.ru/api/v1/repos/aibot777/server-manager"
|
|
|
|
|
|
def _generate_changelog() -> str:
    """Generate changelog from git commits since previous tag."""
    try:
        # Collect v<major>.<minor>.<patch> tags and order them as semver tuples.
        tag_output = subprocess.check_output(
            ["git", "tag", "--list", "v*"],
            text=True, stderr=subprocess.DEVNULL,
        ).strip()
        semver_tags = [
            t for t in tag_output.splitlines()
            if re.match(r'^v\d+\.\d+\.\d+$', t)
        ]
        semver_tags.sort(key=lambda t: tuple(int(x) for x in t[1:].split(".")))

        current_tag = f"v{__version__}"
        # Previous tag = newest tag that is not the current build's tag;
        # without one, fall back to the last 20 commits.
        older = [t for t in semver_tags if t != current_tag]
        log_range = f"{older[-1]}..HEAD" if older else "HEAD~20..HEAD"

        commits = subprocess.check_output(
            ["git", "log", log_range, "--oneline", "--no-merges"],
            text=True, stderr=subprocess.DEVNULL,
        ).strip()

        if not commits:
            return f"Release {current_tag}"

        # Each line is "<hash> <message>"; keep only the message as a bullet.
        bullets = []
        for entry in commits.splitlines():
            pieces = entry.split(" ", 1)
            if len(pieces) == 2:
                bullets.append(f"- {pieces[1]}")
        return f"## What's New in {current_tag}\n\n" + "\n".join(bullets)
    except Exception as exc:
        print(f"Changelog generation failed: {exc}")
        return f"Release v{__version__}"
|
|
|
|
|
|
def publish_gitea_release(exe_path: str):
    """Create a Gitea release for the current version and attach the binary.

    Skips with a message when no credentials are available or when the
    release tag already exists (HTTP 409). Network errors are reported
    but never raised, so a failed publish does not fail the build.

    Args:
        exe_path: Path to the built executable to upload as the asset.
    """
    auth = _get_gitea_auth()
    if not auth:
        print("Gitea publish skipped: no auth (git remote 'sensey' not found)")
        return

    tag = f"v{__version__}"
    filename = os.path.basename(exe_path)
    changelog = _generate_changelog()

    # Create release
    try:
        data = json.dumps({
            "tag_name": tag,
            "name": tag,
            "body": changelog,
        }).encode()
        req = urllib.request.Request(
            f"{_GITEA_API}/releases",
            data=data,
            headers={**auth, "Content-Type": "application/json"},
            method="POST",
        )
        resp = urllib.request.urlopen(req, timeout=30)
        release = json.loads(resp.read())
        release_id = release["id"]
    except urllib.error.HTTPError as e:
        if e.code == 409:
            # 409 Conflict: a release for this tag already exists.
            print(f"Gitea release {tag} already exists, skipping")
        else:
            print(f"Gitea release creation failed: {e}")
        return
    except Exception as e:
        print(f"Gitea release creation failed: {e}")
        return

    # Upload asset. Fix: the query string previously contained the literal
    # placeholder "(unknown)" instead of the computed file name, so assets
    # were uploaded under a bogus name; `filename` was computed but unused.
    try:
        with open(exe_path, "rb") as f:
            file_data = f.read()
        req = urllib.request.Request(
            f"{_GITEA_API}/releases/{release_id}/assets?name={filename}",
            data=file_data,
            headers={**auth, "Content-Type": "application/octet-stream"},
            method="POST",
        )
        resp = urllib.request.urlopen(req, timeout=180)
        asset = json.loads(resp.read())
        size_mb = asset["size"] / (1024 * 1024)
        print(f"Gitea release published: {tag} ({filename}, {size_mb:.1f} MB)")
    except Exception as e:
        print(f"Gitea asset upload failed: {e}")
|
|
|
|
|
|
def _version_key(path: str):
|
|
"""Extract (major, minor, patch) tuple for semver sorting."""
|
|
m = re.search(r'v(\d+)\.(\d+)\.(\d+)', os.path.basename(path))
|
|
if m:
|
|
return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
|
|
return (0, 0, 0)
|
|
|
|
|
|
|
|
def cleanup_old_releases():
    """Keep the first release (v1.0.0) and the last 5 releases, delete the rest.

    Deletions go through ``git rm`` so they are staged for the next
    commit; when git is unavailable or the file is untracked, the file
    is deleted directly.
    """
    import glob

    pattern = os.path.join(RELEASES_DIR, f"{__app_name__}-v*")
    all_exes = sorted(glob.glob(pattern), key=_version_key)

    # first + 5 newest = 6; with that many or fewer there is nothing to prune.
    if len(all_exes) <= 6:
        return

    # First release is always all_exes[0] (sorted by semver, v1.0.0 < v1.8.x)
    first = all_exes[0]
    last_5 = all_exes[-5:]
    keep = set([first] + last_5)

    removed = []
    _flags = subprocess.CREATE_NO_WINDOW if sys.platform == "win32" else 0
    for f in all_exes:
        if f in keep:
            continue
        # Prefer `git rm` so the deletion is staged for commit.
        staged = False
        try:
            result = subprocess.run(
                ["git", "rm", "-f", "--quiet", f],
                cwd=PROJECT_DIR, creationflags=_flags,
                capture_output=True,
            )
            # Fix: subprocess.run without check=True never raises on a
            # non-zero exit (e.g. untracked file), so the old fallback in
            # `except` was unreachable and the file silently survived.
            staged = result.returncode == 0
        except Exception:
            staged = False
        if not staged and os.path.exists(f):
            # Fallback: plain filesystem delete.
            os.remove(f)
        removed.append(os.path.basename(f))

    if removed:
        print(f"Cleaned {len(removed)} old releases: {', '.join(removed)}")
|
|
|
|
|
|
def deploy_shared_files():
    """Auto-deploy shared CLI files and local agent integrations after build."""
    # Imported lazily: core.claude_setup is only needed at deploy time.
    from core.claude_setup import (
        install_claude_skill,
        install_codex_skill,
        install_gemini_skill,
        install_ssh_script,
    )

    steps = (
        install_ssh_script,
        install_claude_skill,
        install_codex_skill,
        install_gemini_skill,
    )

    results = []
    for step in steps:
        try:
            outcome = step()
        except Exception as exc:
            # A single failed integration must not break the build.
            print(f"WARNING: auto-deploy step failed ({step.__name__}): {exc}")
            continue
        if outcome:
            results.append(outcome.replace("\n", "; "))

    if results:
        print("Auto-deployed to local:")
        for line in results:
            print(f"- {line}")
|
|
|
|
|
|
if __name__ == "__main__":
    # --clean wipes previous artifacts first; the build itself always runs.
    if "--clean" in sys.argv:
        clean()
    build()
|