Compare commits

...

3 Commits

Author SHA1 Message Date
chrome-storm-c442
c21b263b24 v1.9.25: show server group in --list and --info CLI output
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-05 06:55:32 -05:00
chrome-storm-c442
464b803b42 v1.9.24: add --s3-create-bucket to CLI and skill
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-05 06:21:27 -05:00
chrome-storm-c442
bbef9ad014 v1.9.23: S3 create/delete bucket GUI buttons
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-05 06:16:50 -05:00
10 changed files with 162 additions and 10 deletions

View File

@@ -51,7 +51,7 @@ python ~/.server-connections/ssh.py --status # online/offline
| Тип | Команды | | Тип | Команды |
|-----|---------| |-----|---------|
| `ssh`/`telnet` | `ALIAS "cmd"`, `--upload ALIAS local remote`, `--download ALIAS remote local` | | `ssh`/`telnet` | `ALIAS "cmd"`, `--upload ALIAS local remote`, `--download ALIAS remote local` |
| `s3` (MinIO и др.) | `--s3-buckets ALIAS`, `--s3-ls ALIAS bucket/prefix`, `--s3-upload ALIAS local bucket/key`, `--s3-download ALIAS bucket/key local`, `--s3-delete ALIAS bucket/key`, `--s3-url ALIAS bucket/key [SEC]` | | `s3` (MinIO и др.) | `--s3-buckets ALIAS`, `--s3-ls ALIAS bucket/prefix`, `--s3-upload ALIAS local bucket/key`, `--s3-download ALIAS bucket/key local`, `--s3-delete ALIAS bucket/key`, `--s3-url ALIAS bucket/key [SEC]`, `--s3-create-bucket ALIAS name` |
| `mariadb`/`mssql`/`postgresql` | `--sql ALIAS "SELECT ..."`, `--sql-databases ALIAS`, `--sql-tables ALIAS [db]` | | `mariadb`/`mssql`/`postgresql` | `--sql ALIAS "SELECT ..."`, `--sql-databases ALIAS`, `--sql-tables ALIAS [db]` |
| `redis` | `--redis ALIAS "GET key"`, `--redis-info ALIAS`, `--redis-keys ALIAS "pattern"` | | `redis` | `--redis ALIAS "GET key"`, `--redis-info ALIAS`, `--redis-keys ALIAS "pattern"` |
| `grafana` | `--grafana-dashboards ALIAS`, `--grafana-alerts ALIAS` | | `grafana` | `--grafana-dashboards ALIAS`, `--grafana-alerts ALIAS` |

View File

@@ -390,6 +390,12 @@ _EN = {
"s3_uploading_n": "Uploading {count} files...", "s3_uploading_n": "Uploading {count} files...",
"s3_uploaded_n": "Uploaded {count} files", "s3_uploaded_n": "Uploaded {count} files",
"s3_upload_partial": "Uploaded {ok}/{total} files", "s3_upload_partial": "Uploaded {ok}/{total} files",
"s3_create_bucket": "Create Bucket",
"s3_bucket_name_prompt": "Bucket name:",
"s3_delete_bucket": "Delete Bucket",
"s3_delete_bucket_confirm": "Delete bucket \"{name}\"? It must be empty.",
"s3_bucket_created": "Bucket \"{name}\" created",
"s3_bucket_deleted": "Bucket \"{name}\" deleted",
"s3_new_folder": "New Folder", "s3_new_folder": "New Folder",
"s3_folder_name_prompt": "Folder name:", "s3_folder_name_prompt": "Folder name:",
"s3_creating_folder": "Creating folder...", "s3_creating_folder": "Creating folder...",
@@ -907,6 +913,12 @@ _RU = {
"s3_uploading_n": "Загрузка {count} файлов...", "s3_uploading_n": "Загрузка {count} файлов...",
"s3_uploaded_n": "Загружено {count} файлов", "s3_uploaded_n": "Загружено {count} файлов",
"s3_upload_partial": "Загружено {ok}/{total} файлов", "s3_upload_partial": "Загружено {ok}/{total} файлов",
"s3_create_bucket": "Создать бакет",
"s3_bucket_name_prompt": "Имя бакета:",
"s3_delete_bucket": "Удалить бакет",
"s3_delete_bucket_confirm": "Удалить бакет \"{name}\"? Он должен быть пустым.",
"s3_bucket_created": "Бакет \"{name}\" создан",
"s3_bucket_deleted": "Бакет \"{name}\" удалён",
"s3_new_folder": "Новая папка", "s3_new_folder": "Новая папка",
"s3_folder_name_prompt": "Имя папки:", "s3_folder_name_prompt": "Имя папки:",
"s3_creating_folder": "Создание папки...", "s3_creating_folder": "Создание папки...",
@@ -1424,6 +1436,12 @@ _ZH = {
"s3_uploading_n": "正在上传 {count} 个文件...", "s3_uploading_n": "正在上传 {count} 个文件...",
"s3_uploaded_n": "已上传 {count} 个文件", "s3_uploaded_n": "已上传 {count} 个文件",
"s3_upload_partial": "已上传 {ok}/{total} 个文件", "s3_upload_partial": "已上传 {ok}/{total} 个文件",
"s3_create_bucket": "创建存储桶",
"s3_bucket_name_prompt": "存储桶名称:",
"s3_delete_bucket": "删除存储桶",
"s3_delete_bucket_confirm": "删除存储桶 \"{name}\"?必须为空。",
"s3_bucket_created": "存储桶 \"{name}\" 已创建",
"s3_bucket_deleted": "存储桶 \"{name}\" 已删除",
"s3_new_folder": "新建文件夹", "s3_new_folder": "新建文件夹",
"s3_folder_name_prompt": "文件夹名称:", "s3_folder_name_prompt": "文件夹名称:",
"s3_creating_folder": "创建文件夹中...", "s3_creating_folder": "创建文件夹中...",

View File

@@ -518,3 +518,29 @@ class S3Client:
return resp.get("ContentLength", 0) return resp.get("ContentLength", 0)
except Exception: except Exception:
return 0 return 0
def create_bucket(self, bucket_name: str) -> bool:
    """Create a new, empty S3 bucket named *bucket_name*.

    Returns True on success; False when the client is not connected
    or the backend rejects the request (the error is logged, never
    raised to the caller).
    """
    if not self._ensure_connected():
        return False
    try:
        self._client.create_bucket(Bucket=bucket_name)
        # Record the successful round-trip for connection health tracking.
        self._last_ok = time.time()
        log.info("S3 bucket created: %s", bucket_name)
    except Exception as exc:
        log.error("S3 create_bucket failed: %s", exc)
        return False
    return True
def delete_bucket(self, bucket_name: str) -> bool:
    """Delete the S3 bucket *bucket_name* (the server requires it empty).

    Returns True on success; False when the client is not connected
    or the backend refuses (e.g. bucket not empty) — the error is
    logged rather than raised.
    """
    if not self._ensure_connected():
        return False
    try:
        self._client.delete_bucket(Bucket=bucket_name)
        # Record the successful round-trip for connection health tracking.
        self._last_ok = time.time()
        log.info("S3 bucket deleted: %s", bucket_name)
    except Exception as exc:
        log.error("S3 delete_bucket failed: %s", exc)
        return False
    return True

View File

@@ -153,7 +153,24 @@ class S3Tab(ctk.CTkFrame):
bucket_frame, variable=self._bucket_var, values=[""], bucket_frame, variable=self._bucket_var, values=[""],
width=200, command=self._on_bucket_change, width=200, command=self._on_bucket_change,
) )
self._bucket_menu.pack(side="left", padx=(0, 15)) self._bucket_menu.pack(side="left", padx=(0, 5))
# Create bucket [+]
self._create_bucket_btn = ctk.CTkButton(
bucket_frame, text="+", width=28, height=28,
corner_radius=6, font=ctk.CTkFont(size=14, weight="bold"),
command=self._create_bucket,
)
self._create_bucket_btn.pack(side="left", padx=(0, 3))
# Delete bucket [🗑]
self._delete_bucket_btn = ctk.CTkButton(
bucket_frame, text="\U0001f5d1", width=28, height=28,
corner_radius=6, fg_color="#dc2626", hover_color="#b91c1c",
font=ctk.CTkFont(size=13),
command=self._delete_bucket,
)
self._delete_bucket_btn.pack(side="left", padx=(0, 15))
# Path display # Path display
self._path_label = ctk.CTkLabel( self._path_label = ctk.CTkLabel(
@@ -626,6 +643,64 @@ class S3Tab(ctk.CTkFrame):
threading.Thread(target=_do, daemon=True).start() threading.Thread(target=_do, daemon=True).start()
def _create_bucket(self):
    """Ask the user for a bucket name and create it in the background."""
    if not self._client:
        return
    raw = ctk.CTkInputDialog(
        text=t("s3_bucket_name_prompt"),
        title=t("s3_create_bucket"),
    ).get_input()
    # get_input() returns None on cancel; also ignore blank input.
    if not raw or not raw.strip():
        return
    bucket = raw.strip()
    self._status_label.configure(text="...")

    def _worker():
        # Network call runs off the Tk thread; the result is marshalled
        # back onto the event loop via after().
        ok = self._client.create_bucket(bucket)
        self.after(0, lambda: self._on_bucket_created(ok, bucket))

    threading.Thread(target=_worker, daemon=True).start()
def _on_bucket_created(self, ok: bool, name: str):
    """UI-thread callback after a bucket-create attempt finishes."""
    if not ok:
        # NOTE(review): reuses the generic folder-failure message key —
        # presumably no bucket-specific failure key exists; confirm.
        self._status_label.configure(text=t("s3_folder_failed"))
        return
    self._status_label.configure(
        text=t("s3_bucket_created").format(name=name))
    # Select the freshly created bucket and refresh the bucket list.
    self._current_bucket = name
    self._load_buckets()
def _delete_bucket(self):
    """Confirm and delete the selected bucket (server requires it empty)."""
    if not self._client or not self._current_bucket:
        return
    from tkinter import messagebox
    target = self._current_bucket
    confirmed = messagebox.askyesno(
        t("s3_delete_bucket"),
        t("s3_delete_bucket_confirm").format(name=target),
    )
    if not confirmed:
        return
    self._status_label.configure(text="...")

    def _worker():
        # Delete off the Tk thread; marshal the result back via after().
        result = self._client.delete_bucket(target)
        self.after(0, lambda: self._on_bucket_deleted(result, target))

    threading.Thread(target=_worker, daemon=True).start()
def _on_bucket_deleted(self, ok: bool, name: str):
    """UI-thread callback after a bucket-delete attempt finishes."""
    if not ok:
        self._status_label.configure(text=t("s3_delete_failed"))
        return
    self._status_label.configure(
        text=t("s3_bucket_deleted").format(name=name))
    # Clear the selection and reload the remaining buckets.
    self._current_bucket = ""
    self._load_buckets()
def _go_back(self): def _go_back(self):
if self._nav_stack: if self._nav_stack:
self._current_prefix = self._nav_stack.pop() self._current_prefix = self._nav_stack.pop()

View File

@@ -32,7 +32,7 @@
| `telnet` | `ALIAS "command"` (без SFTP/sudo/ключей) | `--upload`, `--download` | | `telnet` | `ALIAS "command"` (без SFTP/sudo/ключей) | `--upload`, `--download` |
| `mariadb` / `mssql` / `postgresql` | `--sql`, `--sql-databases`, `--sql-tables` | `ALIAS "command"` | | `mariadb` / `mssql` / `postgresql` | `--sql`, `--sql-databases`, `--sql-tables` | `ALIAS "command"` |
| `redis` | `--redis`, `--redis-info`, `--redis-keys` | `ALIAS "command"` | | `redis` | `--redis`, `--redis-info`, `--redis-keys` | `ALIAS "command"` |
| `s3` (MinIO, AWS S3, и др.) | `--s3-buckets`, `--s3-ls`, `--s3-upload`, `--s3-download`, `--s3-delete`, `--s3-url` | `ALIAS "command"`, `--upload`, `--download` | | `s3` (MinIO, AWS S3, и др.) | `--s3-buckets`, `--s3-ls`, `--s3-upload`, `--s3-download`, `--s3-delete`, `--s3-url`, `--s3-create-bucket` | `ALIAS "command"`, `--upload`, `--download` |
| `grafana` | `--grafana-dashboards`, `--grafana-alerts` | `ALIAS "command"` | | `grafana` | `--grafana-dashboards`, `--grafana-alerts` | `ALIAS "command"` |
| `prometheus` | `--prom-query`, `--prom-targets`, `--prom-alerts` | `ALIAS "command"` | | `prometheus` | `--prom-query`, `--prom-targets`, `--prom-alerts` | `ALIAS "command"` |
| `winrm` | `--ps`, `--cmd` | `ALIAS "command"` | | `winrm` | `--ps`, `--cmd` | `ALIAS "command"` |
@@ -64,7 +64,7 @@ python ~/.server-connections/ssh.py --sql-databases "mariadb-alias"
## Общие команды ## Общие команды
### Список серверов (безопасный — alias, тип, ключ, заметки) ### Список серверов (безопасный — alias, тип, группа, ключ, заметки)
```bash ```bash
python ~/.server-connections/ssh.py --list python ~/.server-connections/ssh.py --list
``` ```
@@ -213,6 +213,11 @@ python ~/.server-connections/ssh.py --s3-url ALIAS bucket/key 86400
``` ```
По умолчанию ссылка действует 1 час (3600 сек). Второй аргумент — время жизни в секундах (например 86400 = 24 часа). По умолчанию ссылка действует 1 час (3600 сек). Второй аргумент — время жизни в секундах (например 86400 = 24 часа).
### Создать бакет
```bash
python ~/.server-connections/ssh.py --s3-create-bucket ALIAS bucket-name
```
### Типичный workflow: "создай папку и залей файл" ### Типичный workflow: "создай папку и залей файл"
```bash ```bash
# 1. Посмотри бакеты # 1. Посмотри бакеты

View File

@@ -43,6 +43,7 @@ S3 (type: s3):
python ssh.py --s3-download ALIAS bucket/key local # download file python ssh.py --s3-download ALIAS bucket/key local # download file
python ssh.py --s3-delete ALIAS bucket/key # delete object python ssh.py --s3-delete ALIAS bucket/key # delete object
python ssh.py --s3-url ALIAS bucket/key [SEC] # presigned URL (default 3600s) python ssh.py --s3-url ALIAS bucket/key [SEC] # presigned URL (default 3600s)
python ssh.py --s3-create-bucket ALIAS name # create bucket
WinRM (type: winrm): WinRM (type: winrm):
python ssh.py --ps ALIAS "Get-Process" # PowerShell via WinRM python ssh.py --ps ALIAS "Get-Process" # PowerShell via WinRM
@@ -101,6 +102,11 @@ def load_servers():
return data, {s["alias"]: s for s in data.get("servers", [])} return data, {s["alias"]: s for s in data.get("servers", [])}
def _group_map(data: dict) -> dict:
"""Map group UUID → group name."""
return {g["id"]: g.get("name", "") for g in data.get("groups", [])}
def save_servers(data): def save_servers(data):
servers_file = _get_servers_file() servers_file = _get_servers_file()
text = json.dumps(data, indent=2, ensure_ascii=False) text = json.dumps(data, indent=2, ensure_ascii=False)
@@ -778,7 +784,8 @@ def ping_server(server: dict):
def list_servers(full=False): def list_servers(full=False):
_, servers = load_servers() data, servers = load_servers()
groups = _group_map(data)
if full: if full:
# WARNING: full mode shows sensitive data (IP, port, user) # WARNING: full mode shows sensitive data (IP, port, user)
# Only for local/manual use, NEVER through AI API # Only for local/manual use, NEVER through AI API
@@ -790,13 +797,14 @@ def list_servers(full=False):
print(f"{alias:<20} {s['ip']:<20} {s.get('port', 22):<8} {s.get('user', 'root'):<10} {has_key:<6}") print(f"{alias:<20} {s['ip']:<20} {s.get('port', 22):<8} {s.get('user', 'root'):<10} {has_key:<6}")
else: else:
# Safe mode: only aliases (no IPs, ports, users) # Safe mode: only aliases (no IPs, ports, users)
print(f"{'Alias':<20} {'Type':<10} {'Key':<6} {'Notes'}") print(f"{'Alias':<20} {'Type':<10} {'Group':<14} {'Key':<6} {'Notes'}")
print("-" * 70) print("-" * 80)
for alias, s in servers.items(): for alias, s in servers.items():
has_key = "yes" if os.path.exists(SSH_KEY_PATH) else "no" has_key = "yes" if os.path.exists(SSH_KEY_PATH) else "no"
stype = s.get("type", "ssh") stype = s.get("type", "ssh")
group_name = groups.get(s.get("group", ""), "-")
notes = s.get("notes", "") notes = s.get("notes", "")
print(f"{alias:<20} {stype:<10} {has_key:<6} {notes}") print(f"{alias:<20} {stype:<10} {group_name:<14} {has_key:<6} {notes}")
def _resolve_alias(alias: str, servers: dict) -> str: def _resolve_alias(alias: str, servers: dict) -> str:
@@ -830,12 +838,16 @@ def _resolve_alias(alias: str, servers: dict) -> str:
def server_info(alias: str): def server_info(alias: str):
"""Show server info safe for AI context — NO ip, user, password, port, totp_secret.""" """Show server info safe for AI context — NO ip, user, password, port, totp_secret."""
_, servers = load_servers() data, servers = load_servers()
groups = _group_map(data)
alias = _resolve_alias(alias, servers) alias = _resolve_alias(alias, servers)
s = servers[alias] s = servers[alias]
has_key = "yes" if os.path.exists(SSH_KEY_PATH) else "no" has_key = "yes" if os.path.exists(SSH_KEY_PATH) else "no"
print(f"Alias: {s['alias']}") print(f"Alias: {s['alias']}")
print(f"Type: {s.get('type', 'ssh')}") print(f"Type: {s.get('type', 'ssh')}")
group_name = groups.get(s.get("group", ""), "")
if group_name:
print(f"Group: {group_name}")
print(f"Key: {has_key}") print(f"Key: {has_key}")
print(f"Auth: {s.get('auth', 'password')}") print(f"Auth: {s.get('auth', 'password')}")
print(f"2FA: {'yes' if s.get('totp_secret') else 'no'}") print(f"2FA: {'yes' if s.get('totp_secret') else 'no'}")
@@ -1481,6 +1493,17 @@ def s3_url(server: dict, remote_path: str, expires: int = 3600):
sys.exit(1) sys.exit(1)
def s3_create_bucket(server: dict, bucket_name: str):
    """CLI handler: create bucket *bucket_name* on the S3 server *server*.

    Prints a confirmation line on success; on any failure prints the
    error to stderr and exits with status 1, matching the other s3_*
    CLI handlers.
    """
    client = _get_s3_client(server)
    try:
        client.create_bucket(Bucket=bucket_name)
        print(f"Bucket created: {bucket_name}")
    except Exception as e:
        print(f"ERROR: {e}", file=sys.stderr)
        sys.exit(1)
# ── Grafana commands ────────────────────────────────── # ── Grafana commands ──────────────────────────────────
def _grafana_request(server: dict, endpoint: str) -> dict: def _grafana_request(server: dict, endpoint: str) -> dict:
@@ -1791,6 +1814,11 @@ def main():
expires = int(sys.argv[4]) if len(sys.argv) >= 5 else 3600 expires = int(sys.argv[4]) if len(sys.argv) >= 5 else 3600
s3_url(servers[alias], sys.argv[3], expires) s3_url(servers[alias], sys.argv[3], expires)
sys.exit(0) sys.exit(0)
if cmd == "--s3-create-bucket" and len(sys.argv) >= 4:
_, servers = load_servers()
alias = _resolve_alias(sys.argv[2], servers)
s3_create_bucket(servers[alias], sys.argv[3])
sys.exit(0)
# ── Grafana commands ── # ── Grafana commands ──
if cmd == "--grafana-dashboards" and len(sys.argv) >= 3: if cmd == "--grafana-dashboards" and len(sys.argv) >= 3:

View File

@@ -1,6 +1,6 @@
"""Version info for ServerManager.""" """Version info for ServerManager."""
__version__ = "1.9.22" __version__ = "1.9.25"
__app_name__ = "ServerManager" __app_name__ = "ServerManager"
__author__ = "aibot777" __author__ = "aibot777"
__description__ = "Desktop GUI for managing remote servers" __description__ = "Desktop GUI for managing remote servers"