feat: multi-type server support — SQL, Redis, Grafana, Prometheus, Telnet, WinRM, RDP/VNC

Full implementation of multi-type server management across GUI and CLI:

New clients: SQLClient (MariaDB/MSSQL/PostgreSQL), RedisClient, GrafanaClient,
PrometheusClient, TelnetSession, WinRMClient, RemoteDesktopLauncher.

New GUI tabs: QueryTab (SQL editor + Treeview), RedisTab (console + history),
GrafanaTab (dashboards + alerts), PrometheusTab (PromQL + targets),
PowershellTab (PS/CMD), LaunchTab (RDP/VNC external client).

Infrastructure: TAB_REGISTRY for conditional tabs per server type,
adaptive server_dialog fields, colored type badges in sidebar,
status checker for all types (SSH/TCP/SQL/Redis/HTTP), 100+ i18n keys.

CLI: ssh.py extended with --sql, --redis, --grafana-*, --prom-*, --ps, --cmd.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
chrome-storm-c442
2026-02-24 09:35:24 -05:00
parent 2d1d942ddc
commit eede67e6a9
26 changed files with 3990 additions and 168 deletions

View File

@@ -1,7 +1,6 @@
"""
Connection factory — stubs for non-SSH connection types.
SSH is fully implemented via SSHClientWrapper.
Other types are placeholders for future implementation.
Connection factory — creates connection wrappers based on server type.
Uses lazy imports so missing optional dependencies don't crash the app.
"""
from core.ssh_client import SSHClientWrapper
@@ -14,12 +13,32 @@ def create_connection(server: dict, key_path: str = ""):
if server_type == "ssh":
return SSHClientWrapper(server, key_path)
# Stubs for future types
if server_type == "rdp":
raise NotImplementedError("RDP connections — use mstsc.exe or rdesktop")
if server_type == "telnet":
raise NotImplementedError("Telnet connections — planned")
from core.telnet_client import TelnetSession
return TelnetSession(server)
if server_type in ("mariadb", "mssql", "postgresql"):
raise NotImplementedError(f"{server_type.upper()} connections — planned")
from core.sql_client import SQLClient
return SQLClient(server)
if server_type == "redis":
from core.redis_client import RedisClient
return RedisClient(server)
if server_type == "grafana":
from core.grafana_client import GrafanaClient
return GrafanaClient(server)
if server_type == "prometheus":
from core.prometheus_client import PrometheusClient
return PrometheusClient(server)
if server_type == "winrm":
from core.winrm_client import WinRMClient
return WinRMClient(server)
if server_type in ("rdp", "vnc"):
from core.remote_desktop import RemoteDesktopLauncher
return RemoteDesktopLauncher()
raise ValueError(f"Unknown server type: {server_type}")

170
core/grafana_client.py Normal file
View File

@@ -0,0 +1,170 @@
"""
Grafana API client for ServerManager.
Provides dashboard listing, alert management, datasource queries,
and annotation creation via Grafana HTTP API.
"""
from __future__ import annotations
from typing import Any
from core.logger import log
class GrafanaClient:
"""Client for interacting with a Grafana instance via its HTTP API."""
def __init__(self, server: dict) -> None:
"""
Initialize the Grafana client.
Args:
server: dict with keys: ip, port, api_token, use_ssl
"""
self.ip: str = server["ip"]
self.port: int = int(server["port"])
self.api_token: str = server["api_token"]
self.use_ssl: bool = bool(server.get("use_ssl", False))
scheme = "https" if self.use_ssl else "http"
self.base_url: str = f"{scheme}://{self.ip}:{self.port}"
self.headers: dict[str, str] = {
"Authorization": f"Bearer {self.api_token}",
"Content-Type": "application/json",
}
self.timeout: int = 10
def _get(self, path: str, params: dict | None = None) -> Any:
"""Send a GET request to the Grafana API."""
import requests
url = f"{self.base_url}{path}"
log.debug("Grafana GET %s", url)
resp = requests.get(
url, headers=self.headers, params=params, timeout=self.timeout
)
resp.raise_for_status()
return resp.json()
def _post(self, path: str, json_data: dict | None = None) -> Any:
"""Send a POST request to the Grafana API."""
import requests
url = f"{self.base_url}{path}"
log.debug("Grafana POST %s", url)
resp = requests.post(
url, headers=self.headers, json=json_data, timeout=self.timeout
)
resp.raise_for_status()
return resp.json()
def check_connection(self) -> bool:
"""
Check connectivity to Grafana via GET /api/health.
Returns:
True if Grafana responds successfully, False otherwise.
"""
try:
result = self._get("/api/health")
healthy = result.get("database", "") == "ok"
log.info("Grafana health check: %s", "OK" if healthy else "FAIL")
return healthy
except Exception as exc:
log.error("Grafana health check failed: %s", exc)
return False
def list_dashboards(self) -> list[dict]:
"""
List all dashboards via GET /api/search.
Returns:
List of dicts with keys: uid, title, folder, url.
"""
try:
results = self._get("/api/search", params={"type": "dash-db"})
dashboards = [
{
"uid": d.get("uid", ""),
"title": d.get("title", ""),
"folder": d.get("folderTitle", ""),
"url": d.get("url", ""),
}
for d in results
]
log.info("Grafana: found %d dashboards", len(dashboards))
return dashboards
except Exception as exc:
log.error("Grafana list_dashboards failed: %s", exc)
return []
def get_dashboard(self, uid: str) -> dict:
"""
Get a single dashboard by UID via GET /api/dashboards/uid/{uid}.
Args:
uid: Dashboard UID string.
Returns:
Full dashboard JSON dict, or empty dict on error.
"""
try:
result = self._get(f"/api/dashboards/uid/{uid}")
log.info("Grafana: loaded dashboard '%s'", uid)
return result
except Exception as exc:
log.error("Grafana get_dashboard(%s) failed: %s", uid, exc)
return {}
def list_alerts(self) -> list[dict]:
"""
List provisioned alert rules via GET /api/v1/provisioning/alert-rules.
Returns:
List of alert rule dicts, or empty list on error.
"""
try:
results = self._get("/api/v1/provisioning/alert-rules")
log.info("Grafana: found %d alert rules", len(results))
return results
except Exception as exc:
log.error("Grafana list_alerts failed: %s", exc)
return []
def list_datasources(self) -> list[dict]:
"""
List all datasources via GET /api/datasources.
Returns:
List of datasource dicts, or empty list on error.
"""
try:
results = self._get("/api/datasources")
log.info("Grafana: found %d datasources", len(results))
return results
except Exception as exc:
log.error("Grafana list_datasources failed: %s", exc)
return []
def create_annotation(self, text: str, tags: list[str] | None = None) -> dict:
"""
Create a global annotation via POST /api/annotations.
Args:
text: Annotation text/description.
tags: Optional list of tag strings.
Returns:
API response dict, or empty dict on error.
"""
payload: dict[str, Any] = {"text": text}
if tags:
payload["tags"] = tags
try:
result = self._post("/api/annotations", json_data=payload)
log.info("Grafana: created annotation id=%s", result.get("id"))
return result
except Exception as exc:
log.error("Grafana create_annotation failed: %s", exc)
return {}

View File

@@ -233,6 +233,12 @@ _EN = {
"totp_secret_dialog": "TOTP Secret",
"placeholder_totp_secret": "Base32 secret (optional)",
"port_out_of_range": "Port must be 1-65535",
"database": "Database",
"db_index": "DB Index",
"api_token": "API Token",
"placeholder_api_token": "Bearer token or API key",
"use_ssl": "Use SSL / HTTPS",
"db_index_must_be_number": "DB index must be a number",
# Monitoring
"monitoring": "Monitoring",
@@ -289,6 +295,107 @@ _EN = {
"recursive_delete_confirm": "Delete folder '{name}' and all contents?",
"drive": "Drive",
"active_sessions": "Active: {count}",
# Tab names (new server types)
"query": "Query",
"console": "Console",
"dashboards": "Dashboards",
"alerts": "Alerts",
"metrics": "Metrics",
"targets": "Targets",
"powershell": "PowerShell",
"launch": "Connect",
# Server dialog fields (new types)
"database": "Database",
"placeholder_database": "mydb",
"db_index": "DB Index (0-15)",
"placeholder_db_index": "0",
"api_token": "API Token",
"placeholder_api_token": "Token...",
"use_ssl": "Use SSL/TLS",
# Query tab
"query_execute": "Execute (F5)",
"query_clear": "Clear",
"query_export_csv": "Export CSV",
"query_database": "Database:",
"query_editor_placeholder": "Enter SQL query...",
"query_status_rows": "{rows} rows | {elapsed}s",
"query_error": "Error: {error}",
"query_no_results": "Query executed, no results",
"query_connected": "Connected to {alias} ({db})",
"query_connecting": "Connecting...",
"query_disconnected": "Not connected",
"query_exported": "Exported to {path}",
# Redis tab
"redis_execute": "Execute",
"redis_db": "DB:",
"redis_keys_count": "Keys: {count}",
"redis_memory": "Mem: {mem}",
"redis_prompt": "redis>",
"redis_connected": "Connected to {alias}",
"redis_connecting": "Connecting...",
"redis_disconnected": "Not connected",
"redis_error": "Error: {error}",
# Grafana tab
"grafana_dashboards": "Dashboards",
"grafana_alerts": "Alerts",
"grafana_uid": "UID",
"grafana_title": "Title",
"grafana_folder": "Folder",
"grafana_state": "State",
"grafana_name": "Name",
"grafana_severity": "Severity",
"grafana_connected": "Connected to {alias}",
"grafana_no_dashboards": "No dashboards found",
"grafana_no_alerts": "No alerts",
# Prometheus tab
"prom_query": "PromQL Query",
"prom_execute": "Execute",
"prom_targets": "Targets",
"prom_alerts": "Alerts",
"prom_job": "Job",
"prom_instance": "Instance",
"prom_health": "Health",
"prom_last_scrape": "Last Scrape",
"prom_connected": "Connected to {alias}",
"prom_no_targets": "No targets",
"prom_no_alerts": "No alerts",
"prom_placeholder": "up",
# PowerShell tab
"ps_execute": "Execute",
"ps_mode_ps": "PowerShell",
"ps_mode_cmd": "CMD",
"ps_placeholder_ps": "Get-Process...",
"ps_placeholder_cmd": "dir...",
"ps_history_empty": "No command history",
"ps_disconnected": "Not connected",
"ps_connecting": "Connecting...",
"ps_connected": "Connected to {alias}",
"ps_connect_failed": "Connection failed: {error}",
"ps_not_connected": "Not connected to server",
"ps_running": "Running...",
"ps_done": "Done",
"ps_exec_error": "Error: {error}",
# Launch tab
"launch_connect": "Connect",
"launch_rdp_info": "Remote Desktop (RDP) to {alias}",
"launch_vnc_info": "VNC connection to {alias}",
"launch_started": "Client launched",
"launch_starting": "Launching...",
"launch_error": "Launch failed: {error}",
"launch_no_server": "Select a server to connect",
# Info tab type-specific
"info_database": "Database:",
"info_ssl": "SSL:",
"info_db_index": "DB Index:",
}
_RU = {
@@ -499,6 +606,12 @@ _RU = {
"totp_secret_dialog": "TOTP-секрет",
"placeholder_totp_secret": "Base32 секрет (необязательно)",
"port_out_of_range": "Порт должен быть от 1 до 65535",
"database": "База данных",
"db_index": "Индекс БД",
"api_token": "API-токен",
"placeholder_api_token": "Bearer-токен или API-ключ",
"use_ssl": "Использовать SSL / HTTPS",
"db_index_must_be_number": "Индекс БД должен быть числом",
# Monitoring
"monitoring": "Мониторинг",
@@ -555,6 +668,107 @@ _RU = {
"recursive_delete_confirm": "Удалить папку '{name}' со всем содержимым?",
"drive": "Диск",
"active_sessions": "Активных: {count}",
# Tab names (new server types)
"query": "Запросы",
"console": "Консоль",
"dashboards": "Дашборды",
"alerts": "Оповещения",
"metrics": "Метрики",
"targets": "Цели",
"powershell": "PowerShell",
"launch": "Подключение",
# Server dialog fields (new types)
"database": "База данных",
"placeholder_database": "mydb",
"db_index": "Индекс БД (0-15)",
"placeholder_db_index": "0",
"api_token": "API-токен",
"placeholder_api_token": "Токен...",
"use_ssl": "Использовать SSL/TLS",
# Query tab
"query_execute": "Выполнить (F5)",
"query_clear": "Очистить",
"query_export_csv": "Экспорт CSV",
"query_database": "База данных:",
"query_editor_placeholder": "Введите SQL запрос...",
"query_status_rows": "{rows} строк | {elapsed}с",
"query_error": "Ошибка: {error}",
"query_no_results": "Запрос выполнен, нет результатов",
"query_connected": "Подключено к {alias} ({db})",
"query_connecting": "Подключение...",
"query_disconnected": "Не подключено",
"query_exported": "Экспортировано в {path}",
# Redis tab
"redis_execute": "Выполнить",
"redis_db": "БД:",
"redis_keys_count": "Ключей: {count}",
"redis_memory": "Память: {mem}",
"redis_prompt": "redis>",
"redis_connected": "Подключено к {alias}",
"redis_connecting": "Подключение...",
"redis_disconnected": "Не подключено",
"redis_error": "Ошибка: {error}",
# Grafana tab
"grafana_dashboards": "Дашборды",
"grafana_alerts": "Оповещения",
"grafana_uid": "UID",
"grafana_title": "Название",
"grafana_folder": "Папка",
"grafana_state": "Состояние",
"grafana_name": "Имя",
"grafana_severity": "Серьёзность",
"grafana_connected": "Подключено к {alias}",
"grafana_no_dashboards": "Дашборды не найдены",
"grafana_no_alerts": "Нет оповещений",
# Prometheus tab
"prom_query": "PromQL запрос",
"prom_execute": "Выполнить",
"prom_targets": "Цели",
"prom_alerts": "Оповещения",
"prom_job": "Job",
"prom_instance": "Инстанс",
"prom_health": "Здоровье",
"prom_last_scrape": "Последний опрос",
"prom_connected": "Подключено к {alias}",
"prom_no_targets": "Нет целей",
"prom_no_alerts": "Нет оповещений",
"prom_placeholder": "up",
# PowerShell tab
"ps_execute": "Выполнить",
"ps_mode_ps": "PowerShell",
"ps_mode_cmd": "CMD",
"ps_placeholder_ps": "Get-Process...",
"ps_placeholder_cmd": "dir...",
"ps_history_empty": "Нет истории команд",
"ps_disconnected": "Не подключено",
"ps_connecting": "Подключение...",
"ps_connected": "Подключено к {alias}",
"ps_connect_failed": "Ошибка подключения: {error}",
"ps_not_connected": "Нет подключения к серверу",
"ps_running": "Выполнение...",
"ps_done": "Готово",
"ps_exec_error": "Ошибка: {error}",
# Launch tab
"launch_connect": "Подключиться",
"launch_rdp_info": "Удалённый рабочий стол (RDP) к {alias}",
"launch_vnc_info": "VNC-подключение к {alias}",
"launch_started": "Клиент запущен",
"launch_starting": "Запуск...",
"launch_error": "Ошибка запуска: {error}",
"launch_no_server": "Выберите сервер для подключения",
# Info tab type-specific
"info_database": "База данных:",
"info_ssl": "SSL:",
"info_db_index": "Индекс БД:",
}
_ZH = {
@@ -765,6 +979,12 @@ _ZH = {
"totp_secret_dialog": "TOTP密钥",
"placeholder_totp_secret": "Base32密钥可选",
"port_out_of_range": "端口必须在1-65535之间",
"database": "数据库",
"db_index": "数据库索引",
"api_token": "API令牌",
"placeholder_api_token": "Bearer令牌或API密钥",
"use_ssl": "使用 SSL / HTTPS",
"db_index_must_be_number": "数据库索引必须是数字",
# Monitoring
"monitoring": "监控",
@@ -821,6 +1041,107 @@ _ZH = {
"recursive_delete_confirm": "删除文件夹 '{name}' 及所有内容?",
"drive": "驱动器",
"active_sessions": "活跃: {count}",
# Tab names (new server types)
"query": "查询",
"console": "控制台",
"dashboards": "仪表盘",
"alerts": "告警",
"metrics": "指标",
"targets": "目标",
"powershell": "PowerShell",
"launch": "连接",
# Server dialog fields (new types)
"database": "数据库",
"placeholder_database": "mydb",
"db_index": "数据库索引 (0-15)",
"placeholder_db_index": "0",
"api_token": "API令牌",
"placeholder_api_token": "令牌...",
"use_ssl": "使用SSL/TLS",
# Query tab
"query_execute": "执行 (F5)",
"query_clear": "清除",
"query_export_csv": "导出CSV",
"query_database": "数据库:",
"query_editor_placeholder": "输入SQL查询...",
"query_status_rows": "{rows} 行 | {elapsed}",
"query_error": "错误: {error}",
"query_no_results": "查询已执行,无结果",
"query_connected": "已连接到 {alias} ({db})",
"query_connecting": "连接中...",
"query_disconnected": "未连接",
"query_exported": "已导出到 {path}",
# Redis tab
"redis_execute": "执行",
"redis_db": "数据库:",
"redis_keys_count": "键数: {count}",
"redis_memory": "内存: {mem}",
"redis_prompt": "redis>",
"redis_connected": "已连接到 {alias}",
"redis_connecting": "连接中...",
"redis_disconnected": "未连接",
"redis_error": "错误: {error}",
# Grafana tab
"grafana_dashboards": "仪表盘",
"grafana_alerts": "告警",
"grafana_uid": "UID",
"grafana_title": "标题",
"grafana_folder": "文件夹",
"grafana_state": "状态",
"grafana_name": "名称",
"grafana_severity": "严重程度",
"grafana_connected": "已连接到 {alias}",
"grafana_no_dashboards": "未找到仪表盘",
"grafana_no_alerts": "无告警",
# Prometheus tab
"prom_query": "PromQL查询",
"prom_execute": "执行",
"prom_targets": "目标",
"prom_alerts": "告警",
"prom_job": "任务",
"prom_instance": "实例",
"prom_health": "健康",
"prom_last_scrape": "最后抓取",
"prom_connected": "已连接到 {alias}",
"prom_no_targets": "无目标",
"prom_no_alerts": "无告警",
"prom_placeholder": "up",
# PowerShell tab
"ps_execute": "执行",
"ps_mode_ps": "PowerShell",
"ps_mode_cmd": "CMD",
"ps_placeholder_ps": "Get-Process...",
"ps_placeholder_cmd": "dir...",
"ps_history_empty": "无命令历史",
"ps_disconnected": "未连接",
"ps_connecting": "连接中...",
"ps_connected": "已连接到 {alias}",
"ps_connect_failed": "连接失败: {error}",
"ps_not_connected": "未连接到服务器",
"ps_running": "执行中...",
"ps_done": "完成",
"ps_exec_error": "错误: {error}",
# Launch tab
"launch_connect": "连接",
"launch_rdp_info": "远程桌面 (RDP) 到 {alias}",
"launch_vnc_info": "VNC连接到 {alias}",
"launch_started": "客户端已启动",
"launch_starting": "启动中...",
"launch_error": "启动失败: {error}",
"launch_no_server": "选择服务器以连接",
# Info tab type-specific
"info_database": "数据库:",
"info_ssl": "SSL:",
"info_db_index": "数据库索引:",
}
_TRANSLATIONS = {

153
core/prometheus_client.py Normal file
View File

@@ -0,0 +1,153 @@
"""
Prometheus API client for ServerManager.
Provides instant queries, range queries, target discovery,
alert listing, and rule inspection via the Prometheus HTTP API.
"""
from __future__ import annotations
from typing import Any
from core.logger import log
class PrometheusClient:
"""Client for interacting with a Prometheus instance via its HTTP API."""
def __init__(self, server: dict) -> None:
"""
Initialize the Prometheus client.
Args:
server: dict with keys: ip, port, use_ssl
"""
self.ip: str = server["ip"]
self.port: int = int(server["port"])
self.use_ssl: bool = bool(server.get("use_ssl", False))
scheme = "https" if self.use_ssl else "http"
self.base_url: str = f"{scheme}://{self.ip}:{self.port}"
self.timeout: int = 10
def _get(self, path: str, params: dict | None = None) -> Any:
"""Send a GET request to the Prometheus API."""
import requests
url = f"{self.base_url}{path}"
log.debug("Prometheus GET %s", url)
resp = requests.get(url, params=params, timeout=self.timeout)
resp.raise_for_status()
return resp.json()
def check_connection(self) -> bool:
"""
Check connectivity to Prometheus via GET /-/healthy.
Returns:
True if Prometheus responds successfully, False otherwise.
"""
import requests
try:
url = f"{self.base_url}/-/healthy"
log.debug("Prometheus health check: %s", url)
resp = requests.get(url, timeout=self.timeout)
healthy = resp.status_code == 200
log.info("Prometheus health check: %s", "OK" if healthy else "FAIL")
return healthy
except Exception as exc:
log.error("Prometheus health check failed: %s", exc)
return False
def query(self, promql: str) -> dict:
"""
Execute an instant query via GET /api/v1/query.
Args:
promql: PromQL expression string.
Returns:
API response dict with 'status', 'data', etc., or empty dict on error.
"""
try:
result = self._get("/api/v1/query", params={"query": promql})
log.info("Prometheus query: %s -> status=%s", promql, result.get("status"))
return result
except Exception as exc:
log.error("Prometheus query(%s) failed: %s", promql, exc)
return {}
def query_range(
self, promql: str, start: str, end: str, step: str
) -> dict:
"""
Execute a range query via GET /api/v1/query_range.
Args:
promql: PromQL expression string.
start: Start timestamp (RFC3339 or unix timestamp).
end: End timestamp (RFC3339 or unix timestamp).
step: Query resolution step (e.g. '15s', '1m').
Returns:
API response dict, or empty dict on error.
"""
try:
result = self._get(
"/api/v1/query_range",
params={"query": promql, "start": start, "end": end, "step": step},
)
log.info("Prometheus query_range: %s -> status=%s", promql, result.get("status"))
return result
except Exception as exc:
log.error("Prometheus query_range(%s) failed: %s", promql, exc)
return {}
def targets(self) -> dict:
"""
List all scrape targets via GET /api/v1/targets.
Returns:
API response dict with active/dropped targets, or empty dict on error.
"""
try:
result = self._get("/api/v1/targets")
active = len(result.get("data", {}).get("activeTargets", []))
log.info("Prometheus: %d active targets", active)
return result
except Exception as exc:
log.error("Prometheus targets failed: %s", exc)
return {}
def alerts(self) -> dict:
"""
List active alerts via GET /api/v1/alerts.
Returns:
API response dict with alerts, or empty dict on error.
"""
try:
result = self._get("/api/v1/alerts")
count = len(result.get("data", {}).get("alerts", []))
log.info("Prometheus: %d active alerts", count)
return result
except Exception as exc:
log.error("Prometheus alerts failed: %s", exc)
return {}
def rules(self) -> dict:
"""
List all rules (recording + alerting) via GET /api/v1/rules.
Returns:
API response dict with rule groups, or empty dict on error.
"""
try:
result = self._get("/api/v1/rules")
groups = len(result.get("data", {}).get("groups", []))
log.info("Prometheus: %d rule groups", groups)
return result
except Exception as exc:
log.error("Prometheus rules failed: %s", exc)
return {}

171
core/redis_client.py Normal file
View File

@@ -0,0 +1,171 @@
"""
Redis client wrapper — duck-typed, lazy-imports redis module.
"""
from core.logger import log
# Cached redis module object; populated on first use by _get_redis().
_redis = None
def _get_redis():
    """Return the redis module, importing it lazily on first call.

    Keeps this module importable when redis-py is not installed; the
    ImportError surfaces only when a Redis feature is actually used.
    """
    global _redis
    if _redis is None:
        import redis as _r
        _redis = _r
    return _redis
class RedisClient:
    """Manage a single Redis connection. No ABC — duck typing.

    All command helpers degrade gracefully (empty/zero/sentinel results)
    when the client is not connected, instead of raising.
    """

    def __init__(self, server: dict):
        """Store connection parameters; call connect() to open the socket.

        Args:
            server: dict with key ``ip`` and optionally ``port`` (default
                6379), ``password`` and ``db_index`` (default 0).
        """
        self._host = server["ip"]
        self._port = int(server.get("port", 6379))
        self._password = server.get("password") or None
        self._db = int(server.get("db_index", 0))
        self._conn = None  # redis.Redis instance once connect() succeeds

    # -- lifecycle --------------------------------------------------------
    def connect(self) -> bool:
        """Open the connection and verify it with PING.

        Returns:
            True on success; False on any failure (logged, never raised).
        """
        try:
            r = _get_redis()
            self._conn = r.Redis(
                host=self._host,
                port=self._port,
                password=self._password,
                db=self._db,
                decode_responses=True,
                socket_timeout=5,
                socket_connect_timeout=5,
            )
            self._conn.ping()
            log.info("Redis connected %s:%s db=%s", self._host, self._port, self._db)
            return True
        except Exception as exc:
            log.error("Redis connect failed: %s", exc)
            self._conn = None
            return False

    def disconnect(self):
        """Close the connection if open; always resets internal state."""
        if self._conn is not None:
            try:
                self._conn.close()
            except Exception:
                pass
        self._conn = None
        log.info("Redis disconnected")

    def check_connection(self) -> bool:
        """Return True when the connection is open and answers PING."""
        try:
            return self._conn is not None and self._conn.ping()
        except Exception:
            return False

    # -- commands ---------------------------------------------------------
    @staticmethod
    def _split_command(command: str) -> list[str]:
        """Split a raw command line into arguments, honoring quotes.

        Uses shlex so that e.g. ``SET key "two words"`` yields three
        arguments; falls back to plain whitespace splitting when the
        quoting is unbalanced. Unquoted commands split exactly as before.
        """
        import shlex
        try:
            return shlex.split(command)
        except ValueError:
            return command.split()

    def execute(self, command: str) -> str:
        """Parse a raw command string, execute via redis-py, return formatted.

        Fix over the original: quoted arguments are kept together instead
        of being broken on every whitespace.
        """
        if not self._conn:
            return "[not connected]"
        parts = self._split_command(command)
        if not parts:
            return ""
        try:
            result = self._conn.execute_command(*parts)
            return self._format(result)
        except Exception as exc:
            return f"(error) {exc}"

    def info(self, section=None) -> dict:
        """Return INFO output (optionally restricted to *section*); {} on error."""
        if not self._conn:
            return {}
        try:
            return self._conn.info(section) if section else self._conn.info()
        except Exception as exc:
            log.error("Redis INFO failed: %s", exc)
            return {}

    def dbsize(self) -> int:
        """Return the number of keys in the current DB; 0 on error."""
        if not self._conn:
            return 0
        try:
            return self._conn.dbsize()
        except Exception as exc:
            log.error("Redis DBSIZE failed: %s", exc)
            return 0

    def keys(self, pattern: str = "*", count: int = 100) -> list[str]:
        """Return up to *count* keys matching *pattern* via SCAN.

        SCAN (not KEYS) is used so large databases are not blocked.
        Returns [] when not connected or on error.
        """
        if not self._conn:
            return []
        result = []
        try:
            cursor = 0
            while len(result) < count:
                cursor, batch = self._conn.scan(cursor, match=pattern, count=count)
                result.extend(batch)
                if cursor == 0:  # full iteration completed
                    break
            return result[:count]
        except Exception as exc:
            log.error("Redis SCAN failed: %s", exc)
            return []

    def get_type(self, key: str) -> str:
        """Return the Redis type of *key* ('none' when missing or on error)."""
        if not self._conn:
            return "none"
        try:
            return self._conn.type(key)
        except Exception:
            return "none"

    def get_ttl(self, key: str) -> int:
        """Return TTL in seconds (-1 no expiry, -2 key missing)."""
        if not self._conn:
            return -2
        try:
            return self._conn.ttl(key)
        except Exception:
            return -2

    def get_value(self, key: str) -> str:
        """Auto-detect type and return a human-readable string.

        Collections are truncated to the first ~100 elements.
        """
        if not self._conn:
            return "(not connected)"
        try:
            t = self.get_type(key)
            if t == "string":
                return self._conn.get(key) or ""
            if t == "list":
                items = self._conn.lrange(key, 0, 99)
                return "\n".join(f"{i}) {v}" for i, v in enumerate(items))
            if t == "set":
                items = list(self._conn.sscan_iter(key, count=100))[:100]
                return "\n".join(items)
            if t == "hash":
                data = self._conn.hgetall(key)
                return "\n".join(f"{k} -> {v}" for k, v in data.items())
            if t == "zset":
                items = self._conn.zrange(key, 0, 99, withscores=True)
                return "\n".join(f"{v} (score={s})" for v, s in items)
            return f"(unknown type: {t})"
        except Exception as exc:
            return f"(error) {exc}"

    # -- helpers ----------------------------------------------------------
    @staticmethod
    def _format(value) -> str:
        """Render a redis-py reply in redis-cli style.

        Note: bool must be tested before int (bool is an int subclass).
        """
        if value is None:
            return "(nil)"
        if isinstance(value, bool):
            return "OK" if value else "(error)"
        if isinstance(value, int):
            return f"(integer) {value}"
        if isinstance(value, (list, tuple)):
            if not value:
                return "(empty list)"
            lines = [f"{i + 1}) {RedisClient._format(v)}" for i, v in enumerate(value)]
            return "\n".join(lines)
        return str(value)

124
core/remote_desktop.py Normal file
View File

@@ -0,0 +1,124 @@
"""
Remote desktop launchers — RDP and VNC via external clients.
"""
import os
import platform
import subprocess
import tempfile
from core.logger import log
class RemoteDesktopLauncher:
    """Launch external RDP/VNC clients for remote desktop connections."""

    @staticmethod
    def _build_rdp_file_text(hostname: str, port, user: str) -> str:
        """Build the text of a .rdp file for the given target.

        CRLF line endings are required by mstsc's .rdp format.
        """
        return (
            f"full address:s:{hostname}:{port}\r\n"
            f"username:s:{user}\r\n"
            "prompt for credentials:i:1\r\n"
            "screen mode id:i:2\r\n"
            "desktopwidth:i:1920\r\n"
            "desktopheight:i:1080\r\n"
            "session bpp:i:32\r\n"
            "compression:i:1\r\n"
            "disable wallpaper:i:0\r\n"
            "allow font smoothing:i:1\r\n"
            "networkautodetect:i:1\r\n"
            "bandwidthautodetect:i:1\r\n"
        )

    @staticmethod
    def launch_rdp(server: dict) -> str:
        """Generate a .rdp temp file and launch the system RDP client.

        Args:
            server: dict with key ``ip`` and optionally ``port`` (3389),
                ``user`` (Administrator) and ``alias``.

        Returns:
            Status message string.

        Fix over the original: on Linux, a missing rdesktop no longer
        raises an uncaught FileNotFoundError — a status message is
        returned instead, matching launch_vnc's behavior.
        """
        hostname = server["ip"]
        port = server.get("port", 3389)
        user = server.get("user", "Administrator")
        alias = server.get("alias", "remote")
        rdp_file = os.path.join(tempfile.gettempdir(), f"sm_{alias}.rdp")
        with open(rdp_file, "w", encoding="utf-8") as f:
            f.write(RemoteDesktopLauncher._build_rdp_file_text(hostname, port, user))
        log.info("RDP file created: %s", rdp_file)
        system = platform.system()
        if system == "Windows":
            os.startfile(rdp_file)
            return f"RDP launched via mstsc for {alias}"
        if system == "Linux":
            try:
                subprocess.Popen(
                    ["xfreerdp", f"/v:{hostname}:{port}", f"/u:{user}", "/dynamic-resolution"],
                    stdout=subprocess.DEVNULL,
                    stderr=subprocess.DEVNULL,
                )
                return f"RDP launched via xfreerdp for {alias}"
            except FileNotFoundError:
                log.warning("xfreerdp not found, trying rdesktop")
            try:
                subprocess.Popen(
                    ["rdesktop", f"{hostname}:{port}", "-u", user],
                    stdout=subprocess.DEVNULL,
                    stderr=subprocess.DEVNULL,
                )
                return f"RDP launched via rdesktop for {alias}"
            except FileNotFoundError:
                return (
                    "No RDP client found. Install freerdp or rdesktop. "
                    f"RDP file saved to {rdp_file}"
                )
        if system == "Darwin":
            subprocess.Popen(["open", rdp_file])
            return f"RDP launched via macOS for {alias}"
        return f"Unsupported platform: {system}. RDP file saved to {rdp_file}"

    @staticmethod
    def launch_vnc(server: dict) -> str:
        """Launch a VNC viewer for the given server.

        Args:
            server: dict with key ``ip`` and optionally ``port`` (5900)
                and ``alias``.

        Returns:
            Status message string.
        """
        hostname = server["ip"]
        port = server.get("port", 5900)
        alias = server.get("alias", "remote")
        target = f"{hostname}:{port}"
        log.info("VNC launching for %s at %s", alias, target)
        system = platform.system()
        if system == "Windows":
            # Try well-known install locations before falling back to PATH.
            viewers = [
                r"C:\Program Files\TightVNC\tvnviewer.exe",
                r"C:\Program Files (x86)\TightVNC\tvnviewer.exe",
                r"C:\Program Files\RealVNC\VNC Viewer\vncviewer.exe",
                r"C:\Program Files (x86)\RealVNC\VNC Viewer\vncviewer.exe",
            ]
            for viewer in viewers:
                if os.path.exists(viewer):
                    subprocess.Popen([viewer, target])
                    return f"VNC launched via {os.path.basename(viewer)} for {alias}"
            try:
                subprocess.Popen(["vncviewer", target])
                return f"VNC launched via vncviewer for {alias}"
            except FileNotFoundError:
                return "No VNC viewer found. Install TightVNC or RealVNC Viewer."
        if system == "Linux":
            # remmina wants a vnc:// URI; the others take host:port directly.
            for cmd in ["vncviewer", "xtigervncviewer", "remmina"]:
                try:
                    args = [cmd, target] if cmd != "remmina" else [cmd, f"vnc://{target}"]
                    subprocess.Popen(args, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
                    return f"VNC launched via {cmd} for {alias}"
                except FileNotFoundError:
                    continue
            return "No VNC viewer found. Install tigervnc-viewer or remmina."
        if system == "Darwin":
            # macOS Screen Sharing handles vnc:// URLs natively.
            subprocess.Popen(["open", f"vnc://{target}"])
            return f"VNC launched via macOS Screen Sharing for {alias}"
        return f"Unsupported platform: {system}"

View File

@@ -26,15 +26,20 @@ BACKUP_DIR = os.path.join(SHARED_DIR, "backups")
LOCAL_CONFIG_DIR = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "config")
EXAMPLE_FILE = os.path.join(LOCAL_CONFIG_DIR, "servers.example.json")
# All server types the application can manage.  (The original carried a
# stale duplicate assignment left over from the diff; only one list remains.)
SERVER_TYPES = ["ssh", "telnet", "rdp", "vnc", "winrm", "mariadb", "mssql", "postgresql", "redis", "grafana", "prometheus"]

# Default port pre-filled in the server dialog for each type; keys must
# stay in sync with SERVER_TYPES.
DEFAULT_PORTS = {
    "ssh": 22,
    "telnet": 23,
    "rdp": 3389,
    "vnc": 5900,
    "winrm": 5985,
    "mariadb": 3306,
    "mssql": 1433,
    "postgresql": 5432,
    "redis": 6379,
    "grafana": 3000,
    "prometheus": 9090,
}
# Auto-backup interval: 10 minutes

197
core/sql_client.py Normal file
View File

@@ -0,0 +1,197 @@
"""
SQL client — connect and query MariaDB/MySQL, PostgreSQL, MSSQL.
Drivers are imported lazily so the module loads even if a driver is missing.
"""
import time
from core.logger import log
class SQLClient:
"""Unified SQL client for MariaDB/MySQL, PostgreSQL, and MSSQL."""
DRIVERS = {"mariadb": "pymysql", "mysql": "pymysql", "postgresql": "psycopg2", "mssql": "pymssql"}
def __init__(self, server: dict):
    """Capture connection parameters; no connection is opened yet.

    Args:
        server: dict with keys ``type`` (mariadb/mysql/postgresql/mssql),
            ``ip``, ``user``, ``password`` and optionally ``port`` and
            ``database``.
    """
    self._type = server["type"].lower()
    self._ip = server["ip"]
    # _default_port() reads self._type, so _type must be assigned above.
    self._port = int(server.get("port", self._default_port()))
    self._user = server["user"]
    self._password = server["password"]
    self._database = server.get("database", "")
    self._conn = None  # driver connection object, set by connect()
def _default_port(self) -> int:
    """Return the conventional port for the configured engine (3306 fallback)."""
    return {"mariadb": 3306, "mysql": 3306, "postgresql": 5432, "mssql": 1433}.get(self._type, 3306)
# ── connection ──────────────────────────────────────────────
def connect(self) -> bool:
    """Open a connection with the driver matching the configured type.

    Drivers (pymysql / psycopg2 / pymssql) are imported lazily so a
    missing driver only fails when that engine is actually used.

    Returns:
        True on success, False on any failure (logged, never raised).
    """
    try:
        if self._type in ("mariadb", "mysql"):
            import pymysql
            self._conn = pymysql.connect(
                host=self._ip, port=self._port, user=self._user,
                password=self._password, database=self._database or None,
                charset="utf8mb4", connect_timeout=10, autocommit=True,
            )
        elif self._type == "postgresql":
            import psycopg2
            self._conn = psycopg2.connect(
                host=self._ip, port=self._port, user=self._user,
                password=self._password, dbname=self._database or "postgres",
                connect_timeout=10,
            )
            # psycopg2 exposes autocommit as an attribute, not a connect kwarg.
            self._conn.autocommit = True
        elif self._type == "mssql":
            import pymssql
            self._conn = pymssql.connect(
                server=self._ip, port=self._port, user=self._user,
                password=self._password, database=self._database or "master",
                login_timeout=10, charset="UTF-8",
            )
        else:
            log.error("sql_client: unsupported type %s", self._type)
            return False
        log.info("sql_client: connected to %s (%s)", self._type, self._ip)
        return True
    except Exception as exc:
        log.error("sql_client: connect failed — %s", exc)
        return False
def disconnect(self):
    """Close the connection if open; errors during close are ignored."""
    if self._conn:
        try:
            self._conn.close()
        except Exception:
            pass
    self._conn = None
    log.info("sql_client: disconnected")
def check_connection(self) -> bool:
    """Cheap liveness probe: run ``SELECT 1`` on the current connection.

    Returns False on any error — including when never connected
    (``self._conn`` is None, raising AttributeError, caught here).
    """
    try:
        cur = self._conn.cursor()
        cur.execute("SELECT 1")
        cur.fetchone()
        cur.close()
        return True
    except Exception:
        return False
# ── query execution ─────────────────────────────────────────
def execute_query(self, sql: str, params=None) -> dict:
    """Execute *sql* and return a result summary.

    Args:
        sql: SQL text to run.
        params: optional driver-style parameter sequence/mapping.

    Returns:
        dict with keys ``columns`` (list[str]), ``rows`` (list),
        ``rowcount`` (int, -1 when unknown) and ``elapsed`` (seconds up
        to statement completion, excluding fetch).

    Raises:
        Exception: whatever the DB driver raises; logged before re-raising.

    Fix over the original: the cursor is now closed in a ``finally``
    block, so it is no longer leaked when ``execute()`` raises.
    """
    t0 = time.perf_counter()
    try:
        cur = self._conn.cursor()
        try:
            cur.execute(sql, params)
            elapsed = time.perf_counter() - t0
            if cur.description:
                columns = [col[0] for col in cur.description]
                rows = cur.fetchall()
            else:
                # DDL / DML without a result set.
                columns, rows = [], []
            return {
                "columns": columns,
                "rows": list(rows),
                "rowcount": cur.rowcount,
                "elapsed": round(elapsed, 4),
            }
        finally:
            cur.close()
    except Exception as exc:
        elapsed = time.perf_counter() - t0
        log.error("sql_client: query failed (%.3fs) — %s", elapsed, exc)
        raise
# ── introspection ───────────────────────────────────────────
def list_databases(self) -> list:
    """Return the database names visible to the connected user."""
    queries = {
        "mariadb": "SHOW DATABASES",
        "mysql": "SHOW DATABASES",
        "postgresql": "SELECT datname FROM pg_database WHERE datistemplate = false ORDER BY datname",
        "mssql": "SELECT name FROM sys.databases ORDER BY name",
    }
    result = self.execute_query(queries[self._type])
    return [row[0] for row in result["rows"]]
def list_tables(self, database: str = None) -> list:
    """Return table names, optionally switching to *database* first."""
    if database:
        self.switch_database(database)
    if self._type in ("mariadb", "mysql"):
        query = "SHOW TABLES"
    elif self._type == "postgresql":
        query = ("SELECT tablename FROM pg_tables "
                 "WHERE schemaname = 'public' ORDER BY tablename")
    else:
        # MSSQL path — information_schema keeps views out via BASE TABLE.
        query = ("SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES "
                 "WHERE TABLE_TYPE = 'BASE TABLE' ORDER BY TABLE_NAME")
    return [row[0] for row in self.execute_query(query)["rows"]]
def describe_table(self, table: str) -> list:
    """Return column metadata for *table*.

    Each entry is a dict: {name, type, nullable, key, default}.
    PostgreSQL and MSSQL use parameterized information_schema queries;
    MySQL/MariaDB cannot bind an identifier as a parameter, so the table
    name is backtick-escaped before interpolation to prevent identifier
    injection (the original interpolated it raw).
    """
    if self._type in ("mariadb", "mysql"):
        # Escape embedded backticks per MySQL identifier-quoting rules.
        safe = table.replace("`", "``")
        rows = self.execute_query("SHOW COLUMNS FROM `%s`" % safe)["rows"]
        return [{"name": r[0], "type": r[1], "nullable": r[2] == "YES",
                 "key": r[3] or "", "default": r[4]} for r in rows]
    elif self._type == "postgresql":
        sql = (
            "SELECT c.column_name, c.data_type, c.is_nullable, "
            "COALESCE(tc.constraint_type, ''), c.column_default "
            "FROM information_schema.columns c "
            "LEFT JOIN information_schema.key_column_usage kcu "
            "  ON c.table_name = kcu.table_name AND c.column_name = kcu.column_name "
            "LEFT JOIN information_schema.table_constraints tc "
            "  ON kcu.constraint_name = tc.constraint_name "
            "WHERE c.table_name = %s AND c.table_schema = 'public' "
            "ORDER BY c.ordinal_position"
        )
        rows = self.execute_query(sql, (table,))["rows"]
        return [{"name": r[0], "type": r[1], "nullable": r[2] == "YES",
                 "key": r[3], "default": r[4]} for r in rows]
    else:  # mssql
        sql = (
            "SELECT c.COLUMN_NAME, c.DATA_TYPE, c.IS_NULLABLE, "
            "ISNULL(tc.CONSTRAINT_TYPE, ''), c.COLUMN_DEFAULT "
            "FROM INFORMATION_SCHEMA.COLUMNS c "
            "LEFT JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE kcu "
            "  ON c.TABLE_NAME = kcu.TABLE_NAME AND c.COLUMN_NAME = kcu.COLUMN_NAME "
            "LEFT JOIN INFORMATION_SCHEMA.TABLE_CONSTRAINTS tc "
            "  ON kcu.CONSTRAINT_NAME = tc.CONSTRAINT_NAME "
            "WHERE c.TABLE_NAME = %s ORDER BY c.ORDINAL_POSITION"
        )
        rows = self.execute_query(sql, (table,))["rows"]
        return [{"name": r[0], "type": r[1], "nullable": r[2] == "YES",
                 "key": r[3], "default": r[4]} for r in rows]
def current_database(self) -> str:
    """Name of the database the session is currently using ('' if none)."""
    per_type = {
        "mariadb": "SELECT DATABASE()",
        "mysql": "SELECT DATABASE()",
        "postgresql": "SELECT current_database()",
        "mssql": "SELECT DB_NAME()",
    }
    rows = self.execute_query(per_type[self._type])["rows"]
    if not rows:
        return ""
    return rows[0][0]
def switch_database(self, db: str):
    """Switch the active database.

    MySQL/MariaDB switch on the live connection; PostgreSQL cannot change
    database in-session, so the client reconnects; MSSQL issues USE with
    the identifier bracket-quoted (']' doubled per T-SQL delimited-identifier
    rules) so a hostile database name cannot inject SQL — the original
    interpolated it raw.
    """
    if self._type in ("mariadb", "mysql"):
        self._conn.select_db(db)
    elif self._type == "postgresql":
        # psycopg2 connections are bound to one database — reconnect.
        self.disconnect()
        self._database = db
        self.connect()
    elif self._type == "mssql":
        self.execute_query("USE [%s]" % db.replace("]", "]]"))
        self._database = db
    log.info("sql_client: switched to database %s", db)
def server_version(self) -> str:
    """Return the server's version string, or "unknown" when unavailable."""
    if self._type == "mssql":
        query = "SELECT @@VERSION"
    else:
        query = "SELECT VERSION()"
    rows = self.execute_query(query)["rows"]
    return rows[0][0] if rows else "unknown"

View File

@@ -1,7 +1,8 @@
"""
Background status checker — parallel server pings.
Background status checker — parallel server pings for all connection types.
"""
import socket
import threading
import time
from concurrent.futures import ThreadPoolExecutor, as_completed
@@ -13,6 +14,13 @@ if TYPE_CHECKING:
from core.ssh_client import SSHClientWrapper
from core.logger import log
# Types that support native check_connection()
_SSH_TYPE = {"ssh"}
_SQL_TYPES = {"mariadb", "mssql", "postgresql"}
_REDIS_TYPE = {"redis"}
_HTTP_TYPES = {"grafana", "prometheus", "winrm"}
_TCP_TYPES = {"telnet", "rdp", "vnc"}
class StatusChecker:
def __init__(self, store: "ServerStore"):
@@ -37,10 +45,86 @@ class StatusChecker:
self._gui_callback = callback
def check_one(self, server: dict) -> bool:
    """Check a single server, dispatching on its "type" (default "ssh").

    Unknown types are reported down (False) rather than raising.
    """
    kind = server.get("type", "ssh")
    if kind in _SSH_TYPE:
        return self._check_ssh(server)
    if kind in _SQL_TYPES:
        return self._check_sql(server)
    if kind in _REDIS_TYPE:
        return self._check_redis(server)
    # HTTP-style services each expose their own health endpoint.
    health_paths = {
        "grafana": "/api/health",
        "prometheus": "/-/healthy",
        "winrm": "/wsman",
    }
    if kind in health_paths:
        return self._check_http(server, health_paths[kind])
    if kind in _TCP_TYPES:
        return self._check_tcp(server)
    return False
def _check_ssh(self, server: dict) -> bool:
    """Probe an SSH server by opening a throwaway wrapper connection."""
    return SSHClientWrapper(server, self.store.get_ssh_key_path()).check_connection()
def _check_tcp(self, server: dict) -> bool:
"""Check TCP connectivity (telnet, RDP, VNC)."""
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(5)
sock.connect((server["ip"], server.get("port", 23)))
sock.close()
return True
except Exception:
return False
def _check_sql(self, server: dict) -> bool:
"""Check SQL connectivity via SELECT 1."""
try:
from core.sql_client import SQLClient
client = SQLClient(server)
result = client.connect()
if result:
ok = client.check_connection()
client.disconnect()
return ok
return False
except Exception:
return False
def _check_redis(self, server: dict) -> bool:
"""Check Redis via PING."""
try:
from core.redis_client import RedisClient
client = RedisClient(server)
result = client.connect()
if result:
ok = client.check_connection()
client.disconnect()
return ok
return False
except Exception:
return False
def _check_http(self, server: dict, path: str) -> bool:
"""Check HTTP(S) endpoint."""
try:
import requests
use_ssl = server.get("use_ssl", False)
scheme = "https" if use_ssl else "http"
url = f"{scheme}://{server['ip']}:{server.get('port', 80)}{path}"
headers = {}
api_token = server.get("api_token", "")
if api_token:
headers["Authorization"] = f"Bearer {api_token}"
resp = requests.get(url, headers=headers, timeout=5, verify=False)
return resp.status_code < 500
except Exception:
return False
def check_all_now(self):
    """Run one full status sweep immediately on a daemon thread (non-blocking)."""
    threading.Thread(target=self._check_cycle, daemon=True).start()
@@ -61,23 +145,18 @@ class StatusChecker:
if s.get("skip_check", False):
self.store.set_status(s["alias"], "disabled")
ssh_servers = [s for s in servers if s.get("type", "ssh") == "ssh" and not s.get("skip_check", False)]
checkable = [s for s in servers if not s.get("skip_check", False)]
# Mark non-SSH (non-skipped) as unknown
for s in servers:
if s.get("type", "ssh") != "ssh" and not s.get("skip_check", False):
self.store.set_status(s["alias"], "unknown")
if not ssh_servers:
if not checkable:
return
# Parallel checks — up to 10 concurrent
max_workers = min(10, len(ssh_servers))
max_workers = min(10, len(checkable))
try:
with ThreadPoolExecutor(max_workers=max_workers) as executor:
futures = {
executor.submit(self.check_one, s): s["alias"]
for s in ssh_servers
for s in checkable
}
for future in as_completed(futures, timeout=30):
if not self._running:

180
core/telnet_client.py Normal file
View File

@@ -0,0 +1,180 @@
"""
Telnet client — interactive telnet session with the same interface as ShellSession.
"""
import asyncio
import threading
from core.logger import log
class TelnetSession:
    """Interactive telnet session — same interface as ShellSession from ssh_client.py.

    The connection runs on a private asyncio event loop inside a daemon
    thread. Callers interact only through connect()/send()/disconnect();
    output is delivered via the on_data / on_disconnect callbacks, which
    fire on the background thread (NOT the GUI thread — marshal if needed).
    """
    def __init__(self, server: dict, cols: int = 80, rows: int = 24):
        # server: dict with at least "ip"; optional "port", "user",
        # "password", "alias" — same shape as the SSH server entries.
        self.server = server
        self.cols = cols
        self.rows = rows
        self._loop: asyncio.AbstractEventLoop | None = None
        self._thread: threading.Thread | None = None
        self._reader = None
        self._writer = None
        self._running = False
        # Callbacks — set by the owner
        self.on_data = None  # on_data(data: bytes)
        self.on_disconnect = None  # on_disconnect()
    @property
    def connected(self) -> bool:
        # Connected means the loop is alive AND negotiation produced a writer.
        return self._running and self._writer is not None
    def connect(self):
        """Start telnet connection in a background thread running an asyncio event loop."""
        self._running = True
        self._thread = threading.Thread(target=self._run_loop, daemon=True)
        self._thread.start()
    def _run_loop(self):
        """Entry point for the background thread — creates event loop and runs connection."""
        self._loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self._loop)
        try:
            self._loop.run_until_complete(self._async_connect())
        except Exception as e:
            log.debug(f"TelnetSession loop error: {e}")
        finally:
            # Tear down regardless of how the connection ended, then notify
            # the owner exactly once — still on this background thread.
            self._running = False
            try:
                self._loop.close()
            except Exception:
                pass
            self._loop = None
            if self.on_disconnect:
                self.on_disconnect()
    async def _async_connect(self):
        """Async telnet connection: open, login, then read loop."""
        try:
            import telnetlib3
        except ImportError:
            log.error("telnetlib3 not installed. Run: pip install telnetlib3")
            raise ImportError("telnetlib3 is required for telnet connections")
        hostname = self.server["ip"]
        port = self.server.get("port", 23)
        user = self.server.get("user", "")
        password = self.server.get("password", "")
        log.info(f"TelnetSession connecting to {self.server.get('alias', '?')} port {port}")
        # connect_minwait gives the server a moment to complete option negotiation.
        reader, writer = await telnetlib3.open_connection(
            host=hostname,
            port=port,
            cols=self.cols,
            rows=self.rows,
            connect_minwait=0.5,
        )
        self._reader = reader
        self._writer = writer
        # Login sequence — wait for prompts and send credentials
        await self._login_sequence(reader, writer, user, password)
        # Main read loop
        await self._read_loop(reader)
    async def _login_sequence(self, reader, writer, user: str, password: str):
        """Wait for login/password prompts and send credentials.

        Heuristic: scans accumulated output for "login:"/"username:" and
        "password:" markers. Everything received is still forwarded to
        on_data so the user sees the prompts. Bails out after *timeout*
        seconds of silence, or early if a shell prompt is already visible.
        """
        buf = ""
        timeout = 10.0  # seconds to wait for login prompt
        while self._running:
            try:
                data = await asyncio.wait_for(reader.read(4096), timeout=timeout)
            except asyncio.TimeoutError:
                log.debug("TelnetSession login sequence timed out waiting for prompt")
                break
            except Exception:
                break
            if not data:
                break
            # Reader may yield str or bytes depending on negotiated mode;
            # callbacks always receive bytes, matching ShellSession.
            if self.on_data:
                self.on_data(data.encode("utf-8", errors="replace") if isinstance(data, str) else data)
            buf += data if isinstance(data, str) else data.decode("utf-8", errors="replace")
            buf_lower = buf.lower()
            if "login:" in buf_lower or "username:" in buf_lower:
                writer.write(user + "\r\n")
                buf = ""
                continue
            if "password:" in buf_lower:
                writer.write(password + "\r\n")
                buf = ""
                break  # Login done, proceed to read loop
            # If we see a shell prompt, login may not be required
            if buf_lower.rstrip().endswith(("$", "#", ">")):
                break
        log.debug("TelnetSession login sequence complete")
    async def _read_loop(self, reader):
        """Read data from telnet and forward to on_data callback."""
        try:
            while self._running:
                try:
                    # Short timeout so the loop re-checks self._running promptly
                    # after disconnect() flips it from another thread.
                    data = await asyncio.wait_for(reader.read(65536), timeout=0.5)
                except asyncio.TimeoutError:
                    continue
                except Exception:
                    break
                if not data:
                    break
                raw = data.encode("utf-8", errors="replace") if isinstance(data, str) else data
                if self.on_data:
                    self.on_data(raw)
        except Exception as e:
            log.debug(f"TelnetSession read loop error: {e}")
    def send(self, data: bytes):
        """Send data to the telnet session.

        Thread-safe: the write is scheduled onto the session's event loop
        via call_soon_threadsafe. Silently drops data when not connected.
        """
        if not self._running or self._writer is None or self._loop is None:
            return
        text = data.decode("utf-8", errors="replace")
        try:
            self._loop.call_soon_threadsafe(self._writer.write, text)
        except RuntimeError:
            # Loop already closed underneath us — treat it as a disconnect.
            self._running = False
            if self.on_disconnect:
                self.on_disconnect()
    def resize(self, cols: int, rows: int):
        """Resize terminal — NAWS negotiation if supported, otherwise no-op."""
        self.cols = cols
        self.rows = rows
        # telnetlib3 handles NAWS during initial negotiation;
        # runtime resize requires protocol-level support which
        # is not reliably available, so this is a best-effort no-op.
        log.debug(f"TelnetSession resize requested: {cols}x{rows} (no-op)")
    def disconnect(self):
        """Close telnet session and stop background thread."""
        self._running = False
        if self._writer is not None:
            try:
                self._writer.close()
            except Exception as e:
                log.debug(f"TelnetSession writer close: {e}")
            self._writer = None
        self._reader = None
        if self._loop is not None:
            try:
                # Stop the loop from outside its thread; RuntimeError means
                # the loop is already shutting down.
                self._loop.call_soon_threadsafe(self._loop.stop)
            except RuntimeError:
                pass

115
core/winrm_client.py Normal file
View File

@@ -0,0 +1,115 @@
"""
WinRM client — execute PowerShell and CMD commands on remote Windows machines.
"""
from core.logger import log
class WinRMClient:
    """Remote Windows management via WinRM (pywinrm).

    Thin wrapper around winrm.Session: connect() builds the session and
    verifies it with a `hostname` round-trip; exec_ps()/exec_cmd() run
    PowerShell / CMD and return (stdout, stderr, exit_code).
    """
    def __init__(self, server: dict):
        # server: dict with "ip" and optionally "port", "user", "password",
        # "use_ssl", "alias".
        self.server = server
        self._session = None  # winrm.Session once connect() succeeds
    def connect(self) -> bool:
        """Create WinRM session and verify connectivity.

        Returns:
            True when the test command succeeds; False when it returns a
            non-zero exit code.
        Raises:
            ImportError when pywinrm is missing; transport errors from the
            verification round-trip (session is reset to None first).
        """
        try:
            import winrm
        except ImportError:
            log.error("pywinrm not installed. Run: pip install pywinrm")
            raise ImportError("pywinrm is required for WinRM connections")
        hostname = self.server["ip"]
        use_ssl = self.server.get("use_ssl", True)
        # Default WinRM ports: 5986 (HTTPS) / 5985 (HTTP).
        port = self.server.get("port", 5986 if use_ssl else 5985)
        user = self.server.get("user", "Administrator")
        password = self.server.get("password", "")
        transport = "ssl" if use_ssl else "ntlm"
        scheme = "https" if use_ssl else "http"
        endpoint = f"{scheme}://{hostname}:{port}/wsman"
        log.info(f"WinRM connecting to {self.server.get('alias', '?')} via {transport}")
        self._session = winrm.Session(
            target=endpoint,
            auth=(user, password),
            transport=transport,
            server_cert_validation="ignore",  # self-signed certs are the norm on LANs
        )
        # Verify connection with a simple command
        try:
            result = self._session.run_cmd("hostname")
        except Exception as e:
            log.error(f"WinRM connection test failed: {e}")
            self._session = None
            raise
        if result.status_code == 0:
            host = result.std_out.decode("utf-8", errors="replace").strip()
            log.info(f"WinRM connected to {host}")
            return True
        log.warning(f"WinRM connection test returned exit code {result.status_code}")
        return False
    def disconnect(self):
        """Drop the WinRM session (stateless HTTP — nothing to close)."""
        self._session = None
        log.debug("WinRM session cleared")
    def check_connection(self) -> bool:
        """Check if WinRM session is alive (cheap `echo ok` round-trip)."""
        if self._session is None:
            return False
        try:
            result = self._session.run_cmd("echo ok")
            return result.status_code == 0
        except Exception:
            return False
    def _ensure_session(self):
        """Raise ConnectionError if connect() has not been called."""
        if self._session is None:
            raise ConnectionError("WinRM session not established. Call connect() first.")
    def _run(self, label: str, runner, payload: str) -> tuple[str, str, int]:
        """Shared execution path for exec_ps/exec_cmd.

        label   — "exec_ps" / "exec_cmd", used only in log lines.
        runner  — bound session method (run_ps or run_cmd).
        payload — script or command text.
        Returns (stdout, stderr, exit_code); re-raises transport errors
        after logging them.
        """
        log.debug(f"WinRM {label}: {payload[:120]}...")
        try:
            result = runner(payload)
        except Exception as e:
            log.error(f"WinRM {label} failed: {e}")
            raise
        stdout = result.std_out.decode("utf-8", errors="replace")
        stderr = result.std_err.decode("utf-8", errors="replace")
        exit_code = result.status_code
        log.debug(f"WinRM {label} exit_code={exit_code}")
        return stdout, stderr, exit_code
    def exec_ps(self, script: str) -> tuple[str, str, int]:
        """Execute a PowerShell script on the remote host.

        Returns:
            (stdout, stderr, exit_code)
        """
        self._ensure_session()
        return self._run("exec_ps", self._session.run_ps, script)
    def exec_cmd(self, command: str) -> tuple[str, str, int]:
        """Execute a CMD command on the remote host.

        Returns:
            (stdout, stderr, exit_code)
        """
        self._ensure_session()
        return self._run("exec_cmd", self._session.run_cmd, command)