v1.8.73: reliable SFTP upload — chunked resume, SHA256 verification, adaptive retry

- CLI (ssh.py): chunked resume upload for files >10MB with .part atomic rename
- CLI: SHA256 verification (sha256sum on Linux, Get-FileHash on Windows)
- CLI: adaptive retry count based on file size (up to 30 for large files)
- CLI: SSH keepalive 15s + window_size 4MB for stable transfers
- CLI: path injection fix in SHA256 shell commands
- CLI: Windows SFTP path fix for PowerShell Get-FileHash
- GUI (ssh_client.py): chunked upload with resume in SFTPSession
- GUI: retry up to 3 attempts with SHA256 readback in SSHClientWrapper
- GUI: keepalive 15s + window_size 4MB in both auth paths
- Tested: 5MB, 15MB, 200MB uploads to Windows SSH server (116)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
chrome-storm-c442
2026-02-28 10:03:07 -05:00
parent aea5030623
commit 7e7c1d3efc
9 changed files with 358 additions and 29 deletions

View File

@@ -7,6 +7,7 @@ import platform
import socket
import threading
import time
import hashlib
import paramiko
from core.logger import log
@@ -47,7 +48,8 @@ def _connect_client(server: dict, key_path: str, timeout: int = 15) -> paramiko.
client.connect(**kwargs)
transport = client.get_transport()
if transport is not None:
transport.set_keepalive(30)
transport.set_keepalive(15)
transport.default_window_size = 4 * 1024 * 1024
return client
except paramiko.AuthenticationException:
log.debug(f"Key auth failed for {server.get('alias', '?')}, trying password")
@@ -73,7 +75,8 @@ def _connect_client(server: dict, key_path: str, timeout: int = 15) -> paramiko.
client.connect(**kwargs)
transport = client.get_transport()
if transport is not None:
transport.set_keepalive(30)
transport.set_keepalive(15)
transport.default_window_size = 4 * 1024 * 1024
return client
raise Exception(f"No auth method for {server.get('alias', 'unknown')}")
@@ -238,19 +241,92 @@ class SSHClientWrapper:
pass
client.close()
def upload(self, local_path: str, remote_path: str, progress_cb=None,
           max_retries=3):
    """Upload a local file over SFTP with retry, resume and SHA256 verification.

    Files larger than 10MB are streamed to ``remote_path + ".part"`` so an
    interrupted transfer can resume from the remote offset; the .part file is
    renamed into place only after both a size check and a full SHA256 readback
    match the local file.

    Args:
        local_path: Path of the local source file.
        remote_path: Destination path on the remote host.
        progress_cb: Optional callable(transferred_bytes, total_bytes).
        max_retries: Attempts before the last network error is re-raised.

    Raises:
        IOError: On size/SHA256 mismatch (retried as a network error).
        paramiko.SSHException, OSError, EOFError, TimeoutError: After the
            final failed attempt.
    """
    file_size = os.path.getsize(local_path)
    use_resume = file_size > 10 * 1024 * 1024
    tmp_path = (remote_path + ".part") if use_resume else remote_path
    for attempt in range(1, max_retries + 1):
        client = self.connect()
        try:
            sftp = client.open_sftp()
            if use_resume:
                # Resume from however much of the .part file already exists.
                remote_offset = 0
                try:
                    remote_offset = sftp.stat(tmp_path).st_size
                    if remote_offset > file_size:
                        # Partial is larger than the source — stale; restart.
                        sftp.remove(tmp_path)
                        remote_offset = 0
                except (FileNotFoundError, IOError):
                    remote_offset = 0
                with open(local_path, 'rb') as f:
                    f.seek(remote_offset)
                    if remote_offset > 0:
                        rf = sftp.open(tmp_path, 'r+b')
                        rf.seek(remote_offset)
                    else:
                        rf = sftp.open(tmp_path, 'wb')
                    rf.set_pipelined(True)  # overlap writes for throughput
                    try:
                        transferred = remote_offset
                        while transferred < file_size:
                            data = f.read(256 * 1024)
                            if not data:
                                break
                            rf.write(data)
                            transferred += len(data)
                            if progress_cb:
                                progress_cb(transferred, file_size)
                    finally:
                        rf.close()
                if sftp.stat(tmp_path).st_size != file_size:
                    raise IOError("Upload size mismatch")
                # SHA256 verification via SFTP readback
                local_hash = hashlib.sha256()
                with open(local_path, 'rb') as f:
                    for chunk in iter(lambda: f.read(1024 * 1024), b''):
                        local_hash.update(chunk)
                remote_hash = hashlib.sha256()
                with sftp.open(tmp_path, 'rb') as rf:
                    while True:
                        chunk = rf.read(1024 * 1024)
                        if not chunk:
                            break
                        remote_hash.update(chunk)
                if local_hash.hexdigest() != remote_hash.hexdigest():
                    sftp.remove(tmp_path)
                    raise IOError("SHA256 mismatch after upload")
                # Atomic publish: drop any previous file, then rename.
                try:
                    sftp.remove(remote_path)
                except (FileNotFoundError, IOError):
                    pass
                sftp.rename(tmp_path, remote_path)
            else:
                if progress_cb:
                    sftp.put(local_path, remote_path, callback=progress_cb)
                else:
                    sftp.put(local_path, remote_path)
            try:
                sftp.chmod(remote_path, 0o664)
            except OSError:
                pass  # Windows OpenSSH doesn't support chmod
            sftp.close()
            return  # Success
        except (EOFError, TimeoutError, OSError,
                paramiko.SSHException) as e:
            log.warning(f"Upload attempt {attempt}/{max_retries}: {e}")
            if attempt < max_retries:
                time.sleep(2 ** attempt)  # exponential backoff: 2s, 4s, ...
            else:
                raise
        finally:
            client.close()
@@ -385,11 +461,83 @@ class SFTPSession:
self._sftp.rename(old, new)
def upload(self, local_path: str, remote_path: str, progress_cb=None):
    """Upload a file; transfers >10MB go through the chunked/resumable path.

    Args:
        local_path: Path of the local source file.
        remote_path: Destination path on the remote host.
        progress_cb: Optional callable(transferred_bytes, total_bytes).
    """
    file_size = os.path.getsize(local_path)
    if file_size > 10 * 1024 * 1024:  # >10MB: chunked
        self._upload_chunked(local_path, remote_path, file_size, progress_cb)
    elif progress_cb:
        self._sftp.put(local_path, remote_path, callback=progress_cb)
    else:
        self._sftp.put(local_path, remote_path)
def _upload_chunked(self, local_path, remote_path, file_size, progress_cb):
"""Chunked upload with resume, .part, atomic rename and SHA256 verification."""
tmp_path = remote_path + ".part"
remote_offset = 0
try:
remote_offset = self._sftp.stat(tmp_path).st_size
if remote_offset > file_size:
self._sftp.remove(tmp_path)
remote_offset = 0
except (FileNotFoundError, IOError):
remote_offset = 0
with open(local_path, 'rb') as f:
f.seek(remote_offset)
if remote_offset > 0:
rf = self._sftp.open(tmp_path, 'r+b')
rf.seek(remote_offset)
else:
rf = self._sftp.open(tmp_path, 'wb')
rf.set_pipelined(True)
try:
transferred = remote_offset
while transferred < file_size:
data = f.read(256 * 1024)
if not data:
break
rf.write(data)
transferred += len(data)
if progress_cb:
progress_cb(transferred, file_size)
finally:
rf.close()
# Validate: size
if self._sftp.stat(tmp_path).st_size != file_size:
raise IOError("Upload size mismatch")
# Validate: SHA256 via SFTP readback
local_hash = hashlib.sha256()
with open(local_path, 'rb') as f:
for chunk in iter(lambda: f.read(1024 * 1024), b''):
local_hash.update(chunk)
remote_hash = hashlib.sha256()
with self._sftp.open(tmp_path, 'rb') as rf:
while True:
chunk = rf.read(1024 * 1024)
if not chunk:
break
remote_hash.update(chunk)
if local_hash.hexdigest() != remote_hash.hexdigest():
self._sftp.remove(tmp_path)
raise IOError(
f"SHA256 mismatch! local={local_hash.hexdigest()[:16]}... "
f"remote={remote_hash.hexdigest()[:16]}..."
)
# Atomic rename
try:
self._sftp.remove(remote_path)
except (FileNotFoundError, IOError):
pass
self._sftp.rename(tmp_path, remote_path)
try:
self._sftp.chmod(remote_path, 0o664)
except OSError:
pass
def download(self, remote_path: str, local_path: str, progress_cb=None):
if progress_cb:
self._sftp.get(remote_path, local_path, callback=progress_cb)

View File

@@ -201,7 +201,14 @@ unset SSH_ASKPASS && unset DISPLAY && ssh ALIAS "command"
## КРИТИЧНО — Передача файлов
**ВСЕГДА используй `--upload` / `--download` для передачи файлов.** Это SFTP-протокол: надёжный, поддерживает любые размеры, показывает прогресс.
**ВСЕГДА используй `--upload` / `--download` для передачи файлов.** Это SFTP-протокол с автоматическими фичами:
- **Файлы >10MB:** chunked upload с resume — при обрыве продолжит с того места
- **Retry:** до 5 попыток с exponential backoff при сетевых ошибках (для больших файлов — адаптивно до 30 попыток)
- **SHA256 верификация:** автоматическая проверка целостности после загрузки
- **Atomic rename:** запись в .part файл → проверка → переименование
- **Keepalive:** SSH keepalive каждые 15 секунд — не обрывается NAT/роутером
- **Прогресс:** 25/50/75% для файлов >1MB
```bash
# Загрузить файл на сервер (SFTP)

View File

@@ -45,6 +45,7 @@ import sys
import os
import json
import time
import hashlib
import paramiko
# Shared config — same file used by ServerManager GUI
@@ -118,11 +119,18 @@ def get_client(server: dict) -> paramiko.SSHClient:
"banner_timeout": 15,
}
def _harden_transport(c):
transport = c.get_transport()
if transport is not None:
transport.set_keepalive(15)
transport.default_window_size = 4 * 1024 * 1024
# Try key first
if os.path.exists(SSH_KEY_PATH):
try:
kwargs["key_filename"] = SSH_KEY_PATH
client.connect(**kwargs)
_harden_transport(client)
return client
except Exception:
del kwargs["key_filename"]
@@ -136,6 +144,7 @@ def get_client(server: dict) -> paramiko.SSHClient:
kwargs["look_for_keys"] = False
kwargs["allow_agent"] = False
client.connect(**kwargs)
_harden_transport(client)
return client
raise Exception(f"No auth method for {server['alias']}")
@@ -514,28 +523,193 @@ def _progress_cb(total_bytes: int):
return callback
# Tunables for the chunked/resumable SFTP upload path.
RESUME_THRESHOLD = 10 * 1024 * 1024 # >10MB → chunked resume
CHUNK_SIZE = 256 * 1024 # 256KB per write
MAX_RETRIES = 5 # base attempt count; scaled with file size in _upload_resumable
def _sha256_local(path: str) -> str:
"""SHA256 hash of a local file."""
h = hashlib.sha256()
with open(path, 'rb') as f:
for chunk in iter(lambda: f.read(1024 * 1024), b''):
h.update(chunk)
return h.hexdigest()
def _sha256_remote(client, remote_path: str, is_windows: bool = False) -> str | None:
"""SHA256 hash of a remote file via exec.
Returns None if sha256sum is unavailable."""
if is_windows:
# SFTP path may start with / (e.g. /C:/Users/...) — strip for PowerShell
win_path = remote_path
if win_path.startswith('/') and len(win_path) > 2 and win_path[2] == ':':
win_path = win_path[1:]
escaped = win_path.replace('"', '`"')
cmd = f'powershell -Command "(Get-FileHash -Path \\"{escaped}\\" -Algorithm SHA256).Hash"'
else:
escaped = remote_path.replace("'", "'\\''")
cmd = (f"sha256sum '{escaped}' 2>/dev/null || "
f"shasum -a 256 '{escaped}' 2>/dev/null")
try:
stdin, stdout, stderr = client.exec_command(cmd, timeout=120)
output = stdout.read().decode().strip()
exit_code = stdout.channel.recv_exit_status()
if exit_code == 0 and output:
return output.split()[0].lower()
except Exception:
pass
return None
def upload_file(server: dict, local_path: str, remote_path: str):
    """Upload a file to *server*, choosing the transfer strategy by size.

    Files above RESUME_THRESHOLD (>10MB) go through the chunked/resumable
    path with retry and SHA256 verification; smaller files use a single
    sftp.put.

    Args:
        server: Server dict understood by get_client().
        local_path: Local source file.
        remote_path: Destination path (normalized before use).
    """
    normalized = _normalize_remote_path(remote_path)
    file_size = os.path.getsize(local_path)
    if file_size > RESUME_THRESHOLD:
        _upload_resumable(server, local_path, normalized, file_size)
    else:
        _upload_simple(server, local_path, normalized, file_size)
def _upload_simple(server, local_path, remote_path, file_size):
    """Simple single-shot SFTP upload for files <=10MB (no resume/verification).

    Args:
        server: Server dict understood by get_client().
        local_path: Local source file.
        remote_path: Normalized destination path on the remote host.
        file_size: Size of the local file in bytes (for progress/summary).
    """
    client = get_client(server)
    try:
        sftp = client.open_sftp()
        t0 = time.time()
        sftp.put(local_path, remote_path, callback=_progress_cb(file_size))
        elapsed = time.time() - t0
        try:
            sftp.chmod(remote_path, 0o664)
        except OSError:
            pass  # Windows OpenSSH doesn't support chmod
        sftp.close()
        _print_result(server, local_path, remote_path, file_size, elapsed)
    finally:
        client.close()
def _upload_resumable(server, local_path, remote_path, file_size):
    """Chunked upload with resume, retry, atomic rename and SHA256 verification.

    Streams the file to ``remote_path + ".part"`` in CHUNK_SIZE pieces,
    resuming from any existing partial. Each attempt reconnects from scratch;
    after the copy, the remote size and (when a remote hash tool exists) the
    SHA256 must match before the .part file is renamed over the final path.

    Args:
        server: Server dict understood by get_client()/_is_windows_server().
        local_path: Local source file.
        remote_path: Normalized destination path on the remote host.
        file_size: Size of the local file in bytes.

    Raises:
        SystemExit: After all retry attempts fail with a network error.
        IOError: On checksum mismatch persisting through the last attempt
            (surfaces via the network-error handler as SystemExit).
    """
    tmp_path = remote_path + ".part"
    progress = _progress_cb(file_size)
    is_windows = _is_windows_server(server)
    t0 = time.time()
    # Adaptive retries: more attempts for larger files (unstable links need resume)
    max_retries = max(MAX_RETRIES, min(file_size // (10 * 1024 * 1024) + 3, 30))
    for attempt in range(1, max_retries + 1):
        client = None
        sftp = None
        try:
            client = get_client(server)
            sftp = client.open_sftp()
            # How much is already uploaded?
            remote_offset = 0
            try:
                remote_offset = sftp.stat(tmp_path).st_size
                if remote_offset > file_size:
                    sftp.remove(tmp_path)
                    remote_offset = 0
            except (FileNotFoundError, IOError):
                # No partial yet. Match the GUI path: some SFTP servers
                # surface a missing file as a plain IOError.
                remote_offset = 0
            if 0 < remote_offset < file_size:
                print(f"Resume: {_fmt_size(remote_offset)}/{_fmt_size(file_size)} "
                      f"({remote_offset * 100 // file_size}%)")
            # Write remaining data
            if remote_offset < file_size:
                with open(local_path, 'rb') as f:
                    f.seek(remote_offset)
                    if remote_offset > 0:
                        rf = sftp.open(tmp_path, 'r+b')
                        rf.seek(remote_offset)
                    else:
                        rf = sftp.open(tmp_path, 'wb')
                    rf.set_pipelined(True)  # pipeline writes for throughput
                    try:
                        transferred = remote_offset
                        while transferred < file_size:
                            data = f.read(CHUNK_SIZE)
                            if not data:
                                break
                            rf.write(data)
                            transferred += len(data)
                            progress(transferred, file_size)
                    finally:
                        rf.close()
            # === VALIDATE: size ===
            actual = sftp.stat(tmp_path).st_size
            if actual != file_size:
                raise IOError(f"Size mismatch: expected {file_size}, got {actual}")
            # === VALIDATE: SHA256 before rename (always, even if resumed) ===
            print("Verifying SHA256...", end=" ", flush=True)
            local_hash = _sha256_local(local_path)
            remote_hash = _sha256_remote(client, tmp_path, is_windows)
            if remote_hash is not None and local_hash != remote_hash:
                print(f"MISMATCH on attempt {attempt}", file=sys.stderr)
                sftp.remove(tmp_path)
                if attempt < max_retries:
                    continue  # Retry from scratch
                else:
                    raise IOError(
                        f"CHECKSUM MISMATCH after {max_retries} attempts!\n"
                        f" local: {local_hash}\n"
                        f" remote: {remote_hash}"
                    )
            elif remote_hash is not None:
                print(f"OK ({local_hash[:16]}...)")
            else:
                print("SKIP (sha256sum unavailable)")
            # Atomic rename: .part → final
            try:
                sftp.remove(remote_path)
            except (FileNotFoundError, IOError):
                pass
            sftp.rename(tmp_path, remote_path)
            try:
                sftp.chmod(remote_path, 0o664)
            except OSError:
                pass  # Windows OpenSSH doesn't support chmod
            elapsed = time.time() - t0
            _print_result(server, local_path, remote_path, file_size, elapsed)
            return  # Success
        except (EOFError, TimeoutError, OSError,
                paramiko.SSHException, ConnectionError) as e:
            print(f"Attempt {attempt}/{max_retries} failed: {e}", file=sys.stderr)
            if attempt < max_retries:
                delay = max(5, min(2 ** attempt, 30))  # backoff clamped to 5..30s
                print(f"Retry in {delay}s...", file=sys.stderr)
                time.sleep(delay)
            else:
                raise SystemExit(f"ERROR: Upload failed after {max_retries} attempts: {e}")
        finally:
            # Best-effort cleanup of whatever was opened this attempt.
            if sftp:
                try:
                    sftp.close()
                except Exception:
                    pass
            if client:
                try:
                    client.close()
                except Exception:
                    pass
def _print_result(server, local_path, remote_path, file_size, elapsed):
    """Print a one-line success summary: size, duration and (for >=1MB) speed.

    Args:
        server: Server dict; only ``server['alias']`` is used.
        local_path: Local source path (echoed verbatim).
        remote_path: Remote destination path (echoed verbatim).
        file_size: Transferred size in bytes.
        elapsed: Wall-clock seconds the transfer took.
    """
    info = f"{_fmt_size(file_size)}, {elapsed:.1f}s"
    if file_size >= 1024 * 1024 and elapsed > 0:
        speed = file_size / elapsed
        info += f", {_fmt_size(int(speed))}/s"
    print(f"OK: {local_path} -> {server['alias']}:{remote_path} ({info})")
def download_file(server: dict, remote_path: str, local_path: str):
normalized_remote_path = _normalize_remote_path(remote_path)

View File

@@ -1,6 +1,6 @@
"""Version info for ServerManager."""
__version__ = "1.8.71"
__version__ = "1.8.73"
__app_name__ = "ServerManager"
__author__ = "aibot777"
__description__ = "Desktop GUI for managing remote servers"