v1.9.7: S3 folder download — recursive with preserved directory structure

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
chrome-storm-c442
2026-03-03 08:19:07 -05:00
parent 2a56ececd1
commit f445953a82
5 changed files with 98 additions and 10 deletions

View File

@@ -396,6 +396,10 @@ _EN = {
"s3_folder_failed": "Failed to create folder",
"s3_delete_folder_confirm": "Delete folder \"{folder}\" and all its contents?",
"s3_deleted_n": "Deleted {count} objects",
"s3_download_folder_title": "Save folder to...",
"s3_downloading_n": "Downloading {count} files...",
"s3_downloaded_n": "Downloaded {count} files",
"s3_download_partial": "Downloaded {ok}/{total} files",
"s3_copy_link_48h": "Copy Link (48h)",
"s3_copy_link_permanent": "Copy Direct Link",
"s3_generating_link": "Generating link...",
@@ -909,6 +913,10 @@ _RU = {
"s3_folder_failed": "Ошибка создания папки",
"s3_delete_folder_confirm": "Удалить папку \"{folder}\" со всем содержимым?",
"s3_deleted_n": "Удалено {count} объектов",
"s3_download_folder_title": "Сохранить папку в...",
"s3_downloading_n": "Скачивание {count} файлов...",
"s3_downloaded_n": "Скачано {count} файлов",
"s3_download_partial": "Скачано {ok}/{total} файлов",
"s3_copy_link_48h": "Ссылка (48ч)",
"s3_copy_link_permanent": "Прямая ссылка",
"s3_generating_link": "Генерация ссылки...",
@@ -1422,6 +1430,10 @@ _ZH = {
"s3_folder_failed": "创建文件夹失败",
"s3_delete_folder_confirm": "删除文件夹 \"{folder}\" 及其所有内容?",
"s3_deleted_n": "已删除 {count} 个对象",
"s3_download_folder_title": "保存文件夹到...",
"s3_downloading_n": "正在下载 {count} 个文件...",
"s3_downloaded_n": "已下载 {count} 个文件",
"s3_download_partial": "已下载 {ok}/{total} 个文件",
"s3_copy_link_48h": "复制链接 (48小时)",
"s3_copy_link_permanent": "复制直接链接",
"s3_generating_link": "生成链接中...",

View File

@@ -306,6 +306,30 @@ class S3Client:
log.error("S3 presigned URL failed: %s", exc)
return None
def list_all_objects(self, bucket: str, prefix: str = "") -> list[dict]:
    """Recursively enumerate every object stored under *prefix*.

    No delimiter is supplied to list_objects_v2, so the paginator walks
    the entire key space below *prefix*, including nested "subfolders".
    Zero-byte keys that end in "/" (folder placeholder markers) are
    filtered out of the result.

    Returns the raw S3 object dicts ({'Key', 'Size', 'LastModified'}),
    or [] when the client is not connected or the listing fails.
    """
    if not self._ensure_connected():
        return []
    try:
        page_kwargs = {"Bucket": bucket}
        if prefix:
            page_kwargs["Prefix"] = prefix
        found: list[dict] = []
        pages = self._client.get_paginator("list_objects_v2").paginate(**page_kwargs)
        for page in pages:
            found.extend(
                entry
                for entry in page.get("Contents", [])
                # drop zero-byte "folder marker" keys; keep real objects
                if not (entry["Key"].endswith("/") and entry.get("Size", 0) == 0)
            )
        self._last_ok = time.time()
        return found
    except Exception as exc:
        log.error("S3 list_all_objects failed: %s", exc)
        return []
def delete_prefix(self, bucket: str, prefix: str) -> int:
"""Recursively delete all objects under a prefix. Returns count deleted."""
if not self._ensure_connected():