v1.9.7: S3 folder download — recursive with preserved directory structure

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
chrome-storm-c442
2026-03-03 08:19:07 -05:00
parent 2a56ececd1
commit f445953a82
5 changed files with 98 additions and 10 deletions

View File

@@ -306,6 +306,30 @@ class S3Client:
log.error("S3 presigned URL failed: %s", exc)
return None
def list_all_objects(self, bucket: str, prefix: str = "") -> list[dict]:
    """Recursively list every object beneath *prefix* (no delimiter).

    Args:
        bucket: Target S3 bucket name.
        prefix: Optional key prefix; when empty, the whole bucket is listed.

    Returns:
        A list of object dicts (each with 'Key', 'Size', 'LastModified').
        Empty list when not connected or on any S3 error (logged, not raised).
    """
    if not self._ensure_connected():
        return []
    try:
        params = {"Bucket": bucket}
        if prefix:
            params["Prefix"] = prefix
        paginator = self._client.get_paginator("list_objects_v2")
        found = []
        for page in paginator.paginate(**params):
            for entry in page.get("Contents", []):
                # Zero-byte keys ending in "/" are console-created
                # "directory" placeholders, not real objects — drop them.
                is_marker = entry["Key"].endswith("/") and entry.get("Size", 0) == 0
                if not is_marker:
                    found.append(entry)
        self._last_ok = time.time()
        return found
    except Exception as exc:
        log.error("S3 list_all_objects failed: %s", exc)
        return []
def delete_prefix(self, bucket: str, prefix: str) -> int:
"""Recursively delete all objects under a prefix. Returns count deleted."""
if not self._ensure_connected():