s3-upload-file


import sys
import os
import subprocess
import time
import tempfile
import urllib.request
import json

# ─────────────────────────────────────────
# PRE_EXECUTION
# ─────────────────────────────────────────

for attempt in range(2):
    try:
        req = urllib.request.Request(
            "https://checklist.day/api/registry/boto3",
            headers={"User-Agent": "checklist-agent/1.0"}
        )
        with urllib.request.urlopen(req, timeout=10) as resp:
            registry = json.loads(resp.read())
            break
    except Exception as e:
        if attempt == 1:
            print(f"ABORT: registry unreachable — {e}")
            sys.exit(1)
        time.sleep(2)

warnings = registry.get("warnings") or []
if not isinstance(warnings, list):
    warnings = [warnings]
if warnings:
    print("[boto3] WARNINGS:")
    for w in warnings:
        print(f"  ⚠ {w}")

# ─────────────────────────────────────────
# EXECUTION
# ─────────────────────────────────────────

subprocess.check_call([sys.executable, "-m", "pip", "install", "-q", "boto3>=1.26.0"])

import boto3
from botocore.exceptions import ClientError

AWS_ACCESS_KEY_ID     = os.environ.get("AWS_ACCESS_KEY_ID")
AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY")
AWS_REGION            = os.environ.get("AWS_REGION", "us-east-1")
S3_BUCKET             = os.environ.get("S3_BUCKET")

if not AWS_ACCESS_KEY_ID:
    print("ABORT: AWS_ACCESS_KEY_ID not set"); sys.exit(1)
if not AWS_SECRET_ACCESS_KEY:
    print("ABORT: AWS_SECRET_ACCESS_KEY not set"); sys.exit(1)
if not S3_BUCKET:
    print("ABORT: S3_BUCKET not set"); sys.exit(1)

KEY = "checklist-test/upload-test.txt"
CONTENT = b"checklist.day s3-upload-file test\n"

client = boto3.client(
    "s3",
    aws_access_key_id=AWS_ACCESS_KEY_ID,
    aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
    region_name=AWS_REGION,
)

# Write to temp file
with tempfile.NamedTemporaryFile(delete=False, suffix=".txt") as f:
    f.write(CONTENT)
    tmp_path = f.name

try:
    # FOOTGUN: upload_file handles multipart automatically for large files (see the TransferConfig sketch after this block)
    # FOOTGUN: put_object requires the whole file in memory — don't use for large files
    # FOOTGUN: ExtraArgs for ContentType — without it S3 serves as binary/octet-stream
    client.upload_file(
        tmp_path,
        S3_BUCKET,
        KEY,
        ExtraArgs={"ContentType": "text/plain"},
    )
    print(f"  uploaded: s3://{S3_BUCKET}/{KEY}")

    # Verify with head_object
    head = client.head_object(Bucket=S3_BUCKET, Key=KEY)
    size_bytes = head["ContentLength"]
    upload_ok  = size_bytes == len(CONTENT)
    print(f"  verified: {size_bytes} bytes (expected {len(CONTENT)})")

    # Cleanup
    # client.delete_object(Bucket=S3_BUCKET, Key=KEY)
    # print(f"  cleaned up: {KEY}")

except ClientError as e:
    # Surface S3 API failures (bad credentials, missing bucket, access denied) clearly
    print(f"ABORT: S3 operation failed: {e}")
    sys.exit(1)
finally:
    os.unlink(tmp_path)
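
# For large files, upload_file's automatic multipart handling can be tuned with
# boto3's TransferConfig. Illustrative sketch; the threshold and concurrency
# values below are assumptions, not something this test needs:
#
#   from boto3.s3.transfer import TransferConfig
#   config = TransferConfig(multipart_threshold=8 * 1024 * 1024,  # multipart above 8 MiB
#                           max_concurrency=4)
#   client.upload_file(tmp_path, S3_BUCKET, KEY, Config=config,
#                      ExtraArgs={"ContentType": "text/plain"})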

# ─────────────────────────────────────────
# POST_EXECUTION
# ─────────────────────────────────────────

assert upload_ok, f"FAIL: size mismatch — got {size_bytes}, expected {len(CONTENT)}"

result = {
    "bucket":     S3_BUCKET,
    "key":        KEY,
    "size_bytes": size_bytes,
    "upload_ok":  upload_ok,
}
print(json.dumps(result, indent=2))
print("PASS")