{"id":"s3-upload-file","version":"1.0.0","primitive":"code_execution","description":"Upload a file to S3 using upload_file (multipart-aware) and verify the object exists after upload","registry_refs":["boto3"],"tags":[],"solves":[],"auth_required":true,"verified":false,"last_verified":null,"next_check":"2026-07-30","eval_result":null,"eval_env":null,"mast":[],"ref":"https://arxiv.org/abs/2503.13657","inputs":[{"name":"AWS_ACCESS_KEY_ID","required":true,"description":"AWS access key ID"},{"name":"AWS_SECRET_ACCESS_KEY","required":true,"description":"AWS secret access key"},{"name":"S3_BUCKET","required":true,"description":"S3 bucket name to upload to"},{"name":"AWS_REGION","default":"us-east-1","required":false,"description":"AWS region"}],"executable":"# ============================================\n# checklist:     s3-upload-file\n# version:       1.0.0\n# primitive:     code_execution\n# description:   Upload a file to S3 using upload_file (multipart-aware) and verify the object exists after upload\n# registry_refs: boto3\n# auth_required: true\n# verified:      false\n# last_verified: null\n# next_check:    2026-07-30\n# eval_result:   null\n# eval_env:      null\n#\n# inputs:\n#   - name: AWS_ACCESS_KEY_ID\n#     required: true\n#     description: AWS access key ID\n#   - name: AWS_SECRET_ACCESS_KEY\n#     required: true\n#     description: AWS secret access key\n#   - name: S3_BUCKET\n#     required: true\n#     description: S3 bucket name to upload to\n#   - name: AWS_REGION\n#     required: false\n#     default: \"us-east-1\"\n#     description: AWS region\n#\n# OUTPUTS:\n#   bucket        — bucket uploaded to\n#   key           — S3 object key\n#   size_bytes    — size of uploaded object\n#   upload_ok     — true if object exists after upload\n#\n# MAST FAILURE MODES ADDRESSED:\n# FM-1.1 Disobey Task Specification        — upload_file used (not put_object) for multipart support\n# FM-3.2 No or Incomplete Verification     — head_object called after upload to verify existence\n# FM-3.3 Incorrect Verification            — size verified, not just 200 status\n#\n# ref: https://arxiv.org/abs/2503.13657\n# ============================================\n\nimport sys\nimport os\nimport subprocess\nimport time\nimport tempfile\nimport urllib.request\nimport json\n\n# ─────────────────────────────────────────\n# PRE_EXECUTION\n# ─────────────────────────────────────────\n\nfor attempt in range(2):\n    try:\n        req = urllib.request.Request(\n            \"https://checklist.day/api/registry/boto3\",\n            headers={\"User-Agent\": \"checklist-agent/1.0\"}\n        )\n        with urllib.request.urlopen(req, timeout=10) as resp:\n            registry = json.loads(resp.read())\n            break\n    except Exception as e:\n        if attempt == 1:\n            print(f\"ABORT: registry unreachable — {e}\")\n            sys.exit(1)\n        time.sleep(2)\n\nwarnings = registry.get(\"warnings\", [])\nif warnings:\n    print(\"[boto3] WARNINGS:\")\n    for w in warnings if isinstance(warnings, list) else [warnings]:\n        print(f\"  ⚠ {w}\")\n\n# ─────────────────────────────────────────\n# EXECUTION\n# ─────────────────────────────────────────\n\nsubprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", \"-q\", \"boto3>=1.26.0\"])\n\nimport boto3\nfrom botocore.exceptions import ClientError\n\nAWS_ACCESS_KEY_ID     = os.environ.get(\"AWS_ACCESS_KEY_ID\")\nAWS_SECRET_ACCESS_KEY = os.environ.get(\"AWS_SECRET_ACCESS_KEY\")\nAWS_REGION            = os.environ.get(\"AWS_REGION\", \"us-east-1\")\nS3_BUCKET             = os.environ.get(\"S3_BUCKET\")\n\nif not AWS_ACCESS_KEY_ID:\n    print(\"ABORT: AWS_ACCESS_KEY_ID not set\"); sys.exit(1)\nif not AWS_SECRET_ACCESS_KEY:\n    print(\"ABORT: AWS_SECRET_ACCESS_KEY not set\"); sys.exit(1)\nif not S3_BUCKET:\n    print(\"ABORT: S3_BUCKET not set\"); sys.exit(1)\n\nKEY = \"checklist-test/upload-test.txt\"\nCONTENT = b\"checklist.day s3-upload-file test\\n\"\n\nclient = boto3.client(\n    \"s3\",\n    aws_access_key_id=AWS_ACCESS_KEY_ID,\n    aws_secret_access_key=AWS_SECRET_ACCESS_KEY,\n    region_name=AWS_REGION,\n)\n\n# Write to temp file\nwith tempfile.NamedTemporaryFile(delete=False, suffix=\".txt\") as f:\n    f.write(CONTENT)\n    tmp_path = f.name\n\ntry:\n    # FOOTGUN: upload_file handles multipart automatically for large files\n    # FOOTGUN: put_object requires the whole file in memory — don't use for large files\n    # FOOTGUN: ExtraArgs for ContentType — without it S3 serves as binary/octet-stream\n    client.upload_file(\n        tmp_path,\n        S3_BUCKET,\n        KEY,\n        ExtraArgs={\"ContentType\": \"text/plain\"},\n    )\n    print(f\"  uploaded: s3://{S3_BUCKET}/{KEY}\")\n\n    # Verify with head_object\n    head = client.head_object(Bucket=S3_BUCKET, Key=KEY)\n    size_bytes = head[\"ContentLength\"]\n    upload_ok  = size_bytes == len(CONTENT)\n    print(f\"  verified: {size_bytes} bytes (expected {len(CONTENT)})\")\n\n    # Cleanup\n    # client.delete_object(Bucket=S3_BUCKET, Key=KEY)\n    # print(f\"  cleaned up: {KEY}\")\n\nfinally:\n    os.unlink(tmp_path)\n\n# ─────────────────────────────────────────\n# POST_EXECUTION\n# ─────────────────────────────────────────\n\nassert upload_ok, f\"FAIL: size mismatch — got {size_bytes}, expected {len(CONTENT)}\"\n\nresult = {\n    \"bucket\":     S3_BUCKET,\n    \"key\":        KEY,\n    \"size_bytes\": size_bytes,\n    \"upload_ok\":  upload_ok,\n}\nprint(json.dumps(result, indent=2))\nprint(\"PASS\")\n"}