# openai-streaming-response  (code_execution · unverified · download .py)
#
# Streaming chat-completion smoke test against the OpenAI API: verifies
# chunked delivery and content integrity of the streamed reply.
# Model to use for streaming completion: OPENAI_MODEL env var (default below).

import sys
import os
import subprocess
import time
import urllib.request
import json

# ─────────────────────────────────────────
# PRE_EXECUTION
# ─────────────────────────────────────────

# Fetch the provider registry; retry once (2 s backoff) before giving up.
for attempt in (1, 2):
    request = urllib.request.Request(
        "https://checklist.day/api/registry/openai",
        headers={"User-Agent": "checklist-agent/1.0"},
    )
    try:
        with urllib.request.urlopen(request, timeout=10) as response:
            registry = json.loads(response.read())
    except Exception as exc:
        if attempt == 2:
            print(f"ABORT: registry unreachable — {exc}")
            sys.exit(1)
        time.sleep(2)
    else:
        break

# Surface any registry-advertised warnings before running the check.
# (A scalar "warnings" value is tolerated and printed as a single item.)
reported = registry.get("warnings", [])
if reported:
    print("[openai] WARNINGS:")
    items = reported if isinstance(reported, list) else [reported]
    for item in items:
        print(f"  ⚠ {item}")

# ─────────────────────────────────────────
# EXECUTION
# ─────────────────────────────────────────

# Install the SDK pinned to the 1.x API surface used below.
subprocess.check_call([sys.executable, "-m", "pip", "install", "-q", "openai>=1.0.0"])

from openai import OpenAI

# Credentials and model come from the environment; the model defaults to a
# small, cheap option suitable for a smoke test.
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
OPENAI_MODEL   = os.environ.get("OPENAI_MODEL", "gpt-4o-mini")

if not OPENAI_API_KEY:
    print("ABORT: OPENAI_API_KEY env var not set")
    sys.exit(1)

client = OpenAI(api_key=OPENAI_API_KEY)

chunks_received = 0
accumulated = []

# FOOTGUN: stream=True makes create() return an iterable Stream of delta
# chunks, not a complete response object.
# NOTE(review): Stream gained context-manager (`with`) support only in later
# 1.x releases, while the pin above allows any openai>=1.0.0 — on an early
# 1.0.x install `with ... as stream:` dies with AttributeError: __enter__.
# Iterate the stream directly instead; the loop below always exhausts it,
# which drains and closes the underlying HTTP response.
stream = client.chat.completions.create(
    model=OPENAI_MODEL,
    messages=[
        {"role": "user", "content": "Count from 1 to 5, one number per line, nothing else."},
    ],
    max_tokens=32,
    temperature=0,
    stream=True,
)
for chunk in stream:
    chunks_received += 1
    # FOOTGUN: choices can be empty on some chunks, and delta.content is
    # None on role/finish chunks — guard both before accumulating.
    delta = chunk.choices[0].delta.content if chunk.choices else None
    if delta:
        accumulated.append(delta)
        print(delta, end="", flush=True)

print()  # newline after stream

content  = "".join(accumulated)
stream_ok = chunks_received > 1 and len(content) > 0

print(f"  chunks received: {chunks_received}")
print(f"  content length:  {len(content)} chars")

# ─────────────────────────────────────────
# POST_EXECUTION
# ─────────────────────────────────────────

# Verification must not rely on `assert`: asserts are stripped under
# `python -O`, which would make every check below silently pass. Collect
# all failures, report them, and exit non-zero instead.
failures = []
if not stream_ok:
    failures.append(f"FAIL: streaming failed — chunks={chunks_received}, content={content!r}")
if chunks_received <= 1:
    failures.append("FAIL: expected multiple chunks, got 1 (not streaming?)")

# The prompt asked for the numbers 1-5; each must appear in the output.
for n in ["1", "2", "3", "4", "5"]:
    if n not in content:
        failures.append(f"FAIL: expected '{n}' in streamed content, got: {content!r}")

if failures:
    for message in failures:
        print(message)
    sys.exit(1)

result = {
    "chunks_received": chunks_received,
    "content":         content.strip(),
    "stream_ok":       stream_ok,
}
print(json.dumps(result, indent=2))
print("PASS")