OpenAI Batch API Python example

OpenAI announced the Batch API, which “returns completions within 24 hours for a 50% discount.”

To test it, I wrote a trivial Python example of the API. I haven’t tested the response retrieval yet, since my run will take up to 24 hours, but hopefully it works fine.

Use a Jupyter notebook to persist the print output!

import json
from pathlib import Path

import openai  # reads OPENAI_API_KEY from the environment

# your custom prompt gen. funcs.
from llm.prompt import system_message, user_message
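
# system_message / user_message are my own helpers; if you don't have them,
# equivalents would look roughly like this (illustrative sketch, not the
# original module):
#
#   def system_message():
#       return {"role": "system", "content": "You are a helpful assistant."}
#
#   def user_message(content):
#       return {"role": "user", "content": content}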


# format:
# {
#     "custom_id": "request-1",
#     "method": "POST",
#     "url": "/v1/chat/completions",
#     "body": {
#         "model": "gpt-3.5-turbo",
#         "messages": [
#             {"role": "system", "content": "You are a helpful assistant."},
#             {"role": "user", "content": "What is 2+2?"},
#         ],
#     },
# }

template = {
    "custom_id": "request-1",
    "method": "POST",
    "url": "/v1/chat/completions",
    "body": {"model": "gpt-4-turbo", "messages": []},
}

template["body"]["messages"].append(system_message())
template["body"]["messages"].append(user_message("Hello. Who are you?"))


with open("./batch_testing.jsonl", encoding="utf-8", mode="w") as input_file:
    input_file.write(json.dumps(template) + "\n")

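# upload the JSONL as a batch input file, then create the batch job against it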
uploaded_file = openai.files.create(
    file=Path("./batch_testing.jsonl"),
    purpose="batch",
)

batch = openai.batches.create(
    input_file_id=uploaded_file.id,
    completion_window="24h",
    endpoint="/v1/chat/completions",
)

print("Keep this id")
print(batch)

retrieved = openai.batches.retrieve(batch.id)

print("You should see 'in_progress')
print(retrieved.status)
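
# For reference, the batch status should move through states like
# "validating", "in_progress", "finalizing", and eventually "completed"
# (or "failed" / "expired" / "cancelled").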

# I didn't test what's in the below...
if retrieved.output_file_id:
    response = openai.files.content(retrieved.output_file_id)
    print(response)
    # clean up the uploaded input file once the output is available
    openai.files.delete(uploaded_file.id)
else:
    print("You don't have the result yet!")
