from openai import OpenAI
# Initialize client
client = OpenAI(
    api_key="your-api-key",
    base_url="https://ai.machinefi.com/v1"
)
# Simple conversation
def simple_chat(prompt):
    """Send a single prompt and return the model's reply."""
    response = client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": prompt}],
        max_tokens=1000
    )
    return response.choices[0].message.content
# Multi-turn conversation
def multi_turn_chat():
messages = [
{"role": "system", "content": "You are a Python programming assistant"},
{"role": "user", "content": "How to read CSV files?"}
]
response = client.chat.completions.create(
model="gpt-4",
messages=messages,
temperature=0.7
)
# Add AI response to conversation history
messages.append({
"role": "assistant",
"content": response.choices[0].message.content
})
return messages
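
# Illustrative sketch (not part of the original example): continue_conversation is a
# hypothetical helper showing how the history returned by multi_turn_chat() could be
# extended with a follow-up question so the model keeps the earlier context.
def continue_conversation(messages, follow_up):
    messages.append({"role": "user", "content": follow_up})
    response = client.chat.completions.create(
        model="gpt-4",
        messages=messages,
        temperature=0.7
    )
    messages.append({
        "role": "assistant",
        "content": response.choices[0].message.content
    })
    return messages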
# Streaming output
def stream_chat(prompt):
    """Stream the model's reply and print it as the chunks arrive."""
    stream = client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": prompt}],
        stream=True
    )
    for chunk in stream:
        if chunk.choices[0].delta.content is not None:
            print(chunk.choices[0].delta.content, end="")
    print()  # final newline after the streamed output
# Usage examples
if __name__ == "__main__":
    # Simple call
    result = simple_chat("Write a Python bubble sort function")
    print(result)

    # Streaming output
    stream_chat("Explain basic concepts of machine learning")
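
    # Multi-turn conversation (illustrative addition, not in the original usage examples):
    # run the scripted exchange and print the assistant's reply from the returned history.
    history = multi_turn_chat()
    print(history[-1]["content"])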