Python integration
Use the `requests` library to connect backend Python applications to the Project-M LLM API.
requests example
import os
import requests

# The API key comes from the environment so secrets never live in source.
PROJECTM_API_KEY = os.environ["PROJECTM_API_KEY"]

# Conversation: a system prompt fixing the assistant's role, then the user turn.
chat_messages = [
    {"role": "system", "content": "You are Chinna AI inside my Python app."},
    {"role": "user", "content": "Summarize today's tasks in 3 bullets."},
]

# Non-streaming chat-completion request body.
payload = {
    "model": "chinna",
    "messages": chat_messages,
    "temperature": 0.7,
    "stream": False,
}

# POST to the chat-completions endpoint; the 60 s timeout guards against a
# hung connection.
resp = requests.post(
    "https://api.itsmechinna.com/v1/chat/completions",
    headers={
        "Authorization": f"Bearer {PROJECTM_API_KEY}",
        "Content-Type": "application/json",
    },
    json=payload,
    timeout=60,
)
resp.raise_for_status()  # surface HTTP 4xx/5xx as an exception

# Print the assistant's reply (first choice in the response).
print(resp.json()["choices"][0]["message"]["content"])
FastAPI proxy
import os

import requests
from fastapi import FastAPI
from fastapi.responses import JSONResponse

app = FastAPI()

# Read once at startup; a missing key fails fast with KeyError rather than
# producing an "Authorization: Bearer None" header per request.
PROJECTM_API_KEY = os.environ["PROJECTM_API_KEY"]


@app.post("/ai")
def ai_chat(body: dict):
    """Proxy a chat request to the Project-M LLM API.

    Expects a JSON body containing a "messages" list and an optional
    "model" (defaults to "chinna"). Streaming is forced off. Returns the
    upstream JSON body and — unlike the original, which always answered
    200 — propagates the upstream HTTP status code, so client code can
    see 4xx/5xx failures instead of mistaking error payloads for success.
    """
    r = requests.post(
        "https://api.itsmechinna.com/v1/chat/completions",
        headers={
            "Authorization": f"Bearer {PROJECTM_API_KEY}",
            "Content-Type": "application/json",
        },
        json={
            "model": body.get("model", "chinna"),
            "messages": body["messages"],  # required; KeyError -> 500 if absent
            "stream": False,
        },
        timeout=60,  # guard against a hung upstream connection
    )
    # Forward the upstream status code rather than masking errors as 200 OK.
    return JSONResponse(content=r.json(), status_code=r.status_code)