Node.js integration
Use the native fetch API (built into Node.js 18+) or any OpenAI-compatible client by pointing it at the Project-M base URL.
Server-side fetch example
const PROJECTM_API_KEY = process.env.PROJECTM_API_KEY;

/**
 * Send one user message to the Project-M chat-completions endpoint and
 * return the assistant's reply text.
 *
 * @param {string} message - The user's message content.
 * @returns {Promise<string>} The assistant reply, or "" when the response
 *   contains no choices.
 * @throws {Error} If PROJECTM_API_KEY is unset, or the API returns a
 *   non-2xx status (the upstream status and body text are included).
 */
async function askChinna(message) {
  // Fail fast with a clear message instead of sending "Bearer undefined"
  // upstream and getting an opaque 401 back.
  if (!PROJECTM_API_KEY) {
    throw new Error("PROJECTM_API_KEY environment variable is not set");
  }
  const response = await fetch("https://api.itsmechinna.com/v1/chat/completions", {
    method: "POST",
    headers: {
      "Authorization": `Bearer ${PROJECTM_API_KEY}`,
      "Content-Type": "application/json"
    },
    body: JSON.stringify({
      model: "chinna",
      messages: [
        { role: "system", content: "You are Chinna AI inside my application." },
        { role: "user", content: message }
      ],
      temperature: 0.7,
      stream: false
    })
  });
  if (!response.ok) {
    // Include the body text so auth vs. validation failures are debuggable.
    throw new Error(`Project-M API error: ${response.status} ${await response.text()}`);
  }
  const data = await response.json();
  // OpenAI-compatible shape: choices[0].message.content holds the reply.
  return data.choices?.[0]?.message?.content ?? "";
}
// Smoke-test the helper: print the reply, or the failure reason.
(async () => {
  try {
    const reply = await askChinna("Write a dashboard welcome message.");
    console.log(reply);
  } catch (err) {
    console.error(err);
  }
})();
Express API proxy (keeps the API key on the server instead of exposing it to browser clients)
import express from "express";
// Create the Express app that will front the Project-M API.
const app = express();
// Parse incoming JSON request bodies into req.body.
app.use(express.json());
// Proxy chat requests to Project-M so the API key never reaches the client.
// Expected body: { model?: string, messages: Array<{ role, content }> }.
app.post("/api/ai", async (req, res) => {
  try {
    // Reject malformed requests before spending an upstream call.
    if (!Array.isArray(req.body?.messages)) {
      return res.status(400).json({ error: "messages must be an array" });
    }
    const upstream = await fetch("https://api.itsmechinna.com/v1/chat/completions", {
      method: "POST",
      headers: {
        Authorization: `Bearer ${process.env.PROJECTM_API_KEY}`,
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        model: req.body.model || "chinna",
        messages: req.body.messages
      })
    });
    // Forward the upstream status AND content type. Without .type(), Express
    // labels the string body text/html, mislabeling the JSON payload.
    res
      .status(upstream.status)
      .type(upstream.headers.get("content-type") ?? "application/json")
      .send(await upstream.text());
  } catch (error) {
    res.status(500).json({ error: error.message });
  }
});
// Start the HTTP server on port 3000 (NOTE(review): consider reading the
// port from the environment for deployment flexibility).
app.listen(3000);