Quickstart - Working with the BizAI Chat
Step by step Guide
The BizAIChat is a conversation session between a user and an AI agent. BizAIChat stores messages and automatically handles truncation to fit content into a model's context.
The BizAI chat opens in the context of a specific business. It allows you to ask the LLM questions based on its understanding of the business's properties, such as name, description, products, services, and more.
Following is a simple step-by-step guide showing how to send a message to BizAI and receive a streamed response from the LLM.
Full reference to the API can be found here: https://developers.intandem.tech/v3.0/reference/the-bizai-platform
Step 1 - Create a chat
Create a new chat and get its UID.
// Create a new BizAI chat. The response payload contains the chat UID,
// which every later request (Steps 2-3) must reference.
const url = 'https://api.vcita.biz/v3/ai/bizai_chats';
const body = {
  agent: 'vanilla',
  metadata: {
    instruction: 'You are a comedian assistant. Answer with a joke at the end of each message.',
  },
};
(async () => {
  try {
    const response = await fetch(url, {
      method: 'POST',
      headers: {
        accept: 'application/json, text/plain, */*',
        'accept-language': 'en-US,en;q=0.8',
        authorization: 'Bearer <YOUR_TOKEN>',
        'content-type': 'application/json',
      },
      body: JSON.stringify(body),
    });
    if (!response.ok) {
      throw new Error('Failed to create chat');
    }
    const data = await response.json();
    // Save the chat UID for future use
    console.log(data);
  } catch (error) {
    console.error('Error:', error);
  }
})();
Step 2 - Send a prompt to the chat
// Send a user prompt to an existing chat (identified by `aiChatUid`).
// With `streaming: true` the reply comes back as a stream — see Step 3
// for how to consume it.
const url = `https://api.vcita.biz/v3/ai/bizai_chat_messages?ai_chat_uid=${aiChatUid}`;
const body = {
  content: {
    type: 'text',
    content: {
      text: 'Tell me a joke!',
    },
  },
  streaming: true,
};
fetch(url, {
  headers: {
    accept: 'application/json, text/plain, */*',
    authorization: 'Bearer <YOUR_TOKEN>',
    'content-type': 'application/json',
  },
  body: JSON.stringify(body),
  method: 'POST',
})
  .then((response) => {
    if (!response.ok) {
      throw new Error(`Failed to send message: ${response.status}`);
    }
    // The streamed reply body is consumed in Step 3 (processStream).
  })
  .catch((error) => {
    console.error('Error:', error);
  });
Step 3 - Listen to the returning stream
Keep digesting the returning streamed messages from the chat until the stream is completed.
const aiChatUid = '6f7202b8-cd71-4c24-8c6b-359af2a344c6';
const url = `https://api.vcita.biz/v3/ai/bizai_chat_messages?ai_chat_uid=${aiChatUid}`;
const body = {
  content: {
    type: 'text',
    content: {
      text: 'Tell me a joke!',
    },
  },
  streaming: true,
};

// Send the prompt and hand the streamed response body to processStream.
// (The original snippet had a dangling `} catch` with no matching `try`;
// the enclosing function is reconstructed here so the example runs as-is.)
async function sendMessage() {
  try {
    const response = await fetch(url, {
      headers: {
        accept: 'application/json, text/plain, */*',
        authorization: 'Bearer <YOUR_TOKEN>',
        'content-type': 'application/json',
      },
      body: JSON.stringify(body),
      method: 'POST',
    });
    if (!response.ok) {
      throw response;
    }
    await processStream(response);
  } catch (error) {
    console.error('Error sending message:', error);
  }
}

sendMessage();
/**
 * Consume a streamed (newline-delimited JSON) response body.
 *
 * Reads chunks from `response.body`, decodes them, splits complete lines
 * via `processBuffer`, accumulates every `delta` fragment, and tracks
 * whether a record with `finish_reason === 'stop'` was seen.
 *
 * Fix over the previous version: on the final read `value` is `undefined`,
 * so the old code called `decoder.decode(undefined, ...)` and never flushed
 * the decoder — a multi-byte UTF-8 character split across the last chunk
 * boundary was silently dropped. We now flush with `decoder.decode()` when
 * the stream reports `done`.
 *
 * @param {Response} response - fetch Response whose body is a readable stream.
 */
async function processStream(response) {
  const reader = response.body.getReader();
  const decoder = new TextDecoder('utf-8');
  let buffer = '';
  let finishReasonMet = false;
  let accumulatedData = '';
  try {
    while (true) {
      const { value, done } = await reader.read();
      if (done) {
        // Flush any bytes the decoder is still holding (e.g. a split
        // multi-byte character at the end of the stream).
        buffer += decoder.decode();
      } else {
        buffer += decoder.decode(value, { stream: true });
      }
      const { remainingBuffer, parsedData } = processBuffer(buffer);
      buffer = remainingBuffer;
      for (const parsed of parsedData) {
        finishReasonMet = finishReasonMet || parsed.finish_reason === 'stop';
        if (parsed.delta) {
          accumulatedData += parsed.delta;
        }
      }
      if (done) {
        break;
      }
    }
    if (!finishReasonMet) {
      console.error('Stream did not end with the expected \'stop\' finish reason.');
    }
  } catch (err) {
    console.error('Error reading stream:', err);
  } finally {
    console.log('accumulatedData:', accumulatedData);
    reader.releaseLock();
  }
}
/**
 * Split a streamed text buffer into complete newline-terminated JSON records.
 *
 * Every complete line (text up to a '\n') is trimmed and, if non-empty,
 * JSON-parsed. Whatever follows the last newline is an incomplete record
 * and is returned unchanged so the caller can prepend the next chunk to it.
 *
 * @param {string} buffer - accumulated decoded stream text.
 * @returns {{remainingBuffer: string, parsedData: object[]}}
 */
function processBuffer(buffer) {
  const pieces = buffer.split('\n');
  // The final piece has no trailing newline yet — keep it for the next chunk.
  const remainingBuffer = pieces.pop();
  const parsedData = [];
  for (const piece of pieces) {
    const line = piece.trim();
    if (line) {
      parsedData.push(JSON.parse(line));
    }
  }
  return { remainingBuffer, parsedData };
}
Updated 6 months ago