gpt-3.5-turbo-16k

galleryApi
Kar 2024-11-15 12:02:00 +05:30
parent 02ec79b262
commit 7fcc27ada1
4 changed files with 61 additions and 69 deletions

View File

@@ -3,7 +3,7 @@ const aiFeedbackOnReport = async (req, res) => {
const url = process.env.AI_API_ENDOINT;
const api_key = process.env.AI_API_KEY;
const payload = {
"model": "gpt-4o-mini",
"model": "gpt-3.5-turbo-16k",
"messages": [{ "role": "user",
"content": JSON.stringify(req.body.grades) + " - first dataset(grades) " + JSON.stringify(req.body.suggested) + " - second dataset(suggested actions) " + req.body.instruction }]
}

View File

@@ -3,7 +3,7 @@ const aiFeedbackOnReport = async (req, res) => {
const url = process.env.AI_API_ENDOINT;
const api_key = process.env.AI_API_KEY;
const payload = {
"model": "gpt-4o-mini",
"model": "gpt-3.5-turbo-16k",
"messages": [{ "role": "user",
"content": JSON.stringify(req.body.score) + " - is the students report card. " + req.body.instruction }]
}

View File

@@ -11,7 +11,7 @@ const aiFollowupQuestion = async (req, res) => {
const database = client.db(dbName); // Replace with your database name
const conversationsCollection = database.collection('conversations'); // Replace with your collection name
async function fetchOpenAICompletion(prompt, messages, model = "gpt-4o-mini", max_tokens = 200) {
async function fetchOpenAICompletion(prompt, messages, model = "gpt-3.5-turbo-16k", max_tokens = 200) {
const response = await fetch('https://api.openai.com/v1/chat/completions', {
method: 'POST',
headers: {
@@ -31,7 +31,7 @@ const aiFollowupQuestion = async (req, res) => {
try {
const { prompt, sessionId, model = "gpt-4o-mini", max_tokens = 200 } = req.body;
const { prompt, sessionId, model = "gpt-3.5-turbo-16k", max_tokens = 200 } = req.body;
if (!conversationsCollection) {
return res.status(500).json({
success: false,

View File

@@ -1,4 +1,5 @@
const aiFollowupQuestion = async (req, res) => {
const { MongoClient } = require('mongodb');
const fetch = require('node-fetch');
const { v4: uuidv4 } = require('uuid');
@@ -10,7 +11,7 @@ const aiFollowupQuestion = async (req, res) => {
const database = client.db(dbName);
const conversationsCollection = database.collection('conversations');
async function fetchOpenAICompletion(prompt, messages, model = "gpt-4o-mini", max_tokens = 200) {
async function fetchOpenAICompletion(prompt, messages, model = "gpt-3.5-turbo-16k", max_tokens = 200) {
const response = await fetch('https://api.openai.com/v1/chat/completions', {
method: 'POST',
headers: {
@@ -28,72 +29,63 @@ const aiFollowupQuestion = async (req, res) => {
return data;
}
try {
const { prompt, sessionId, model = "gpt-4o-mini", max_tokens = 200 } = req.body;
if (!conversationsCollection) {
return res.status(500).json({
success: false,
error: 'MongoDB is not connected yet. Please try again later.',
});
}
let conversation;
if (!sessionId) {
const newSessionId = uuidv4();
// New conversation, start with system and user message
conversation = {
sessionId: newSessionId,
conversationHistory: [
{ role: 'system', content: 'answer within 50 words' },
{ role: 'user', content: prompt },
],
};
await conversationsCollection.insertOne(conversation);
} else {
// Existing conversation, find it by sessionId
conversation = await conversationsCollection.findOne({ sessionId: sessionId });
if (!conversation) {
return res.status(400).json({
try {
const { prompt, sessionId, model = "gpt-3.5-turbo-16k", max_tokens = 200 } = req.body;
if (!conversationsCollection) {
return res.status(500).json({
success: false,
error: 'Invalid session ID.',
error: 'MongoDB is not connected yet. Please try again later.',
});
}
// Keep only the last 2 messages (1 user and 1 assistant)
conversation.conversationHistory.push({ role: 'user', content: prompt });
// Limit conversation history to the last two entries: one user, one assistant
const lastTwoMessages = conversation.conversationHistory.slice(-2);
conversation.conversationHistory = lastTwoMessages;
let conversation;
if (!sessionId) {
const newSessionId = uuidv4();
conversation = {
sessionId: newSessionId,
conversationHistory: [
{ role: 'system', content: 'answer within 50 words' },
{ role: 'user', content: prompt },
],
};
await conversationsCollection.insertOne(conversation);
} else {
conversation = await conversationsCollection.findOne({ sessionId: sessionId });
if (!conversation) {
return res.status(400).json({
success: false,
error: 'Invalid session ID.',
});
}
conversation.conversationHistory.push({ role: 'user', content: prompt });
}
const aiResponse = await fetchOpenAICompletion(prompt, conversation.conversationHistory, model, max_tokens);
conversation.conversationHistory.push({ role: 'assistant', content: aiResponse.choices[0].message.content });
await conversationsCollection.updateOne(
{ sessionId: conversation.sessionId },
{ $set: { conversationHistory: conversation.conversationHistory } }
);
res.json({
success: true,
data: aiResponse.choices[0].message.content,
total_tokens: aiResponse.usage.total_tokens,
sessionId: conversation.sessionId,
});
} catch (error) {
console.error('Error generating response:', error.message);
res.status(500).json({
success: false,
error: 'Something went wrong. Please try again later.',
});
}
// Fetch AI response based on the last two messages (user + assistant)
const aiResponse = await fetchOpenAICompletion(prompt, conversation.conversationHistory, model, max_tokens);
// Add the AI response to conversation history
conversation.conversationHistory.push({ role: 'assistant', content: aiResponse.choices[0].message.content });
// Update conversation in the database with new history
await conversationsCollection.updateOne(
{ sessionId: conversation.sessionId },
{ $set: { conversationHistory: conversation.conversationHistory } }
);
res.json({
success: true,
data: aiResponse.choices[0].message.content,
total_tokens: aiResponse.usage.total_tokens,
sessionId: conversation.sessionId,
});
} catch (error) {
console.error('Error generating response:', error.message);
res.status(500).json({
success: false,
error: 'Something went wrong. Please try again later.',
});
}
}
module.exports = aiFollowupQuestion;
module.exports = aiFollowupQuestion;