// iimtt-api/src/routes/api/aiTest.js
/**
 * Express handler: answers a prompt via an AI provider (OpenAI or DeepSeek),
 * persisting the running conversation in the `conversations` Mongo collection.
 *
 * Request body:
 *   @param {string} prompt            - user message to send to the model
 *   @param {string} [sessionId]       - existing conversation id; omitted => new session
 *   @param {string} provider          - 'openai' | 'deepseek'
 *   @param {string} [model]           - OpenAI model name (default 'gpt-3.5-turbo-16k')
 *   @param {number} [max_tokens=200]  - OpenAI completion cap
 *
 * Responds with { success, data, total_tokens, sessionId } on success,
 * or { success: false, error } with 400/500 on failure.
 */
const aiFollowupQuestion = async (req, res) => {
  const { MongoClient } = require('mongodb');
  const fetch = require('node-fetch');
  const { v4: uuidv4 } = require('uuid');

  const url = process.env.MONGODB_URL;
  const dbName = process.env.MONGO_DB_NAME;
  // NOTE(review): a fresh client per request is expensive; consider a shared,
  // module-level client. Kept per-request here, but now reliably closed below.
  const client = new MongoClient(url);

  // POST to OpenAI chat completions; throws on a non-2xx HTTP status so the
  // outer catch returns a clean 500 instead of crashing on a missing `choices`.
  async function fetchOpenAICompletion(messages, model = 'gpt-3.5-turbo-16k', max_tokens = 200) {
    const response = await fetch('https://api.openai.com/v1/chat/completions', {
      method: 'POST',
      headers: {
        'Authorization': `Bearer ${process.env.OPENAI_KEY}`,
        'Content-Type': 'application/json',
      },
      body: JSON.stringify({ model, messages, max_tokens }),
    });
    if (!response.ok) {
      throw new Error(`OpenAI request failed with status ${response.status}`);
    }
    return response.json();
  }

  // POST to DeepSeek chat completions (non-streaming); same non-2xx guard.
  async function fetchDeepSeekCompletion(messages) {
    const response = await fetch('https://api.deepseek.com/chat/completions', {
      method: 'POST',
      headers: {
        'Authorization': `Bearer ${process.env.DEEPSEEK_KEY}`,
        'Content-Type': 'application/json',
      },
      body: JSON.stringify({
        model: 'deepseek-chat',
        messages,
        stream: false,
      }),
    });
    if (!response.ok) {
      throw new Error(`DeepSeek request failed with status ${response.status}`);
    }
    return response.json();
  }

  // Similar function for Gemini can be added here...

  try {
    const { prompt, sessionId, provider, model, max_tokens = 200 } = req.body;

    // Validate before touching the database: never persist an empty prompt.
    if (!prompt) {
      return res.status(400).json({
        success: false,
        error: 'A prompt is required.',
      });
    }

    // Connect inside the try so a connection failure becomes a handled 500
    // (previously it was outside and produced an unhandled rejection).
    await client.connect();
    const conversationsCollection = client.db(dbName).collection('conversations');

    let conversation;
    if (!sessionId) {
      // New session: seed history with the system instruction + first prompt.
      conversation = {
        sessionId: uuidv4(),
        conversationHistory: [
          { role: 'system', content: 'answer within 10 words' },
          { role: 'user', content: prompt },
        ],
      };
      await conversationsCollection.insertOne(conversation);
    } else {
      conversation = await conversationsCollection.findOne({ sessionId: sessionId });
      if (!conversation) {
        return res.status(400).json({
          success: false,
          error: 'Invalid session ID.',
        });
      }
      conversation.conversationHistory.push({ role: 'user', content: prompt });
    }

    let aiResponse;
    switch (provider) {
      case 'openai':
        aiResponse = await fetchOpenAICompletion(conversation.conversationHistory, model, max_tokens);
        break;
      case 'deepseek':
        aiResponse = await fetchDeepSeekCompletion(conversation.conversationHistory);
        break;
      // case 'gemini':
      //   aiResponse = await fetchGeminiCompletion(conversation.conversationHistory);
      //   break;
      // Add more cases for other providers as needed
      default:
        return res.status(400).json({
          success: false,
          error: 'Invalid provider specified.',
        });
    }

    // Extract the assistant message once, defensively: a malformed provider
    // payload surfaces as a handled 500 rather than storing `undefined`.
    const assistantContent =
      aiResponse?.choices?.[0]?.message?.content ?? aiResponse?.response;
    if (assistantContent == null) {
      throw new Error('AI provider returned no completion content.');
    }

    conversation.conversationHistory.push({ role: 'assistant', content: assistantContent });
    await conversationsCollection.updateOne(
      { sessionId: conversation.sessionId },
      { $set: { conversationHistory: conversation.conversationHistory } }
    );

    res.json({
      success: true,
      data: assistantContent,
      total_tokens: aiResponse.usage ? aiResponse.usage.total_tokens : undefined,
      sessionId: conversation.sessionId,
    });
  } catch (error) {
    console.error('Error generating response:', error.message);
    res.status(500).json({
      success: false,
      error: 'Something went wrong. Please try again later.',
    });
  } finally {
    // Fix: the original leaked one Mongo connection per request.
    // Swallow close errors — the response has already been decided.
    await client.close().catch(() => {});
  }
};
module.exports = aiFollowupQuestion;