suvo-patch-1
Kar 2025-01-29 22:27:10 +05:30
parent e589d66e36
commit 7db4089223
1 changed file with 24 additions and 19 deletions


@@ -14,12 +14,13 @@ const aiFollowupQuestion = async (req, res) => {
 const PROVIDER_KEYS = JSON.parse(process.env.PROVIDER_KEY || '[]');
 const PROVIDER_MODELS = {
-    openai: [ "gpt-4o", "gpt-4o-mini", "o1", "o1-mini", "gpt-3.5-turbo", "gpt-3.5", "gpt-3.5-turbo-mini", "gpt-3.5-mini", "gpt-3.5-turbo-2", "gpt-3.5-2", "gpt-3.5-turbo-2-mini", "gpt-3.5-2-mini" ],
+    openai: [ "gpt-4o", "gpt-4o-mini", "o1", "o1-mini", "gpt-3.5-turbo", "gpt-3.5-turbo-mini", "gpt-3.5-mini", "gpt-3.5-turbo-2", "gpt-3.5-2", "gpt-3.5-turbo-2-mini", "gpt-3.5-2-mini" ],
     deepseek: [ "deepseek-chat", "deepseek-reasoner", "deepseek-qa", "deepseek-qa-mini" ],
     // Add more providers and models as needed
 };
 async function fetchOpenAICompletion(messages, model, max_tokens = MAX_TOKENS) {
+    // console.log(messages)
     const response = await fetch('https://api.openai.com/v1/chat/completions', {
         method: 'POST',
         headers: {
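Since PROVIDER_KEYS is built with JSON.parse from the PROVIDER_KEY environment variable (falling back to '[]'), that variable is expected to hold a JSON array of key strings. A minimal sketch of what the later key check relies on, with placeholder key values that are not taken from this commit:

// Placeholder illustration only: the key strings below are invented for the example.
process.env.PROVIDER_KEY = '["client-key-1", "client-key-2"]';
const PROVIDER_KEYS = JSON.parse(process.env.PROVIDER_KEY || '[]');
console.log(PROVIDER_KEYS.includes('client-key-1')); // true, so the 403 branch is skipped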
@@ -55,7 +56,7 @@ const aiFollowupQuestion = async (req, res) => {
     try {
-        const { prompt, sessionId, provider, model = "default-model", key } = req.body;
+        const { prompt, sessionId, provider, model = "gpt-4o-mini", key } = req.body;
         if (!key || !PROVIDER_KEYS.includes(key)) {
             return res.status(403).json({
@@ -82,7 +83,7 @@ const aiFollowupQuestion = async (req, res) => {
         if (req.body['list-ais']) {
             return res.json({
                 success: true,
-                providers: ['OpenAI', 'DeepSeek', 'Gemini', 'Manual'],
+                providers: ['openai', 'deepseek', 'gemini', 'manual'],
             });
         }
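The lowercased ids returned here match the case labels used by the provider switch in the next hunk. A minimal sketch of exercising this branch, assuming a hypothetical POST route at /ai/followup and a key that is present in PROVIDER_KEYS (neither is shown in this diff):

// Sketch only: the route path and key value are assumptions, not part of this commit.
const listRes = await fetch('http://localhost:3000/ai/followup', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ 'list-ais': true, key: 'client-key-1' }),
});
console.log(await listRes.json());
// Expected shape after this change:
// { success: true, providers: ['openai', 'deepseek', 'gemini', 'manual'] }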
@@ -118,23 +119,27 @@ const aiFollowupQuestion = async (req, res) => {
         let aiResponse;
-        switch (provider) {
-            case 'openai':
-                aiResponse = await fetchOpenAICompletion(conversation.conversationHistory, model);
-                break;
-            case 'deepseek':
-                aiResponse = await fetchDeepSeekCompletion(conversation.conversationHistory, model);
-                break;
-            // case 'gemini':
-            //     aiResponse = await fetchGeminiCompletion(conversation.conversationHistory, model);
-            //     break;
-            // Add more cases for other providers as needed
-            default:
-                return res.status(400).json({
-                    success: false,
-                    error: 'Invalid provider specified.',
-                });
-        }
+        if(prompt.length > 3) {
+            switch (provider) {
+                case 'openai':
+                    aiResponse = await fetchOpenAICompletion([{ role: 'user', content: prompt }], model);
+                    // console.log(aiResponse)
+                    break;
+                case 'deepseek':
+                    aiResponse = await fetchDeepSeekCompletion([{ role: 'user', content: prompt }], model);
+                    break;
+                // case 'gemini':
+                //     aiResponse = await fetchGeminiCompletion(conversation.conversationHistory, model);
+                //     break;
+                // Add more cases for other providers as needed
+                default:
+                    return res.status(400).json({
+                        success: false,
+                        error: 'Invalid provider specified.',
+                    });
+            }
+        }
         conversation.conversationHistory.push({ role: 'assistant', content: aiResponse.choices ? aiResponse.choices[0].message.content : aiResponse.response });
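A minimal sketch of a follow-up request that exercises the new code path, again assuming the hypothetical /ai/followup route, an existing sessionId, and a valid key, none of which are defined by this diff:

// Sketch only: route path, sessionId, and key are placeholder assumptions.
const followupRes = await fetch('http://localhost:3000/ai/followup', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
        prompt: 'Summarise the previous answer in one sentence', // longer than 3 characters, so the switch runs
        sessionId: 'example-session',
        provider: 'openai', // or 'deepseek'
        // model omitted: it now defaults to "gpt-4o-mini"
        key: 'client-key-1',
    }),
});
console.log(await followupRes.json());
// Note: after this change only the latest prompt is sent to the provider
// ([{ role: 'user', content: prompt }]) instead of conversation.conversationHistory,
// and a prompt of 3 characters or fewer skips the switch, leaving aiResponse undefined
// when conversationHistory.push reads aiResponse.choices.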