working1
parent e589d66e36
commit 7db4089223
@@ -14,12 +14,13 @@ const aiFollowupQuestion = async (req, res) => {
 const PROVIDER_KEYS = JSON.parse(process.env.PROVIDER_KEY || '[]');
 
 const PROVIDER_MODELS = {
-  openai: [ "gpt-4o", "gpt-4o-mini", "o1", "o1-mini", "gpt-3.5-turbo", "gpt-3.5", "gpt-3.5-turbo-mini", "gpt-3.5-mini", "gpt-3.5-turbo-2", "gpt-3.5-2", "gpt-3.5-turbo-2-mini", "gpt-3.5-2-mini" ],
+  openai: [ "gpt-4o", "gpt-4o-mini", "o1", "o1-mini", "gpt-3.5-turbo", "gpt-3.5-turbo-mini", "gpt-3.5-mini", "gpt-3.5-turbo-2", "gpt-3.5-2", "gpt-3.5-turbo-2-mini", "gpt-3.5-2-mini" ],
   deepseek: [ "deepseek-chat", "deepseek-reasoner", "deepseek-qa", "deepseek-qa-mini" ],
   // Add more providers and models as needed
 };
 
 async function fetchOpenAICompletion(messages, model, max_tokens = MAX_TOKENS) {
   // console.log(messages)
   const response = await fetch('https://api.openai.com/v1/chat/completions', {
     method: 'POST',
     headers: {
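
Note (reviewer): the hunk truncates inside fetchOpenAICompletion. For orientation only, a minimal sketch of how such a wrapper typically completes, assuming the public OpenAI chat-completions API and an OPENAI_API_KEY env var (neither is shown in this diff):

    // Sketch only, not part of this commit. Requires Node 18+ for global fetch.
    async function fetchOpenAICompletion(messages, model, max_tokens = MAX_TOKENS) {
      const response = await fetch('https://api.openai.com/v1/chat/completions', {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          'Authorization': `Bearer ${process.env.OPENAI_API_KEY}`, // assumed env var
        },
        body: JSON.stringify({ model, messages, max_tokens }),
      });
      return response.json(); // responses carry choices[0].message.content
    }
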
@@ -55,7 +56,7 @@ const aiFollowupQuestion = async (req, res) => {
 
   try {
 
-    const { prompt, sessionId, provider, model = "default-model", key } = req.body;
+    const { prompt, sessionId, provider, model = "gpt-4o-mini", key } = req.body;
 
     if (!key || !PROVIDER_KEYS.includes(key)) {
       return res.status(403).json({
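
Note (reviewer): with the new default, omitting model now selects a real model instead of the placeholder "default-model". A hypothetical request body, assuming an Express JSON body parser; the endpoint path below is illustrative, not shown in this diff:

    // Illustrative client call; the path '/api/ai-followup' is an assumption.
    await fetch('/api/ai-followup', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        prompt: 'Can you expand on that?',
        sessionId: 'session-123',      // existing conversation id
        provider: 'openai',
        // model omitted -> falls back to the new default "gpt-4o-mini"
        key: '<one of PROVIDER_KEYS>', // anything else gets a 403
      }),
    });
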
@@ -82,7 +83,7 @@ const aiFollowupQuestion = async (req, res) => {
     if (req.body['list-ais']) {
       return res.json({
         success: true,
-        providers: ['OpenAI', 'DeepSeek', 'Gemini', 'Manual'],
+        providers: ['openai', 'deepseek', 'gemini', 'manual'],
       });
     }
 
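Note (reviewer): lowercasing the advertised ids makes them round-trip: a client can feed any entry from the list-ais response straight back as provider, matching the switch cases below. Illustrative only, reusing the assumed path from above:

    // Discover providers, then reuse an id verbatim.
    const { providers } = await (await fetch('/api/ai-followup', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ 'list-ais': true, key: '<one of PROVIDER_KEYS>' }),
    })).json();
    // providers[0] === 'openai' now matches `case 'openai':` exactly.
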
@@ -118,23 +119,27 @@ const aiFollowupQuestion = async (req, res) => {
 
     let aiResponse;
 
-    switch (provider) {
-      case 'openai':
-        aiResponse = await fetchOpenAICompletion(conversation.conversationHistory, model);
-        break;
-      case 'deepseek':
-        aiResponse = await fetchDeepSeekCompletion(conversation.conversationHistory, model);
-        break;
-      // case 'gemini':
-      //   aiResponse = await fetchGeminiCompletion(conversation.conversationHistory, model);
-      //   break;
-      // Add more cases for other providers as needed
-      default:
-        return res.status(400).json({
-          success: false,
-          error: 'Invalid provider specified.',
-        });
-    }
+    if (prompt.length > 3) {
+      switch (provider) {
+        case 'openai':
+          aiResponse = await fetchOpenAICompletion([{ role: 'user', content: prompt }], model);
+          // console.log(aiResponse)
+          break;
+        case 'deepseek':
+          aiResponse = await fetchDeepSeekCompletion([{ role: 'user', content: prompt }], model);
+          break;
+        // case 'gemini':
+        //   aiResponse = await fetchGeminiCompletion(conversation.conversationHistory, model);
+        //   break;
+        // Add more cases for other providers as needed
+        default:
+          return res.status(400).json({
+            success: false,
+            error: 'Invalid provider specified.',
+          });
+      }
+    }
 
     conversation.conversationHistory.push({ role: 'assistant', content: aiResponse.choices ? aiResponse.choices[0].message.content : aiResponse.response });
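
Note (reviewer): when prompt.length <= 3, the new branch skips the switch entirely, so aiResponse stays undefined and the push on the trailing context line throws on aiResponse.choices. A short-circuit guard, offered as a suggestion only:

    // Suggested (not in this commit): reject short prompts before the switch.
    if (!prompt || prompt.length <= 3) {
      return res.status(400).json({ success: false, error: 'Prompt too short.' });
    }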