aiTest

parent 136286599f
commit f5d6c3cedc
@@ -1,6 +1,6 @@
 const aiFollowupQuestion = async (req, res) => {
 const { MongoClient } = require('mongodb');
-const fetch = require('node-fetch');
+// const fetch = require('node-fetch');
 const { v4: uuidv4 } = require('uuid');
 const url = process.env.MONGODB_URL;
 const dbName = process.env.MONGO_DB_NAME;
@@ -10,7 +10,9 @@ const aiFollowupQuestion = async (req, res) => {
 const database = client.db(dbName);
 const conversationsCollection = database.collection('conversations');

-async function fetchOpenAICompletion(messages, model = "gpt-3.5-turbo-16k", max_tokens = 200) {
+const MAX_TOKENS = 200; // Set max_tokens as a constant
+
+async function fetchOpenAICompletion(messages, model, max_tokens = MAX_TOKENS) {
 const response = await fetch('https://api.openai.com/v1/chat/completions', {
 method: 'POST',
 headers: {
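For context, the hunk above is truncated at the headers object. A complete call presumably looks roughly like the sketch below; this is only an illustration of the public OpenAI chat completions API, not code from this commit, and OPENAI_API_KEY is an assumed environment variable name.

// Sketch only - auth header, body fields and defaults are assumptions,
// following the standard OpenAI chat completions API rather than this repo.
async function fetchOpenAICompletionSketch(messages, model, max_tokens = 200) {
    const response = await fetch('https://api.openai.com/v1/chat/completions', {
        method: 'POST',
        headers: {
            'Authorization': `Bearer ${process.env.OPENAI_API_KEY}`, // assumed env var
            'Content-Type': 'application/json',
        },
        body: JSON.stringify({ model, messages, max_tokens }),
    });
    return response.json();
}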
@@ -26,7 +28,7 @@ const aiFollowupQuestion = async (req, res) => {
 return response.json();
 }

-async function fetchDeepSeekCompletion(messages) {
+async function fetchDeepSeekCompletion(messages, model) {
 const response = await fetch('https://api.deepseek.com/chat/completions', {
 method: 'POST',
 headers: {
@@ -34,7 +36,7 @@ const aiFollowupQuestion = async (req, res) => {
 'Content-Type': 'application/json',
 },
 body: JSON.stringify({
-model: "deepseek-chat",
+model: model,
 messages: messages,
 stream: false,
 }),
@@ -45,7 +47,15 @@ const aiFollowupQuestion = async (req, res) => {
 // Similar function for Gemini can be added here...

 try {
-const { prompt, sessionId, provider, model, max_tokens = 200 } = req.body;
+// Check if the request is for listing AI providers
+if (req.body['list-ais']) {
+return res.json({
+success: true,
+providers: ['OpenAI', 'DeepSeek', 'Gemini', 'Manual'],
+});
+}
+
+const { prompt, sessionId, provider, model = "default-model" } = req.body;

 if (!conversationsCollection) {
 return res.status(500).json({
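With the list-ais branch and the new destructuring in place, the handler effectively accepts two request shapes. The field names below come from this diff; the concrete values are made up for illustration.

// Illustrative request bodies only (values are hypothetical):
const listProvidersBody = { 'list-ais': true };      // answered with { success: true, providers: [...] }
const followupBody = {
    prompt: 'What should I ask next?',               // hypothetical prompt
    sessionId: 'example-session-id',                 // hypothetical session id
    provider: 'openai',
    // model omitted -> falls back to "default-model"
};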
@@ -81,13 +91,13 @@ const aiFollowupQuestion = async (req, res) => {

 switch (provider) {
 case 'openai':
-aiResponse = await fetchOpenAICompletion(conversation.conversationHistory, model, max_tokens);
+aiResponse = await fetchOpenAICompletion(conversation.conversationHistory, model);
 break;
 case 'deepseek':
-aiResponse = await fetchDeepSeekCompletion(conversation.conversationHistory);
+aiResponse = await fetchDeepSeekCompletion(conversation.conversationHistory, model);
 break;
 // case 'gemini':
-// aiResponse = await fetchGeminiCompletion(conversation.conversationHistory);
+// aiResponse = await fetchGeminiCompletion(conversation.conversationHistory, model);
 // break;
 // Add more cases for other providers as needed
 default:
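The gemini branch stays commented out in this commit. If it were filled in later, one plausible shape, using Google's public generateContent REST endpoint, is sketched below; the function body, the GEMINI_API_KEY variable, and the history-to-contents mapping are all assumptions, not part of this change.

// Sketch only - not part of this commit.
async function fetchGeminiCompletion(messages, model) {
    // Map OpenAI-style { role, content } history to Gemini's contents/parts format (assumed history shape).
    const contents = messages.map((m) => ({
        role: m.role === 'assistant' ? 'model' : 'user',
        parts: [{ text: m.content }],
    }));
    const response = await fetch(
        `https://generativelanguage.googleapis.com/v1beta/models/${model}:generateContent?key=${process.env.GEMINI_API_KEY}`,
        {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify({ contents }),
        }
    );
    return response.json();
}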

@@ -34,6 +34,7 @@ const aiEvaluateImageToStar = require("../api/aiEvaluateImageToStar");
 const getGameInfo = require("../api/getGameInfo");
 const saveGalleryImage = require("../api/saveGalleryImage");
 const getGalleryImage = require("../api/getGalleryImage");
+const aiTest = require("../api/aiTest");


 const router = express.Router();
@@ -207,6 +208,11 @@ router.get("/ping", (req, res) => {
 router.get("/getGalleryImage", (req, res) => {
 getGalleryImage(req, res);
 });

+// Get Drawing Game Gallery Image
+router.post("/aiTest", (req, res) => {
+aiTest(req, res);
+});
+

 module.exports = router;
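Once the route is registered, the endpoint can be exercised from a client roughly as follows. Only the /aiTest path and the field names come from this diff; the base URL (including any mount prefix for this router) and the body values are assumptions.

// Sketch of a client call (base URL and values are assumptions).
const res = await fetch('http://localhost:3000/aiTest', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ 'list-ais': true }),      // or { prompt, sessionId, provider, model }
});
console.log(await res.json());                       // e.g. { success: true, providers: [...] }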