chat-with-ai/src/contexts/ConfigContext.tsx

import React, { createContext, useContext, useState, useEffect } from 'react';
import { WEBLLM_MODELS, isWebLLMSupported } from '../utils/webLLMUtils';
// Define types for provider
export type Provider = 'openai' | 'anthropic' | 'google' | 'deepseek' | 'webllm' | 'custom';
// Model interface
export interface Model {
  id: string;
  name: string;
  maxTokens?: number;
  provider: Provider;
}
// Configuration context interface
interface ConfigContextType {
  provider: Provider;
  setProvider: (provider: Provider) => void;
  apiKey: string;
  setApiKey: (key: string) => void;
  endpoint: string;
  setEndpoint: (endpoint: string) => void;
  selectedModel: string;
  setSelectedModel: (model: string) => void;
  availableModels: Model[];
  streamingEnabled: boolean;
  setStreamingEnabled: (enabled: boolean) => void;
  isWebLLMAvailable: boolean;
  webLLMStatusMessage: string;
}
// Create the context
const ConfigContext = createContext<ConfigContextType | undefined>(undefined);
// Default OpenAI models
const OPENAI_MODELS: Model[] = [
  { id: 'gpt-4o', name: 'GPT-4o', provider: 'openai' },
  { id: 'gpt-4-turbo', name: 'GPT-4 Turbo', provider: 'openai' },
  { id: 'gpt-4', name: 'GPT-4', provider: 'openai' },
  { id: 'gpt-3.5-turbo', name: 'GPT-3.5 Turbo', provider: 'openai' }
];
// Default Anthropic models
const ANTHROPIC_MODELS: Model[] = [
  { id: 'claude-3-opus-20240229', name: 'Claude 3 Opus', provider: 'anthropic' },
  { id: 'claude-3-sonnet-20240229', name: 'Claude 3 Sonnet', provider: 'anthropic' },
  { id: 'claude-3-haiku-20240307', name: 'Claude 3 Haiku', provider: 'anthropic' },
  { id: 'claude-2.1', name: 'Claude 2.1', provider: 'anthropic' },
  { id: 'claude-2.0', name: 'Claude 2.0', provider: 'anthropic' },
  { id: 'claude-instant-1.2', name: 'Claude Instant 1.2', provider: 'anthropic' }
];
// Default Google models
const GOOGLE_MODELS: Model[] = [
  { id: 'gemini-1.5-pro', name: 'Gemini 1.5 Pro', provider: 'google' },
  { id: 'gemini-1.5-flash', name: 'Gemini 1.5 Flash', provider: 'google' },
  { id: 'gemini-1.0-pro', name: 'Gemini 1.0 Pro', provider: 'google' },
  { id: 'gemini-1.0-ultra', name: 'Gemini 1.0 Ultra', provider: 'google' }
];
// Default DeepSeek models
const DEEPSEEK_MODELS: Model[] = [
  { id: 'deepseek-chat', name: 'DeepSeek Chat', provider: 'deepseek' },
  { id: 'deepseek-coder', name: 'DeepSeek Coder', provider: 'deepseek' }
];
// WebLLM models (from WebLLMUtils)
const webLLMModels: Model[] = WEBLLM_MODELS.map(model => ({
  id: model.id,
  name: model.name,
  maxTokens: model.contextLength,
  provider: 'webllm' as Provider
}));
// Get provider endpoint
const getProviderEndpoint = (provider: Provider): string => {
  switch (provider) {
    case 'openai':
      return 'https://api.openai.com/v1/chat/completions';
    case 'anthropic':
      return 'https://api.anthropic.com/v1/messages';
    case 'google':
      // Note: this URL pins gemini-1.0-pro; callers selecting a different
      // Gemini model need to substitute it into the path.
      return 'https://generativelanguage.googleapis.com/v1beta/models/gemini-1.0-pro:generateContent';
    case 'deepseek':
      return 'https://api.deepseek.com/v1/chat/completions';
    case 'webllm':
      return 'browser'; // Special marker for browser-based models
    case 'custom':
      return ''; // User supplies their own endpoint
    default:
      return '';
  }
};
// Get models for provider
const getModelsForProvider = (provider: Provider): Model[] => {
  switch (provider) {
    case 'openai':
      return OPENAI_MODELS;
    case 'anthropic':
      return ANTHROPIC_MODELS;
    case 'google':
      return GOOGLE_MODELS;
    case 'deepseek':
      return DEEPSEEK_MODELS;
    case 'webllm':
      return webLLMModels;
    case 'custom':
      return []; // Custom provider requires the user to specify models
    default:
      return [];
  }
};
export const ConfigProvider: React.FC<{ children: React.ReactNode }> = ({ children }) => {
  // Initialize from localStorage if available
  const [provider, setProvider] = useState<Provider>(() => {
    if (typeof window !== 'undefined') {
      const savedProvider = localStorage.getItem('provider');
      return (savedProvider as Provider) || 'openai';
    }
    return 'openai';
  });
  const [apiKey, setApiKey] = useState<string>(() => {
    if (typeof window !== 'undefined') {
      return localStorage.getItem('apiKey') || '';
    }
    return '';
  });
  const [endpoint, setEndpoint] = useState<string>(() => {
    if (typeof window !== 'undefined') {
      const savedEndpoint = localStorage.getItem('endpoint');
      if (savedEndpoint) return savedEndpoint;
    }
    return getProviderEndpoint(provider);
  });
  const [selectedModel, setSelectedModel] = useState<string>(() => {
    if (typeof window !== 'undefined') {
      const savedModel = localStorage.getItem('selectedModel');
      if (savedModel) return savedModel;
    }
    // Default model for each provider
    const defaultModels: Record<Provider, string> = {
      'openai': 'gpt-3.5-turbo',
      'anthropic': 'claude-3-haiku-20240307',
      'google': 'gemini-1.0-pro',
      'deepseek': 'deepseek-chat',
      'webllm': 'TinyLlama-1.1B-Chat-v1.0-q4f16_1',
      'custom': ''
    };
    return defaultModels[provider];
  });
  const [streamingEnabled, setStreamingEnabled] = useState<boolean>(() => {
    if (typeof window !== 'undefined') {
      const saved = localStorage.getItem('streamingEnabled');
      return saved ? saved === 'true' : true; // default to true
    }
    return true;
  });
  // WebLLM support
  const [isWebLLMAvailable, setIsWebLLMAvailable] = useState<boolean>(false);
  const [webLLMStatusMessage, setWebLLMStatusMessage] = useState<string>('Checking WebLLM support...');
  // Models available for the current provider (kept in sync by the effect below)
  const [availableModels, setAvailableModels] = useState<Model[]>(
    getModelsForProvider(provider)
  );
  // Check WebLLM support on component mount
  useEffect(() => {
    // Only run in a browser environment
    if (typeof window !== 'undefined') {
      try {
        const supported = isWebLLMSupported();
        setIsWebLLMAvailable(supported);
        setWebLLMStatusMessage(supported
          ? 'WebLLM is supported in this browser'
          : 'WebLLM is not supported in this browser (requires WebAssembly, SharedArrayBuffer, and the Atomics API)');
      } catch (error) {
        setIsWebLLMAvailable(false);
        setWebLLMStatusMessage('Error checking WebLLM support: ' +
          (error instanceof Error ? error.message : 'Unknown error'));
      }
    }
  }, []);
  // Update models when the provider changes
  useEffect(() => {
    const models = getModelsForProvider(provider);
    setAvailableModels(models);
    // If the selected model does not belong to the new provider, fall back to its first model
    const modelExists = models.some(model => model.id === selectedModel);
    if (!modelExists && models.length > 0) {
      setSelectedModel(models[0].id);
    }
    // Update the endpoint when the provider changes (unless it is custom)
    if (provider !== 'custom') {
      setEndpoint(getProviderEndpoint(provider));
    }
    // Save to localStorage
    if (typeof window !== 'undefined') {
      localStorage.setItem('provider', provider);
    }
  }, [provider, selectedModel]);
  // Persist settings to localStorage when they change.
  // Note: the API key is stored in plain text in localStorage.
  useEffect(() => {
    if (typeof window !== 'undefined') {
      localStorage.setItem('apiKey', apiKey);
      localStorage.setItem('endpoint', endpoint);
      localStorage.setItem('selectedModel', selectedModel);
      localStorage.setItem('streamingEnabled', String(streamingEnabled));
    }
  }, [apiKey, endpoint, selectedModel, streamingEnabled]);
  const value: ConfigContextType = {
    provider,
    setProvider,
    apiKey,
    setApiKey,
    endpoint,
    setEndpoint,
    selectedModel,
    setSelectedModel,
    availableModels,
    streamingEnabled,
    setStreamingEnabled,
    isWebLLMAvailable,
    webLLMStatusMessage
  };
  return (
    <ConfigContext.Provider value={value}>
      {children}
    </ConfigContext.Provider>
  );
};
export const useConfig = (): ConfigContextType => {
  const context = useContext(ConfigContext);
  if (context === undefined) {
    throw new Error('useConfig must be used within a ConfigProvider');
  }
  return context;
};