const express = require('express');
const cors = require('cors');
const bodyParser = require('body-parser');
const multer = require('multer');
const rateLimit = require('express-rate-limit');
const FormData = require('form-data');

const app = express();
const PORT = process.env.PORT || 5000;

// Configure multer for file uploads (audio/images): in-memory storage, 25MB cap.
const upload = multer({
  storage: multer.memoryStorage(),
  limits: { fileSize: 25 * 1024 * 1024 } // 25MB limit
});

// Trust proxy - CRITICAL for Replit deployment: client IP comes from
// X-Forwarded-For, which the rate limiters key on.
app.set('trust proxy', 1);

// Middleware - CORS Configuration.
// ALLOWED_ORIGINS is a comma-separated whitelist; unset means allow all origins.
const corsOptions = {
  origin: process.env.ALLOWED_ORIGINS ? process.env.ALLOWED_ORIGINS.split(',') : '*',
  credentials: true,
  maxAge: 86400 // 24 hours cache
};
app.use(cors(corsOptions));
app.use(bodyParser.json({ limit: '50mb' }));

// ==========================================
// RATE LIMITING
// ==========================================

// Seconds remaining until the limiter window resets.
// FIX: the previous handlers returned Math.ceil(req.rateLimit.resetTime / 1000),
// which is an absolute UNIX timestamp (resetTime is a Date), not a retry delay.
function secondsUntilReset(req) {
  return Math.max(0, Math.ceil((req.rateLimit.resetTime - Date.now()) / 1000));
}

// General API Rate Limiter: 100 requests/minute per IP
const generalLimiter = rateLimit({
  windowMs: 1 * 60 * 1000, // 1 minute
  max: 100,
  message: {
    error: 'Too many requests from this IP, please try again later.',
    retryAfter: '1 minute'
  },
  standardHeaders: true,
  legacyHeaders: false,
  handler: (req, res) => {
    res.status(429).json({
      error: 'Rate limit exceeded',
      message: 'Too many requests from this IP, please try again later.',
      retryAfter: secondsUntilReset(req)
    });
  }
});

// AI Chat Rate Limiter: 20 requests/minute per IP (expensive operations)
const aiChatLimiter = rateLimit({
  windowMs: 1 * 60 * 1000, // 1 minute
  max: 20,
  message: {
    error: 'Too many AI chat requests, please slow down.',
    retryAfter: '1 minute'
  },
  standardHeaders: true,
  legacyHeaders: false,
  handler: (req, res) => {
    res.status(429).json({
      error: 'Rate limit exceeded',
      // FIX: this literal previously contained a raw line break, which is a
      // syntax error inside a normal (non-template) string.
      message: 'Too many AI chat requests from this IP. AI operations are expensive, please slow down.',
      retryAfter: secondsUntilReset(req)
    });
  }
});

// Image Generation Rate Limiter: 10 requests/hour per IP (very expensive)
const imageGenLimiter = rateLimit({
  windowMs: 60 * 60 * 1000, // 1 hour
  max: 10,
  message: {
    error: 'Too many image generation requests.',
    retryAfter: '1 hour'
  },
  standardHeaders: true,
  legacyHeaders: false,
  handler: (req, res) => {
    res.status(429).json({
      error: 'Rate limit exceeded',
      message: 'Too many image generation requests from this IP. Image generation is very expensive, please wait before trying again.',
      retryAfter: secondsUntilReset(req)
    });
  }
});

// Apply general rate limiter to all routes
app.use(generalLimiter);

// ==========================================
// HELPER FUNCTIONS
// ==========================================

/**
 * fetch() with an AbortController-based timeout (default 30s).
 * On timeout, throws an Error with .status = 408 and a JSON-able .data payload,
 * matching the error shape the route handlers forward to clients.
 */
async function fetchWithTimeout(url, options = {}, timeoutMs = 30000) {
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), timeoutMs);
  try {
    return await fetch(url, { ...options, signal: controller.signal });
  } catch (error) {
    if (error.name === 'AbortError') {
      const timeoutError = new Error(`Request timeout after ${timeoutMs}ms`);
      timeoutError.status = 408; // Request Timeout
      timeoutError.data = {
        error: 'Request Timeout',
        message: `The request took longer than ${timeoutMs}ms`
      };
      throw timeoutError;
    }
    throw error;
  } finally {
    clearTimeout(timer);
  }
}

// Attach a request body to `options`, setting the matching headers.
// FormData bodies need the multipart boundary headers from the form-data
// library; everything else is JSON-encoded. (Shared by all call* helpers.)
function setRequestBody(headers, options, body, isFormData) {
  if (isFormData) {
    // FormData requires special headers from the form-data library
    Object.assign(headers, body.getHeaders());
    options.body = body;
  } else {
    headers['Content-Type'] = 'application/json';
    options.body = JSON.stringify(body);
  }
}

// Parse a provider response as JSON; on a non-2xx status, throw an Error
// carrying .status and the parsed upstream error payload in .data.
async function parseProviderResponse(response, failureMessage) {
  const data = await response.json();
  if (!response.ok) {
    const error = new Error(failureMessage);
    error.status = response.status;
    error.data = data;
    throw error;
  }
  return data;
}

// Helper function to call OpenAI API
async function callOpenAI(endpoint, body, method = 'POST', isFormData = false) {
  const apiKey = process.env.OPENAI_API_KEY;
  if (!apiKey) {
    throw new Error('OpenAI API key not configured');
  }
  const headers = { 'Authorization': `Bearer ${apiKey}` };
  const options = { method, headers };
  // Only add body for POST/PUT/PATCH methods
  if (method !== 'GET' && method !== 'DELETE' && method !== 'HEAD') {
    setRequestBody(headers, options, body, isFormData);
  }
  const response = await fetchWithTimeout(`https://api.openai.com/v1${endpoint}`, options);
  return parseProviderResponse(response, 'OpenAI API request failed');
}

// Helper function to call Mistral AI API
async function callMistral(endpoint, body, method = 'POST', isFormData = false) {
  const apiKey = process.env.MISTRAL_API_KEY;
  if (!apiKey) {
    throw new Error('Mistral API key not configured');
  }
  const headers = { 'Authorization': `Bearer ${apiKey}` };
  const options = { method, headers };
  // Only add body for POST/PUT/PATCH methods
  if (method !== 'GET' && method !== 'DELETE' && method !== 'HEAD') {
    setRequestBody(headers, options, body, isFormData);
  }
  const response = await fetchWithTimeout(`https://api.mistral.ai/v1${endpoint}`, options);
  return parseProviderResponse(response, 'Mistral AI API request failed');
}

// Helper function to call EOD Historical Data API
async function callEOD(endpoint, queryParams = {}) {
  const apiKey = process.env.EOD_API_KEY;
  if (!apiKey) {
    throw new Error('EOD API key not configured');
  }
  // FIX: copy before adding credentials so the caller's object is not mutated.
  // API key + JSON format are appended to every request.
  const queryString = new URLSearchParams({
    ...queryParams,
    api_token: apiKey,
    fmt: 'json'
  }).toString();
  const response = await fetchWithTimeout(`https://eodhd.com/api${endpoint}?${queryString}`);
  return parseProviderResponse(response, 'EOD Historical Data API request failed');
}

// Helper function to call Claude/Anthropic API
async function callClaude(endpoint, body, method = 'POST', isFormData = false) {
  const apiKey = process.env.ANTHROPIC_API_KEY;
  if (!apiKey) {
    throw new Error('Anthropic API key not configured');
  }
  const headers = { 'x-api-key': apiKey, 'anthropic-version': '2023-06-01' };
  const options = { method, headers };
  if (method !== 'GET' && method !== 'DELETE' && method !== 'HEAD') {
    setRequestBody(headers, options, body, isFormData);
  }
  const response = await fetchWithTimeout(`https://api.anthropic.com${endpoint}`, options);
  return parseProviderResponse(response, 'Claude/Anthropic API request failed');
}

// Helper function to call Perplexity API (JSON-only; no multipart support)
async function callPerplexity(endpoint, body, method = 'POST') {
  const apiKey = process.env.PERPLEXITY_API_KEY;
  if (!apiKey) {
    throw new Error('Perplexity API key not configured');
  }
  const headers = {
    'Authorization': `Bearer ${apiKey}`,
    'Content-Type': 'application/json'
  };
  const options = { method, headers };
  if (method !== 'GET' && method !== 'DELETE' && method !== 'HEAD') {
    options.body = JSON.stringify(body);
  }
  const response = await fetchWithTimeout(`https://api.perplexity.ai${endpoint}`, options);
  return parseProviderResponse(response, 'Perplexity API request failed');
}

// Helper function to call Google Gemini API
async function callGemini(endpoint, body, method = 'POST', isFormData = false) {
  const apiKey = process.env.GEMINI_API_KEY;
  if (!apiKey) {
    throw new Error('Gemini API key not configured');
  }
  const headers = { 'x-goog-api-key': apiKey };
  const options = { method, headers };
  if (method !== 'GET' && method !== 'DELETE' && method !== 'HEAD') {
    setRequestBody(headers, options, body, isFormData);
  }
  const response = await fetchWithTimeout(`https://generativelanguage.googleapis.com/v1beta${endpoint}`, options);
  return parseProviderResponse(response, 'Google Gemini API request failed');
}

// Helper function to call RapidAPI. GET requests put params in the query
// string; POST requests JSON-encode them as the body.
async function callRapidAPI(host, endpoint, params = {}, method = 'GET') {
  const apiKey = process.env.RAPIDAPI_KEY;
  if (!apiKey) {
    throw new Error('RapidAPI key not configured');
  }
  const headers = {
    'x-rapidapi-key': apiKey,
    'x-rapidapi-host': host,
    'Content-Type': 'application/json'
  };
  let url = `https://${host}${endpoint}`;
  const options = { method, headers };
  if (method === 'GET' && Object.keys(params).length > 0) {
    const queryString = new URLSearchParams(params).toString();
    url += `?${queryString}`;
  } else if (method === 'POST') {
    options.body = JSON.stringify(params);
  }
  const response = await fetchWithTimeout(url, options);
  return parseProviderResponse(response, 'RapidAPI request failed');
}

// ==========================================
// OPENAI API ENDPOINTS
// ==========================================
// 1. CHAT COMPLETIONS
// POST /api/chat - proxy to OpenAI chat completions with optional SSE streaming.
app.post('/api/chat', aiChatLimiter, async (req, res) => {
  try {
    const { messages, model, max_tokens, temperature, stream, tools, tool_choice } = req.body;

    // messages must be a non-empty array.
    if (!messages || !Array.isArray(messages) || messages.length === 0) {
      return res.status(400).json({
        error: 'Bad Request',
        message: 'messages field is required and must be a non-empty array'
      });
    }

    const apiKey = process.env.OPENAI_API_KEY;
    if (!apiKey) {
      return res.status(500).json({ error: 'OpenAI API key not configured' });
    }

    const payload = {
      model: model || 'gpt-4o-mini',
      messages,
      max_tokens: max_tokens || 8000,
      temperature: temperature !== undefined ? temperature : 0.8,
      stream: stream || false,
      ...(tools && { tools }),
      ...(tool_choice && { tool_choice })
    };

    if (!stream) {
      // Non-streaming: plain JSON round-trip through the shared helper.
      const data = await callOpenAI('/chat/completions', payload);
      return res.json(data);
    }

    // Streaming: call OpenAI directly and relay the SSE bytes to the client.
    const upstream = await fetchWithTimeout('https://api.openai.com/v1/chat/completions', {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'Authorization': `Bearer ${apiKey}`
      },
      body: JSON.stringify(payload)
    });

    if (!upstream.ok) {
      const error = await upstream.json();
      return res.status(upstream.status).json(error);
    }

    res.setHeader('Content-Type', 'text/event-stream');
    res.setHeader('Cache-Control', 'no-cache');
    res.setHeader('Connection', 'keep-alive');

    const reader = upstream.body.getReader();
    const decoder = new TextDecoder();
    try {
      for (;;) {
        const { done, value } = await reader.read();
        if (done) break;
        res.write(decoder.decode(value, { stream: true }));
      }
    } catch (streamError) {
      console.error('Streaming error:', streamError);
    } finally {
      // End the response whether the pump finished or failed mid-stream.
      res.end();
    }
  } catch (error) {
    console.error('Error calling OpenAI Chat API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});
// 2. EMBEDDINGS
// POST /api/embeddings - OpenAI embeddings proxy.
app.post('/api/embeddings', async (req, res) => {
  try {
    const { input, model, encoding_format } = req.body;
    // input may be a single string or an array of strings.
    if (!input || (typeof input !== 'string' && !Array.isArray(input))) {
      return res.status(400).json({
        error: 'Bad Request',
        message: 'input field is required and must be a string or array of strings'
      });
    }
    const body = {
      model: model || 'text-embedding-3-small',
      input,
      ...(encoding_format && { encoding_format })
    };
    res.json(await callOpenAI('/embeddings', body));
  } catch (error) {
    console.error('Error calling OpenAI Embeddings API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});

// 3. IMAGE GENERATION (DALL-E)
// POST /api/images/generate - OpenAI image-generation proxy.
app.post('/api/images/generate', imageGenLimiter, async (req, res) => {
  try {
    const { prompt, model, n, size, quality, style, response_format } = req.body;
    if (!prompt || typeof prompt !== 'string' || prompt.trim() === '') {
      return res.status(400).json({
        error: 'Bad Request',
        message: 'prompt field is required and must be a non-empty string'
      });
    }
    const body = {
      prompt,
      model: model || 'dall-e-3',
      n: n || 1,
      size: size || '1024x1024',
      quality: quality || 'standard',
      ...(style && { style }),
      response_format: response_format || 'url'
    };
    res.json(await callOpenAI('/images/generations', body));
  } catch (error) {
    console.error('Error calling OpenAI Images API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});
// 4. IMAGE EDITS
// POST /api/images/edit - multipart proxy: prompt + image (+ optional mask).
app.post('/api/images/edit', imageGenLimiter, upload.fields([
  { name: 'image', maxCount: 1 },
  { name: 'mask', maxCount: 1 }
]), async (req, res) => {
  try {
    const { prompt, model, n, size, response_format } = req.body;
    if (!prompt || typeof prompt !== 'string' || prompt.trim() === '') {
      return res.status(400).json({
        error: 'Bad Request',
        message: 'prompt field is required and must be a non-empty string'
      });
    }
    if (!req.files || !req.files.image) {
      return res.status(400).json({
        error: 'Bad Request',
        message: 'image file is required for image editing'
      });
    }

    const form = new FormData();
    form.append('prompt', prompt);
    if (model) form.append('model', model);
    if (n) form.append('n', n);
    if (size) form.append('size', size);
    if (response_format) form.append('response_format', response_format);

    // Attach an uploaded file (multer memory buffer) to the multipart form.
    const attach = (field, file) => form.append(field, file.buffer, {
      filename: file.originalname,
      contentType: file.mimetype
    });
    attach('image', req.files.image[0]); // presence validated above
    if (req.files.mask) attach('mask', req.files.mask[0]);

    res.json(await callOpenAI('/images/edits', form, 'POST', true));
  } catch (error) {
    console.error('Error calling OpenAI Image Edit API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});
// 5. IMAGE VARIATIONS
// POST /api/images/variations - multipart proxy around an uploaded image.
app.post('/api/images/variations', imageGenLimiter, upload.single('image'), async (req, res) => {
  try {
    const { model, n, size, response_format } = req.body;
    if (!req.file) {
      return res.status(400).json({
        error: 'Bad Request',
        message: 'image file is required for creating variations'
      });
    }
    const form = new FormData();
    if (model) form.append('model', model);
    if (n) form.append('n', n);
    if (size) form.append('size', size);
    if (response_format) form.append('response_format', response_format);
    form.append('image', req.file.buffer, {
      filename: req.file.originalname,
      contentType: req.file.mimetype
    });
    res.json(await callOpenAI('/images/variations', form, 'POST', true));
  } catch (error) {
    console.error('Error calling OpenAI Image Variations API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});

// 6. AUDIO TRANSCRIPTION (Whisper)
// POST /api/audio/transcriptions - multipart fields arrive as strings via multer.
app.post('/api/audio/transcriptions', upload.single('file'), async (req, res) => {
  try {
    const { model, language, prompt, response_format, temperature } = req.body;
    if (!req.file) {
      return res.status(400).json({
        error: 'Bad Request',
        message: 'audio file is required for transcription'
      });
    }
    const form = new FormData();
    form.append('model', model || 'whisper-1');
    if (language) form.append('language', language);
    if (prompt) form.append('prompt', prompt);
    if (response_format) form.append('response_format', response_format);
    if (temperature) form.append('temperature', temperature);
    form.append('file', req.file.buffer, {
      filename: req.file.originalname,
      contentType: req.file.mimetype
    });
    res.json(await callOpenAI('/audio/transcriptions', form, 'POST', true));
  } catch (error) {
    console.error('Error calling OpenAI Transcription API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});

// 7. AUDIO TRANSLATION (Whisper)
// POST /api/audio/translations - same shape as transcription, minus language.
app.post('/api/audio/translations', upload.single('file'), async (req, res) => {
  try {
    const { model, prompt, response_format, temperature } = req.body;
    if (!req.file) {
      return res.status(400).json({
        error: 'Bad Request',
        message: 'audio file is required for translation'
      });
    }
    const form = new FormData();
    form.append('model', model || 'whisper-1');
    if (prompt) form.append('prompt', prompt);
    if (response_format) form.append('response_format', response_format);
    if (temperature) form.append('temperature', temperature);
    form.append('file', req.file.buffer, {
      filename: req.file.originalname,
      contentType: req.file.mimetype
    });
    res.json(await callOpenAI('/audio/translations', form, 'POST', true));
  } catch (error) {
    console.error('Error calling OpenAI Translation API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});
// 8. TEXT-TO-SPEECH (TTS)
// POST /api/audio/speech - returns raw audio bytes (not JSON).
app.post('/api/audio/speech', async (req, res) => {
  try {
    const { input, model, voice, response_format, speed } = req.body;
    const apiKey = process.env.OPENAI_API_KEY;
    if (!apiKey) {
      return res.status(500).json({ error: 'OpenAI API key not configured' });
    }
    if (!input || typeof input !== 'string' || input.trim() === '') {
      return res.status(400).json({
        error: 'Bad Request',
        message: 'input field is required and must be a non-empty string'
      });
    }
    const response = await fetchWithTimeout('https://api.openai.com/v1/audio/speech', {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'Authorization': `Bearer ${apiKey}`
      },
      body: JSON.stringify({
        model: model || 'tts-1',
        input,
        voice: voice || 'alloy',
        response_format: response_format || 'mp3',
        ...(speed && { speed })
      })
    });
    if (!response.ok) {
      const error = await response.json();
      return res.status(response.status).json(error);
    }
    // FIX: 'audio/mp3' is not a registered MIME type; MP3 is 'audio/mpeg'.
    const format = response_format || 'mp3';
    const mimeByFormat = {
      mp3: 'audio/mpeg',
      opus: 'audio/opus',
      aac: 'audio/aac',
      flac: 'audio/flac',
      wav: 'audio/wav'
    };
    res.setHeader('Content-Type', mimeByFormat[format] || `audio/${format}`);
    const audioBuffer = await response.arrayBuffer();
    res.send(Buffer.from(audioBuffer));
  } catch (error) {
    console.error('Error calling OpenAI TTS API:', error);
    // FIX: propagate upstream status/payload like every other route
    // (previously always a bare 500 that hid timeout/auth errors).
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});

// 9. MODERATIONS
// POST /api/moderations - OpenAI content-moderation proxy.
app.post('/api/moderations', async (req, res) => {
  try {
    const { input, model } = req.body;
    if (!input || (typeof input !== 'string' && !Array.isArray(input))) {
      return res.status(400).json({
        error: 'Bad Request',
        message: 'input field is required and must be a string or array'
      });
    }
    const data = await callOpenAI('/moderations', { input, ...(model && { model }) });
    res.json(data);
  } catch (error) {
    console.error('Error calling OpenAI Moderations API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});
// 10. MODELS LIST
app.get('/api/models', async (req, res) => {
  try {
    res.json(await callOpenAI('/models', {}, 'GET'));
  } catch (error) {
    console.error('Error calling OpenAI Models API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});

// 11. FILES (for Assistants API)
// Upload a file; purpose defaults to 'assistants'.
app.post('/api/files', upload.single('file'), async (req, res) => {
  try {
    const { purpose } = req.body;
    if (!req.file) {
      return res.status(400).json({
        error: 'Bad Request',
        message: 'file is required for upload'
      });
    }
    const form = new FormData();
    form.append('purpose', purpose || 'assistants');
    form.append('file', req.file.buffer, {
      filename: req.file.originalname,
      contentType: req.file.mimetype
    });
    res.json(await callOpenAI('/files', form, 'POST', true));
  } catch (error) {
    console.error('Error calling OpenAI Files API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});

// List uploaded files.
app.get('/api/files', async (req, res) => {
  try {
    res.json(await callOpenAI('/files', {}, 'GET'));
  } catch (error) {
    console.error('Error calling OpenAI Files API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});

// Delete a file by id.
app.delete('/api/files/:file_id', async (req, res) => {
  try {
    res.json(await callOpenAI(`/files/${req.params.file_id}`, {}, 'DELETE'));
  } catch (error) {
    console.error('Error calling OpenAI Files API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});
// 12. ASSISTANTS API
// NOTE(review): the Assistants v2 API documents an 'OpenAI-Beta: assistants=v2'
// request header which callOpenAI does not send — verify against the OpenAI docs.
//
// Shared responder: forward an OpenAI call and translate failures to HTTP.
const respondOpenAI = async (res, label, call) => {
  try {
    res.json(await call());
  } catch (error) {
    console.error(`Error calling OpenAI ${label} API:`, error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
};

app.post('/api/assistants', (req, res) =>
  respondOpenAI(res, 'Assistants', () => callOpenAI('/assistants', req.body)));

app.get('/api/assistants', (req, res) =>
  respondOpenAI(res, 'Assistants', () => callOpenAI('/assistants', {}, 'GET')));

// 13. THREADS API
app.post('/api/threads', (req, res) =>
  respondOpenAI(res, 'Threads', () => callOpenAI('/threads', req.body)));

// 14. MESSAGES API
app.post('/api/threads/:thread_id/messages', (req, res) =>
  respondOpenAI(res, 'Messages', () =>
    callOpenAI(`/threads/${req.params.thread_id}/messages`, req.body)));

// 15. RUNS API
app.post('/api/threads/:thread_id/runs', (req, res) =>
  respondOpenAI(res, 'Runs', () =>
    callOpenAI(`/threads/${req.params.thread_id}/runs`, req.body)));

// ==========================================
// MISTRAL AI API ENDPOINTS
// ==========================================
// 1. CHAT COMPLETIONS
// POST /mistral/chat - Mistral chat-completions proxy with optional SSE streaming.
app.post('/mistral/chat', aiChatLimiter, async (req, res) => {
  try {
    const { messages, model, max_tokens, temperature, stream, tools, tool_choice } = req.body;

    // Request validation
    if (!messages || !Array.isArray(messages) || messages.length === 0) {
      return res.status(400).json({
        error: 'Bad Request',
        message: 'messages field is required and must be a non-empty array'
      });
    }

    const requestBody = {
      model: model || 'mistral-large-latest',
      messages,
      max_tokens: max_tokens || 8000,
      temperature: temperature !== undefined ? temperature : 0.7,
      stream: stream || false,
      ...(tools && { tools }),
      ...(tool_choice && { tool_choice })
    };

    if (stream) {
      const apiKey = process.env.MISTRAL_API_KEY;
      // FIX: the streaming path previously sent 'Bearer undefined' upstream
      // when the key was missing; fail fast like the OpenAI chat route does.
      if (!apiKey) {
        return res.status(500).json({ error: 'Mistral API key not configured' });
      }
      const response = await fetchWithTimeout('https://api.mistral.ai/v1/chat/completions', {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          'Authorization': `Bearer ${apiKey}`
        },
        body: JSON.stringify(requestBody)
      });
      if (!response.ok) {
        const error = await response.json();
        return res.status(response.status).json(error);
      }
      res.setHeader('Content-Type', 'text/event-stream');
      res.setHeader('Cache-Control', 'no-cache');
      res.setHeader('Connection', 'keep-alive');
      // Relay the SSE byte stream to the client as-is.
      const reader = response.body.getReader();
      const decoder = new TextDecoder();
      try {
        while (true) {
          const { done, value } = await reader.read();
          if (done) break;
          res.write(decoder.decode(value, { stream: true }));
        }
        res.end();
      } catch (streamError) {
        console.error('Streaming error:', streamError);
        res.end();
      }
    } else {
      const data = await callMistral('/chat/completions', requestBody);
      res.json(data);
    }
  } catch (error) {
    console.error('Error calling Mistral Chat API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});
// 2. EMBEDDINGS
app.post('/mistral/embeddings', async (req, res) => {
  try {
    const { input, model } = req.body;
    // FIX: validate input like the OpenAI embeddings route does instead of
    // forwarding a guaranteed-bad request upstream.
    if (!input || (typeof input !== 'string' && !Array.isArray(input))) {
      return res.status(400).json({
        error: 'Bad Request',
        message: 'input field is required and must be a string or array of strings'
      });
    }
    const data = await callMistral('/embeddings', { model: model || 'mistral-embed', input });
    res.json(data);
  } catch (error) {
    console.error('Error calling Mistral Embeddings API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});

// 3. FIM COMPLETIONS (fill-in-the-middle code completion)
app.post('/mistral/fim/completions', async (req, res) => {
  try {
    const { model, prompt, suffix, max_tokens, temperature } = req.body;
    const data = await callMistral('/fim/completions', {
      model: model || 'codestral-2508',
      prompt,
      suffix,
      max_tokens: max_tokens || 1024,
      // FIX: `temperature || 0.7` silently discarded an explicit temperature
      // of 0 (this is a JSON body, so 0 arrives as a real number).
      temperature: temperature !== undefined ? temperature : 0.7
    });
    res.json(data);
  } catch (error) {
    console.error('Error calling Mistral FIM API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});

// 4. FILES API
// Upload a file; purpose defaults to 'fine-tune'.
app.post('/mistral/files', upload.single('file'), async (req, res) => {
  try {
    const { purpose } = req.body;
    // FIX: reject missing uploads with a 400 (matches the OpenAI files route)
    // instead of sending an empty multipart form upstream.
    if (!req.file) {
      return res.status(400).json({
        error: 'Bad Request',
        message: 'file is required for upload'
      });
    }
    const formData = new FormData();
    formData.append('purpose', purpose || 'fine-tune');
    formData.append('file', req.file.buffer, {
      filename: req.file.originalname,
      contentType: req.file.mimetype
    });
    const data = await callMistral('/files', formData, 'POST', true);
    res.json(data);
  } catch (error) {
    console.error('Error calling Mistral Files API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});

app.get('/mistral/files', async (req, res) => {
  try {
    const data = await callMistral('/files', {}, 'GET');
    res.json(data);
  } catch (error) {
    console.error('Error calling Mistral Files API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});

app.get('/mistral/files/:file_id', async (req, res) => {
  try {
    const { file_id } = req.params;
    const data = await callMistral(`/files/${file_id}`, {}, 'GET');
    res.json(data);
  } catch (error) {
    console.error('Error calling Mistral Files API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});

app.delete('/mistral/files/:file_id', async (req, res) => {
  try {
    const { file_id } = req.params;
    const data = await callMistral(`/files/${file_id}`, {}, 'DELETE');
    res.json(data);
  } catch (error) {
    console.error('Error calling Mistral Files API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});

// 5. FINE-TUNING API
app.post('/mistral/fine_tuning/jobs', async (req, res) => {
  try {
    const data = await callMistral('/fine_tuning/jobs', req.body);
    res.json(data);
  } catch (error) {
    console.error('Error calling Mistral Fine-tuning API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});

app.get('/mistral/fine_tuning/jobs', async (req, res) => {
  try {
    const data = await callMistral('/fine_tuning/jobs', {}, 'GET');
    res.json(data);
  } catch (error) {
    console.error('Error calling Mistral Fine-tuning API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});

app.get('/mistral/fine_tuning/jobs/:job_id', async (req, res) => {
  try {
    const { job_id } = req.params;
    const data = await callMistral(`/fine_tuning/jobs/${job_id}`, {}, 'GET');
    res.json(data);
  } catch (error) {
    console.error('Error calling Mistral Fine-tuning API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});

app.post('/mistral/fine_tuning/jobs/:job_id/start', async (req, res) => {
  try {
    const { job_id } = req.params;
    const data = await callMistral(`/fine_tuning/jobs/${job_id}/start`, {});
    res.json(data);
  } catch (error) {
    console.error('Error calling Mistral Fine-tuning API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});

app.post('/mistral/fine_tuning/jobs/:job_id/cancel', async (req, res) => {
  try {
    const { job_id } = req.params;
    const data = await callMistral(`/fine_tuning/jobs/${job_id}/cancel`, {});
    res.json(data);
  } catch (error) {
    console.error('Error calling Mistral Fine-tuning API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});

app.post('/mistral/fine_tuning/models/:model_id/archive', async (req, res) => {
  try {
    const { model_id } = req.params;
    const data = await callMistral(`/fine_tuning/models/${model_id}/archive`, {});
    res.json(data);
  } catch (error) {
    console.error('Error calling Mistral Fine-tuning API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});

app.post('/mistral/fine_tuning/models/:model_id/unarchive', async (req, res) => {
  try {
    const { model_id } = req.params;
    const data = await callMistral(`/fine_tuning/models/${model_id}/unarchive`, {});
    res.json(data);
  } catch (error) {
    console.error('Error calling Mistral Fine-tuning API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});
// 6. BATCH API
app.post('/mistral/batch/jobs', async (req, res) => {
  try {
    res.json(await callMistral('/batch/jobs', req.body));
  } catch (error) {
    console.error('Error calling Mistral Batch API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});

app.get('/mistral/batch/jobs', async (req, res) => {
  try {
    res.json(await callMistral('/batch/jobs', {}, 'GET'));
  } catch (error) {
    console.error('Error calling Mistral Batch API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});

app.get('/mistral/batch/jobs/:job_id', async (req, res) => {
  try {
    res.json(await callMistral(`/batch/jobs/${req.params.job_id}`, {}, 'GET'));
  } catch (error) {
    console.error('Error calling Mistral Batch API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});

app.post('/mistral/batch/jobs/:job_id/cancel', async (req, res) => {
  try {
    res.json(await callMistral(`/batch/jobs/${req.params.job_id}/cancel`, {}));
  } catch (error) {
    console.error('Error calling Mistral Batch API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});
// 7. MODERATION API
app.post('/mistral/moderations', async (req, res) => {
  try {
    const { input, model } = req.body;
    res.json(await callMistral('/moderations', { input, model }));
  } catch (error) {
    console.error('Error calling Mistral Moderations API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});

app.post('/mistral/chat/moderations', async (req, res) => {
  try {
    const { input, model } = req.body;
    res.json(await callMistral('/chat/moderations', { input, model }));
  } catch (error) {
    console.error('Error calling Mistral Chat Moderations API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});

// 8. AUDIO TRANSCRIPTION
// POST /mistral/audio/transcriptions - multipart proxy to Voxtral transcription.
app.post('/mistral/audio/transcriptions', upload.single('file'), async (req, res) => {
  try {
    const { model, language, prompt } = req.body;
    const form = new FormData();
    form.append('model', model || 'voxtral-mini-2507');
    if (language) form.append('language', language);
    if (prompt) form.append('prompt', prompt);
    if (req.file) {
      form.append('file', req.file.buffer, {
        filename: req.file.originalname,
        contentType: req.file.mimetype
      });
    }
    res.json(await callMistral('/audio/transcriptions', form, 'POST', true));
  } catch (error) {
    console.error('Error calling Mistral Audio API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});
// AGENTS API
// Common error responder for the agent CRUD routes below.
function agentsRouteError(res, error) {
  console.error('Error calling Mistral Agents API:', error);
  res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
}

// Create an agent.
app.post('/mistral/agents', async (req, res) => {
  try {
    res.json(await callMistral('/agents', req.body));
  } catch (error) {
    agentsRouteError(res, error);
  }
});

// List agents.
app.get('/mistral/agents', async (req, res) => {
  try {
    res.json(await callMistral('/agents', {}, 'GET'));
  } catch (error) {
    agentsRouteError(res, error);
  }
});

// Fetch one agent.
app.get('/mistral/agents/:agent_id', async (req, res) => {
  try {
    const { agent_id: agentId } = req.params;
    res.json(await callMistral(`/agents/${agentId}`, {}, 'GET'));
  } catch (error) {
    agentsRouteError(res, error);
  }
});

// Replace an agent definition.
app.put('/mistral/agents/:agent_id', async (req, res) => {
  try {
    const { agent_id: agentId } = req.params;
    res.json(await callMistral(`/agents/${agentId}`, req.body, 'PUT'));
  } catch (error) {
    agentsRouteError(res, error);
  }
});

// Create a new version of an agent.
app.post('/mistral/agents/:agent_id/version', async (req, res) => {
  try {
    const { agent_id: agentId } = req.params;
    res.json(await callMistral(`/agents/${agentId}/version`, req.body));
  } catch (error) {
    agentsRouteError(res, error);
  }
});
// 10.
// CONVERSATIONS API
// Generic upstream-failure responder shared by the conversation, library,
// model-list and EOD routes below; label matches each route's original log text.
function upstreamRouteError(label, res, error) {
  console.error(`Error calling ${label}:`, error);
  res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
}

// Start a conversation.
app.post('/mistral/conversations', async (req, res) => {
  try {
    res.json(await callMistral('/conversations', req.body));
  } catch (error) {
    upstreamRouteError('Mistral Conversations API', res, error);
  }
});

// List conversations.
app.get('/mistral/conversations', async (req, res) => {
  try {
    res.json(await callMistral('/conversations', {}, 'GET'));
  } catch (error) {
    upstreamRouteError('Mistral Conversations API', res, error);
  }
});

// Fetch one conversation.
app.get('/mistral/conversations/:conversation_id', async (req, res) => {
  try {
    const { conversation_id: conversationId } = req.params;
    res.json(await callMistral(`/conversations/${conversationId}`, {}, 'GET'));
  } catch (error) {
    upstreamRouteError('Mistral Conversations API', res, error);
  }
});

// Fetch a conversation's entries.
app.get('/mistral/conversations/:conversation_id/entries', async (req, res) => {
  try {
    const { conversation_id: conversationId } = req.params;
    res.json(await callMistral(`/conversations/${conversationId}/entries`, {}, 'GET'));
  } catch (error) {
    upstreamRouteError('Mistral Conversations API', res, error);
  }
});

// Fetch a conversation's messages.
app.get('/mistral/conversations/:conversation_id/messages', async (req, res) => {
  try {
    const { conversation_id: conversationId } = req.params;
    res.json(await callMistral(`/conversations/${conversationId}/messages`, {}, 'GET'));
  } catch (error) {
    upstreamRouteError('Mistral Conversations API', res, error);
  }
});

// Branch a conversation.
app.post('/mistral/conversations/:conversation_id/branch', async (req, res) => {
  try {
    const { conversation_id: conversationId } = req.params;
    res.json(await callMistral(`/conversations/${conversationId}/branch`, req.body));
  } catch (error) {
    upstreamRouteError('Mistral Conversations API', res, error);
  }
});

// Run a completion inside a conversation.
app.post('/mistral/conversations/:conversation_id/complete', async (req, res) => {
  try {
    const { conversation_id: conversationId } = req.params;
    res.json(await callMistral(`/conversations/${conversationId}/complete`, req.body));
  } catch (error) {
    upstreamRouteError('Mistral Conversations API', res, error);
  }
});

// 11. DOCUMENT LIBRARIES API
// Create a document library.
app.post('/mistral/libraries', async (req, res) => {
  try {
    res.json(await callMistral('/libraries', req.body));
  } catch (error) {
    upstreamRouteError('Mistral Libraries API', res, error);
  }
});

// List libraries.
app.get('/mistral/libraries', async (req, res) => {
  try {
    res.json(await callMistral('/libraries', {}, 'GET'));
  } catch (error) {
    upstreamRouteError('Mistral Libraries API', res, error);
  }
});

// Fetch one library.
app.get('/mistral/libraries/:library_id', async (req, res) => {
  try {
    const { library_id: libraryId } = req.params;
    res.json(await callMistral(`/libraries/${libraryId}`, {}, 'GET'));
  } catch (error) {
    upstreamRouteError('Mistral Libraries API', res, error);
  }
});

// Delete a library.
app.delete('/mistral/libraries/:library_id', async (req, res) => {
  try {
    const { library_id: libraryId } = req.params;
    res.json(await callMistral(`/libraries/${libraryId}`, {}, 'DELETE'));
  } catch (error) {
    upstreamRouteError('Mistral Libraries API', res, error);
  }
});

// 12. MODELS LIST
app.get('/mistral/models', async (req, res) => {
  try {
    res.json(await callMistral('/models', {}, 'GET'));
  } catch (error) {
    upstreamRouteError('Mistral Models API', res, error);
  }
});

// ==========================================
// EOD HISTORICAL DATA API ENDPOINTS
// ==========================================

// 1. HISTORICAL DATA
app.get('/eod/historical/:symbol', async (req, res) => {
  try {
    const { symbol } = req.params;
    const { from, to, period } = req.query;
    res.json(await callEOD(`/eod/${symbol}`, { from, to, period }));
  } catch (error) {
    upstreamRouteError('EOD Historical API', res, error);
  }
});

// 2. REALTIME PRICES
app.get('/eod/realtime/:symbol', async (req, res) => {
  try {
    const { symbol } = req.params;
    res.json(await callEOD(`/real-time/${symbol}`));
  } catch (error) {
    upstreamRouteError('EOD Realtime API', res, error);
  }
});

// 3. INTRADAY DATA
app.get('/eod/intraday/:symbol', async (req, res) => {
  try {
    const { symbol } = req.params;
    const { interval } = req.query;
    res.json(await callEOD(`/intraday/${symbol}`, { interval: interval || '5m' }));
  } catch (error) {
    upstreamRouteError('EOD Intraday API', res, error);
  }
});
// 4.
// FUNDAMENTALS
// Shared error responder for the EOD data routes below.
function eodRouteError(label, res, error) {
  console.error(`Error calling ${label}:`, error);
  res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
}

app.get('/eod/fundamentals/:symbol', async (req, res) => {
  try {
    const { symbol } = req.params;
    res.json(await callEOD(`/fundamentals/${symbol}`));
  } catch (error) {
    eodRouteError('EOD Fundamentals API', res, error);
  }
});

// 5. SEARCH
app.get('/eod/search/:query', async (req, res) => {
  try {
    const { query } = req.params;
    res.json(await callEOD(`/search/${query}`));
  } catch (error) {
    eodRouteError('EOD Search API', res, error);
  }
});

// 6. EXCHANGE SYMBOLS
app.get('/eod/exchange-symbols/:exchange', async (req, res) => {
  try {
    const { exchange } = req.params;
    res.json(await callEOD(`/exchange-symbol-list/${exchange}`));
  } catch (error) {
    eodRouteError('EOD Exchange Symbols API', res, error);
  }
});

// 7. NEWS
app.get('/eod/news', async (req, res) => {
  try {
    const { s, offset, limit } = req.query;
    res.json(await callEOD('/news', { s: s || '', offset: offset || '0', limit: limit || '50' }));
  } catch (error) {
    eodRouteError('EOD News API', res, error);
  }
});

// 8. DIVIDENDS
app.get('/eod/dividends/:symbol', async (req, res) => {
  try {
    const { symbol } = req.params;
    res.json(await callEOD(`/div/${symbol}`));
  } catch (error) {
    eodRouteError('EOD Dividends API', res, error);
  }
});

// ==========================================
// CLAUDE/ANTHROPIC API ENDPOINTS
// ==========================================
// 1.
// MESSAGES API (Chat with Streaming)
// Proxies Anthropic's Messages API. Buffered requests go through callClaude;
// when `stream: true` the upstream SSE stream is piped through verbatim.
app.post('/claude/messages', aiChatLimiter, async (req, res) => {
  try {
    const { model, messages, max_tokens, system, temperature, top_p, top_k, stream, tools, metadata, stop_sequences } = req.body;

    // Request validation
    if (!messages || !Array.isArray(messages) || messages.length === 0) {
      return res.status(400).json({ error: 'Bad Request', message: 'messages field is required and must be a non-empty array' });
    }

    // Optional fields are only forwarded when supplied so upstream defaults apply.
    const requestBody = {
      model: model || 'claude-sonnet-4-5-20250929',
      messages,
      max_tokens: max_tokens || 1024,
      ...(system && { system }),
      ...(temperature !== undefined && { temperature }),
      ...(top_p !== undefined && { top_p }),
      ...(top_k !== undefined && { top_k }),
      ...(tools && { tools }),
      ...(metadata && { metadata }),
      ...(stop_sequences && { stop_sequences }),
      stream: stream || false
    };

    if (stream) {
      // FIX: fail fast when the key is missing instead of sending an
      // `x-api-key: undefined` header upstream (mirrors the configured-key
      // check done by the non-streaming call helpers, e.g. callOpenAI).
      if (!process.env.ANTHROPIC_API_KEY) {
        return res.status(500).json({ error: 'Anthropic API key not configured' });
      }
      const response = await fetchWithTimeout('https://api.anthropic.com/v1/messages', {
        method: 'POST',
        headers: {
          'x-api-key': process.env.ANTHROPIC_API_KEY,
          'anthropic-version': '2023-06-01',
          'content-type': 'application/json'
        },
        body: JSON.stringify(requestBody)
      });
      if (!response.ok) {
        const error = await response.json();
        return res.status(response.status).json(error);
      }
      res.setHeader('Content-Type', 'text/event-stream');
      res.setHeader('Cache-Control', 'no-cache');
      res.setHeader('Connection', 'keep-alive');
      const reader = response.body.getReader();
      const decoder = new TextDecoder();
      try {
        while (true) {
          const { done, value } = await reader.read();
          if (done) break;
          res.write(decoder.decode(value, { stream: true }));
        }
        res.end();
      } catch (streamError) {
        console.error('Streaming error:', streamError);
        // FIX: release the upstream stream so the connection is not left
        // dangling when the relay loop fails mid-stream.
        reader.cancel().catch(() => {});
        res.end();
      }
    } else {
      const data = await callClaude('/v1/messages', requestBody);
      res.json(data);
    }
  } catch (error) {
    console.error('Error calling Claude Messages API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});
// 2.
// MESSAGE BATCHES (50% cost reduction)
// Create a message batch. FIX: validate `requests` locally (same pattern as
// the /claude/messages route) so malformed payloads get a clear 400 instead
// of an opaque upstream round trip.
app.post('/claude/messages/batches', async (req, res) => {
  try {
    const { requests } = req.body;
    if (!requests || !Array.isArray(requests) || requests.length === 0) {
      return res.status(400).json({ error: 'Bad Request', message: 'requests field is required and must be a non-empty array' });
    }
    const data = await callClaude('/v1/messages/batches', { requests });
    res.json(data);
  } catch (error) {
    console.error('Error calling Claude Batches API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});

// Fetch batch status.
app.get('/claude/messages/batches/:batch_id', async (req, res) => {
  try {
    const { batch_id } = req.params;
    const data = await callClaude(`/v1/messages/batches/${batch_id}`, {}, 'GET');
    res.json(data);
  } catch (error) {
    console.error('Error calling Claude Batches API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});

// Fetch batch results once the batch has finished processing.
app.get('/claude/messages/batches/:batch_id/results', async (req, res) => {
  try {
    const { batch_id } = req.params;
    const data = await callClaude(`/v1/messages/batches/${batch_id}/results`, {}, 'GET');
    res.json(data);
  } catch (error) {
    console.error('Error calling Claude Batches API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});

// 3. TOKEN COUNTING
// FIX: same messages validation as the chat route, for consistency.
app.post('/claude/messages/count_tokens', async (req, res) => {
  try {
    const { model, messages, system } = req.body;
    if (!messages || !Array.isArray(messages) || messages.length === 0) {
      return res.status(400).json({ error: 'Bad Request', message: 'messages field is required and must be a non-empty array' });
    }
    const data = await callClaude('/v1/messages/count_tokens', { model: model || 'claude-sonnet-4-5-20250929', messages, ...(system && { system }) });
    res.json(data);
  } catch (error) {
    console.error('Error calling Claude Token Count API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});
// 4.
// FILES API
// Shared error responder for the Claude file and organization routes below.
function claudeAdminRouteError(label, res, error) {
  console.error(`Error calling ${label}:`, error);
  res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
}

// Upload a file to Anthropic (multipart pass-through from multer's memory buffer).
app.post('/claude/files', upload.single('file'), async (req, res) => {
  try {
    const form = new FormData();
    if (req.file) {
      form.append('file', req.file.buffer, { filename: req.file.originalname, contentType: req.file.mimetype });
    }
    res.json(await callClaude('/v1/files', form, 'POST', true));
  } catch (error) {
    claudeAdminRouteError('Claude Files API', res, error);
  }
});

// Fetch file metadata.
app.get('/claude/files/:file_id', async (req, res) => {
  try {
    const { file_id: fileId } = req.params;
    res.json(await callClaude(`/v1/files/${fileId}`, {}, 'GET'));
  } catch (error) {
    claudeAdminRouteError('Claude Files API', res, error);
  }
});

// Delete a file.
app.delete('/claude/files/:file_id', async (req, res) => {
  try {
    const { file_id: fileId } = req.params;
    res.json(await callClaude(`/v1/files/${fileId}`, {}, 'DELETE'));
  } catch (error) {
    claudeAdminRouteError('Claude Files API', res, error);
  }
});

// 5. ORGANIZATION & ADMIN
app.get('/claude/organizations/:organization_id', async (req, res) => {
  try {
    const { organization_id: organizationId } = req.params;
    res.json(await callClaude(`/v1/organizations/${organizationId}`, {}, 'GET'));
  } catch (error) {
    claudeAdminRouteError('Claude Organizations API', res, error);
  }
});
// 6.
// USAGE & COST
app.get('/claude/organization/usage', async (req, res) => {
  try {
    res.json(await callClaude('/v1/organization/usage', {}, 'GET'));
  } catch (error) {
    console.error('Error calling Claude Usage API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});

// ==========================================
// PERPLEXITY API ENDPOINTS
// ==========================================

// 1. CHAT COMPLETIONS (Web-grounded Sonar Models)
// Buffered requests go through callPerplexity; `stream: true` relays the
// upstream SSE stream to the client unchanged.
app.post('/perplexity/chat/completions', aiChatLimiter, async (req, res) => {
  try {
    const { model, messages, max_tokens, temperature, top_p, stream, search_context_size, search_domain_filter, search_recency_filter, return_citations, return_images, return_related_questions } = req.body;

    // Request validation
    if (!messages || !Array.isArray(messages) || messages.length === 0) {
      return res.status(400).json({ error: 'Bad Request', message: 'messages field is required and must be a non-empty array' });
    }

    // Only forward optional fields the caller actually supplied.
    const payload = {
      model: model || 'sonar-pro',
      messages,
      ...(max_tokens && { max_tokens }),
      ...(temperature !== undefined && { temperature }),
      ...(top_p !== undefined && { top_p }),
      stream: stream || false,
      ...(search_context_size && { search_context_size }),
      ...(search_domain_filter && { search_domain_filter }),
      ...(search_recency_filter && { search_recency_filter }),
      ...(return_citations !== undefined && { return_citations }),
      ...(return_images !== undefined && { return_images }),
      ...(return_related_questions !== undefined && { return_related_questions })
    };

    if (stream) {
      const upstream = await fetchWithTimeout('https://api.perplexity.ai/chat/completions', {
        method: 'POST',
        headers: {
          'Authorization': `Bearer ${process.env.PERPLEXITY_API_KEY}`,
          'Content-Type': 'application/json'
        },
        body: JSON.stringify(payload)
      });
      if (!upstream.ok) {
        const error = await upstream.json();
        return res.status(upstream.status).json(error);
      }
      res.setHeader('Content-Type', 'text/event-stream');
      res.setHeader('Cache-Control', 'no-cache');
      res.setHeader('Connection', 'keep-alive');
      const reader = upstream.body.getReader();
      const textDecoder = new TextDecoder();
      try {
        for (;;) {
          const { done, value } = await reader.read();
          if (done) break;
          res.write(textDecoder.decode(value, { stream: true }));
        }
        res.end();
      } catch (streamError) {
        console.error('Streaming error:', streamError);
        res.end();
      }
    } else {
      res.json(await callPerplexity('/chat/completions', payload));
    }
  } catch (error) {
    console.error('Error calling Perplexity Chat API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});

// 2. SEARCH API (Direct Search Infrastructure)
app.post('/perplexity/search', async (req, res) => {
  try {
    const { query, search_context, search_recency_filter, search_domain_filter, return_images, return_related_questions } = req.body;
    const body = {
      query,
      ...(search_context && { search_context }),
      ...(search_recency_filter && { search_recency_filter }),
      ...(search_domain_filter && { search_domain_filter }),
      ...(return_images !== undefined && { return_images }),
      ...(return_related_questions !== undefined && { return_related_questions })
    };
    res.json(await callPerplexity('/search', body));
  } catch (error) {
    console.error('Error calling Perplexity Search API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});

// 3. SEARCH CACHE STATS
app.get('/perplexity/search/cache-stats', async (req, res) => {
  try {
    res.json(await callPerplexity('/search/cache-stats', {}, 'GET'));
  } catch (error) {
    console.error('Error calling Perplexity Cache Stats API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});

// ==========================================
// GOOGLE GEMINI API ENDPOINTS
// ==========================================
// 1.
// GENERATE CONTENT (Non-streaming)
// FIX (security): send the Gemini API key via the documented `x-goog-api-key`
// header instead of the `?key=` query string, so it cannot leak into access
// logs, proxies, or browser history that record full URLs; also fail fast
// when the key is missing rather than calling Google with `key=undefined`.
app.post('/gemini/models/:model/generateContent', aiChatLimiter, async (req, res) => {
  try {
    const { model } = req.params;
    const apiKey = process.env.GEMINI_API_KEY;
    if (!apiKey) {
      return res.status(500).json({ error: 'Gemini API key not configured' });
    }
    const response = await fetchWithTimeout(`https://generativelanguage.googleapis.com/v1beta/models/${model}:generateContent`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json', 'x-goog-api-key': apiKey },
      body: JSON.stringify(req.body)
    });
    const data = await response.json();
    if (!response.ok) {
      return res.status(response.status).json(data);
    }
    res.json(data);
  } catch (error) {
    console.error('Error calling Gemini Generate Content API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});

// 2. STREAM GENERATE CONTENT (Streaming)
// Same header-based auth fix; `alt=sse` keeps the upstream response in SSE form.
app.post('/gemini/models/:model/streamGenerateContent', aiChatLimiter, async (req, res) => {
  try {
    const { model } = req.params;
    const apiKey = process.env.GEMINI_API_KEY;
    if (!apiKey) {
      return res.status(500).json({ error: 'Gemini API key not configured' });
    }
    const response = await fetchWithTimeout(`https://generativelanguage.googleapis.com/v1beta/models/${model}:streamGenerateContent?alt=sse`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json', 'x-goog-api-key': apiKey },
      body: JSON.stringify(req.body)
    });
    if (!response.ok) {
      const error = await response.json();
      return res.status(response.status).json(error);
    }
    res.setHeader('Content-Type', 'text/event-stream');
    res.setHeader('Cache-Control', 'no-cache');
    res.setHeader('Connection', 'keep-alive');
    const reader = response.body.getReader();
    const decoder = new TextDecoder();
    try {
      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        const chunk = decoder.decode(value, { stream: true });
        res.write(chunk);
      }
      res.end();
    } catch (streamError) {
      console.error('Streaming error:', streamError);
      res.end();
    }
  } catch (error) {
    console.error('Error calling Gemini Stream API:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
});
// 3.
// BATCH GENERATE CONTENT
// Shared error responder for the Gemini batch/embedding/model routes below.
function geminiRouteError(label, res, error) {
  console.error(`Error calling ${label}:`, error);
  res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
}

app.post('/gemini/models/:model/batchGenerateContent', async (req, res) => {
  try {
    const { model } = req.params;
    res.json(await callGemini(`/models/${model}:batchGenerateContent`, req.body));
  } catch (error) {
    geminiRouteError('Gemini Batch API', res, error);
  }
});

// 4. EMBED CONTENT
app.post('/gemini/models/:model/embedContent', async (req, res) => {
  try {
    const { model } = req.params;
    res.json(await callGemini(`/models/${model}:embedContent`, req.body));
  } catch (error) {
    geminiRouteError('Gemini Embed API', res, error);
  }
});

// 5. BATCH EMBED CONTENT
app.post('/gemini/models/:model/batchEmbedContent', async (req, res) => {
  try {
    const { model } = req.params;
    res.json(await callGemini(`/models/${model}:batchEmbedContent`, req.body));
  } catch (error) {
    geminiRouteError('Gemini Batch Embed API', res, error);
  }
});

// 6. COUNT TOKENS
app.post('/gemini/models/:model/countTokens', async (req, res) => {
  try {
    const { model } = req.params;
    res.json(await callGemini(`/models/${model}:countTokens`, req.body));
  } catch (error) {
    geminiRouteError('Gemini Count Tokens API', res, error);
  }
});

// 7. MODELS LIST
app.get('/gemini/models', async (req, res) => {
  try {
    res.json(await callGemini('/models', {}, 'GET'));
  } catch (error) {
    geminiRouteError('Gemini Models List API', res, error);
  }
});
// 8.
// MODELS GET
// Shared error responder for the Gemini model-detail and file routes below.
function geminiFilesRouteError(label, res, error) {
  console.error(`Error calling ${label}:`, error);
  res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
}

app.get('/gemini/models/:model', async (req, res) => {
  try {
    const { model } = req.params;
    res.json(await callGemini(`/models/${model}`, {}, 'GET'));
  } catch (error) {
    geminiFilesRouteError('Gemini Models Get API', res, error);
  }
});

// 9. FILES UPLOAD
// Multipart pass-through: multer keeps the upload in memory, then it is
// re-wrapped as form data for the Gemini Files endpoint.
app.post('/gemini/files', upload.single('file'), async (req, res) => {
  try {
    const form = new FormData();
    if (req.file) {
      form.append('file', req.file.buffer, { filename: req.file.originalname, contentType: req.file.mimetype });
    }
    res.json(await callGemini('/files', form, 'POST', true));
  } catch (error) {
    geminiFilesRouteError('Gemini Files Upload API', res, error);
  }
});

// 10. FILES LIST
app.get('/gemini/files', async (req, res) => {
  try {
    res.json(await callGemini('/files', {}, 'GET'));
  } catch (error) {
    geminiFilesRouteError('Gemini Files List API', res, error);
  }
});

// 11. FILES GET
app.get('/gemini/files/:file_id', async (req, res) => {
  try {
    const { file_id: fileId } = req.params;
    res.json(await callGemini(`/files/${fileId}`, {}, 'GET'));
  } catch (error) {
    geminiFilesRouteError('Gemini Files Get API', res, error);
  }
});

// 12. FILES DELETE
app.delete('/gemini/files/:file_id', async (req, res) => {
  try {
    const { file_id: fileId } = req.params;
    res.json(await callGemini(`/files/${fileId}`, {}, 'DELETE'));
  } catch (error) {
    geminiFilesRouteError('Gemini Files Delete API', res, error);
  }
});
// 13.
// IMAGE GENERATION (Imagen 4)
// Image generation is very expensive, so this route carries the stricter
// hourly imageGenLimiter instead of the per-minute AI limiter.
app.post('/gemini/models/imagen-4/generateImage', imageGenLimiter, async (req, res) => {
  try {
    const { prompt, numberOfImages, aspectRatio, safetySettings } = req.body;
    const body = {
      prompt,
      ...(numberOfImages && { numberOfImages }),
      ...(aspectRatio && { aspectRatio }),
      ...(safetySettings && { safetySettings })
    };
    res.json(await callGemini('/models/imagen-4:generateImage', body));
  } catch (error) {
    console.error('Error calling Gemini Imagen API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});

// 14. VIDEO GENERATION (Veo 3)
app.post('/gemini/models/veo-3/generateVideo', async (req, res) => {
  try {
    const { prompt, duration, aspectRatio } = req.body;
    const body = {
      prompt,
      ...(duration && { duration }),
      ...(aspectRatio && { aspectRatio })
    };
    res.json(await callGemini('/models/veo-3:generateVideo', body));
  } catch (error) {
    console.error('Error calling Gemini Veo API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});

// ==========================================
// RAPIDAPI ENDPOINTS
// ==========================================
// 1.
// JOBS API (JSearch)
// Shared error responder for the JSearch job routes below.
function jobsRouteError(label, res, error) {
  console.error(`Error calling ${label}:`, error);
  res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
}

// Keyword job search with paging and optional filters.
app.post('/rapidapi/jobs/search', async (req, res) => {
  try {
    const { query, page, num_pages, date_posted, remote_jobs_only } = req.body;
    const params = {
      query: query || 'Python developer',
      page: page || 1,
      num_pages: num_pages || 1,
      ...(date_posted && { date_posted }),
      ...(remote_jobs_only && { remote_jobs_only })
    };
    res.json(await callRapidAPI('jsearch.p.rapidapi.com', '/search', params, 'GET'));
  } catch (error) {
    jobsRouteError('Jobs Search API', res, error);
  }
});

// Detail lookup for a single job id.
app.post('/rapidapi/jobs/details', async (req, res) => {
  try {
    const { job_id } = req.body;
    const params = { job_id, extended_publisher_details: 'false' };
    res.json(await callRapidAPI('jsearch.p.rapidapi.com', '/job-details', params, 'GET'));
  } catch (error) {
    jobsRouteError('Job Details API', res, error);
  }
});

// Estimated salary for a title/location pair.
app.post('/rapidapi/jobs/salary', async (req, res) => {
  try {
    const { job_title, location, radius } = req.body;
    const params = {
      job_title: job_title || 'nodejs developer',
      location: location || 'New York, NY, USA',
      ...(radius && { radius })
    };
    res.json(await callRapidAPI('jsearch.p.rapidapi.com', '/estimated-salary', params, 'GET'));
  } catch (error) {
    jobsRouteError('Salary Estimate API', res, error);
  }
});
// 2.
// YAHOO FINANCE API
// Shared error responder for the Yahoo Finance / search / scraper routes below.
function rapidRouteError(label, res, error) {
  console.error(`Error calling ${label}:`, error);
  res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
}

app.get('/rapidapi/yahoo/key-statistics/:symbol', async (req, res) => {
  try {
    const { symbol } = req.params;
    res.json(await callRapidAPI('yahoo-finance127.p.rapidapi.com', `/key-statistics/${symbol}`, {}, 'GET'));
  } catch (error) {
    rapidRouteError('Yahoo Finance Key Statistics API', res, error);
  }
});

app.get('/rapidapi/yahoo/financial-analysis/:symbol', async (req, res) => {
  try {
    const { symbol } = req.params;
    res.json(await callRapidAPI('yahoo-finance127.p.rapidapi.com', `/financial-analysis/${symbol}`, {}, 'GET'));
  } catch (error) {
    rapidRouteError('Yahoo Finance Analysis API', res, error);
  }
});

app.get('/rapidapi/yahoo/earnings-trend/:symbol', async (req, res) => {
  try {
    const { symbol } = req.params;
    res.json(await callRapidAPI('yahoo-finance127.p.rapidapi.com', `/earnings-trend/${symbol}`, {}, 'GET'));
  } catch (error) {
    rapidRouteError('Yahoo Finance Earnings API', res, error);
  }
});

app.get('/rapidapi/yahoo/price/:symbol', async (req, res) => {
  try {
    const { symbol } = req.params;
    res.json(await callRapidAPI('yahoo-finance127.p.rapidapi.com', `/price/${symbol}`, {}, 'GET'));
  } catch (error) {
    rapidRouteError('Yahoo Finance Price API', res, error);
  }
});

app.post('/rapidapi/yahoo/multi-quote', async (req, res) => {
  try {
    const { symbols } = req.body;
    // Request validation
    if (!symbols || !Array.isArray(symbols) || symbols.length === 0) {
      return res.status(400).json({ error: 'Bad Request', message: 'symbols field is required and must be a non-empty array' });
    }
    const params = { symbols: symbols.join(',') };
    res.json(await callRapidAPI('yahoo-finance127.p.rapidapi.com', '/multi-quote', params, 'GET'));
  } catch (error) {
    rapidRouteError('Yahoo Finance Multi Quote API', res, error);
  }
});

app.get('/rapidapi/yahoo/news/:symbol', async (req, res) => {
  try {
    const { symbol } = req.params;
    res.json(await callRapidAPI('yahoo-finance127.p.rapidapi.com', `/news/${symbol}`, {}, 'GET'));
  } catch (error) {
    rapidRouteError('Yahoo Finance News API', res, error);
  }
});

// 3. GOOGLE SEARCH API
app.post('/rapidapi/google/search', async (req, res) => {
  try {
    const { query, limit, related_keywords } = req.body;
    // FIX: use nullish coalescing for the defaults — with `||`, a client that
    // explicitly sent `related_keywords: false` (or `limit: 0`) had its value
    // silently replaced by the default.
    const params = {
      query: query || '',
      limit: limit ?? 10,
      related_keywords: related_keywords ?? 'true'
    };
    res.json(await callRapidAPI('google-search74.p.rapidapi.com', '/', params, 'GET'));
  } catch (error) {
    rapidRouteError('Google Search API', res, error);
  }
});

// 4. WEB SCRAPER API
app.post('/rapidapi/scraper/contacts', async (req, res) => {
  try {
    const { query } = req.body;
    const params = { query: query || 'https://example.com' };
    res.json(await callRapidAPI('website-contacts-scraper.p.rapidapi.com', '/scrape-contacts', params, 'GET'));
  } catch (error) {
    rapidRouteError('Web Scraper API', res, error);
  }
});
// 5.
// AMAZON PRODUCT SEARCH API
// Proxies the RapidAPI real-time Amazon product search; defaults to US, page 1.
app.post('/rapidapi/amazon/search', async (req, res) => {
  try {
    const { query, page, country } = req.body;
    const params = { query: query || '', page: page || '1', country: country || 'US' };
    const data = await callRapidAPI('real-time-amazon-data.p.rapidapi.com', '/search', params, 'GET');
    res.json(data);
  } catch (error) {
    console.error('Error calling Amazon Product Search API:', error);
    res.status(error.status || 500).json({ error: error.data || 'Internal server error' });
  }
});

// ==========================================
// HEALTH CHECK
// ==========================================
// Static liveness endpoint; the per-API endpoint counts are hand-maintained
// strings, not computed from the registered routes — update them together.
app.get('/health', (req, res) => {
  res.json({ status: 'ok', message: 'Universal AI & Financial Data Proxy Server is running', apis: { openai: '15 endpoints', mistral: '36 endpoints', claude: '10 endpoints', perplexity: '3 endpoints', gemini: '14 endpoints', rapidapi: '12 endpoints', eod: '8 endpoints' }, total_endpoints: 98 });
});

// ==========================================
// ENVIRONMENT VALIDATION
// ==========================================
// Keys checked at startup; missing keys only produce a warning banner — the
// server still starts and the affected routes fail at request time.
const requiredEnvs = [ 'OPENAI_API_KEY', 'MISTRAL_API_KEY', 'ANTHROPIC_API_KEY', 'PERPLEXITY_API_KEY', 'GEMINI_API_KEY', 'RAPIDAPI_KEY', 'EOD_API_KEY' ];
const missingEnvs = requiredEnvs.filter(key => !process.env[key]);
if (missingEnvs.length > 0) {
  // NOTE(review): banner text copied verbatim — the box-drawing layout assumes
  // the original file's line breaks, which may have been lost in transit.
  console.error(` ╔═══════════════════════════════════════════════════════════╗ ║ ⚠️ WARNING: Missing API Keys ║ ╠═══════════════════════════════════════════════════════════╣ ║ The following API keys are not configured: ║ ║ ${missingEnvs.map(k => `• ${k}`).join('\n║ ')} ║ ║ ║ Some endpoints will not work without these keys. ║ ║ Set them in your environment or Replit Secrets. 
║ ╚═══════════════════════════════════════════════════════════╝ `);
}

// Start server
// Binds to 0.0.0.0 so the port is reachable from outside the container/VM
// (required for Replit-style hosting); PORT defaults to 5000 (set at top of file).
app.listen(PORT, '0.0.0.0', () => {
  console.log(` ╔═══════════════════════════════════════════════════════════╗ ║ 🚀 UNIVERSAL AI & FINANCIAL DATA PROXY SERVER ║ ║ ✅ Server running on http://0.0.0.0:${PORT} ║ ║ ║ ║ 🤖 OpenAI API (15 endpoints): ║ ║ • Chat, Embeddings, Images, Audio, Moderations ║ ║ • Models, Files, Assistants, Threads ║ ║ ║ ║ 🧠 Mistral AI API (36 endpoints): ║ ║ • Chat, Embeddings, FIM, Files, Fine-tuning ║ ║ • Batch, Moderation, Audio, Agents, Conversations ║ ║ ║ ║ 🤖 Claude/Anthropic API (10 endpoints): ║ ║ • Messages, Batches, Token Count, Files ║ ║ • Organization, Usage, Vision, Tools ║ ║ ║ ║ 🔍 Perplexity API (3 endpoints): ║ ║ • Sonar Chat, Search API, Cache Stats ║ ║ • Web-grounded with Citations ║ ║ ║ ║ 💎 Google Gemini API (14 endpoints): ║ ║ • Generate, Stream, Batch, Embeddings ║ ║ • Files, Models, Imagen 4, Veo 3 ║ ║ ║ ║ ⚡ RapidAPI (12 endpoints): ║ ║ • Jobs Search (JSearch), Yahoo Finance ║ ║ • Google Search, Web Scraper ║ ║ ║ ║ 📈 EOD Historical Data API (8 endpoints): ║ ║ • Historical, Realtime, Intraday, Fundamentals ║ ║ • Search, Exchange Symbols, News, Dividends ║ ║ ║ ║ 🔍 Health Check: /health ║ ║ 📊 Total: 98 API Endpoints ║ ╚═══════════════════════════════════════════════════════════╝ `);
});