diff --git a/src/components/views/MainView.js b/src/components/views/MainView.js
index 92289f0..e92f5e8 100644
--- a/src/components/views/MainView.js
+++ b/src/components/views/MainView.js
@@ -414,6 +414,10 @@ export class MainView extends LitElement {
_geminiKey: { state: true },
_groqKey: { state: true },
_openaiKey: { state: true },
+ _openaiCompatibleApiKey: { state: true },
+ _openaiCompatibleBaseUrl: { state: true },
+ _openaiCompatibleModel: { state: true },
+ _responseProvider: { state: true },
_tokenError: { state: true },
_keyError: { state: true },
// Local AI state
@@ -437,6 +441,10 @@ export class MainView extends LitElement {
this._geminiKey = '';
this._groqKey = '';
this._openaiKey = '';
+ this._openaiCompatibleApiKey = '';
+ this._openaiCompatibleBaseUrl = '';
+ this._openaiCompatibleModel = '';
+ this._responseProvider = 'gemini';
this._tokenError = false;
this._keyError = false;
this._showLocalHelp = false;
@@ -467,6 +475,15 @@ export class MainView extends LitElement {
this._geminiKey = await cheatingDaddy.storage.getApiKey().catch(() => '') || '';
this._groqKey = await cheatingDaddy.storage.getGroqApiKey().catch(() => '') || '';
this._openaiKey = creds.openaiKey || '';
+
+ // Load OpenAI-compatible config
+ const openaiConfig = await cheatingDaddy.storage.getOpenAICompatibleConfig().catch(() => ({}));
+ this._openaiCompatibleApiKey = openaiConfig.apiKey || '';
+ this._openaiCompatibleBaseUrl = openaiConfig.baseUrl || '';
+ this._openaiCompatibleModel = openaiConfig.model || '';
+
+ // Load response provider preference
+ this._responseProvider = prefs.responseProvider || 'gemini';
// Load local AI settings
this._ollamaHost = prefs.ollamaHost || 'http://127.0.0.1:11434';
@@ -631,6 +648,42 @@ export class MainView extends LitElement {
this.requestUpdate();
}
+ async _saveOpenAICompatibleApiKey(val) {
+ this._openaiCompatibleApiKey = val;
+ await cheatingDaddy.storage.setOpenAICompatibleConfig(
+ val,
+ this._openaiCompatibleBaseUrl,
+ this._openaiCompatibleModel
+ );
+ this.requestUpdate();
+ }
+
+ async _saveOpenAICompatibleBaseUrl(val) {
+ this._openaiCompatibleBaseUrl = val;
+ await cheatingDaddy.storage.setOpenAICompatibleConfig(
+ this._openaiCompatibleApiKey,
+ val,
+ this._openaiCompatibleModel
+ );
+ this.requestUpdate();
+ }
+
+ async _saveOpenAICompatibleModel(val) {
+ this._openaiCompatibleModel = val;
+ await cheatingDaddy.storage.setOpenAICompatibleConfig(
+ this._openaiCompatibleApiKey,
+ this._openaiCompatibleBaseUrl,
+ val
+ );
+ this.requestUpdate();
+ }
+
+ async _saveResponseProvider(val) {
+ this._responseProvider = val;
+ await cheatingDaddy.storage.updatePreference('responseProvider', val);
+ this.requestUpdate();
+ }
+
async _saveOllamaHost(val) {
this._ollamaHost = val;
await cheatingDaddy.storage.updatePreference('ollamaHost', val);
@@ -715,29 +768,75 @@ export class MainView extends LitElement {
this._saveGeminiKey(e.target.value)}
class=${this._keyError ? 'error' : ''}
/>
- this.onExternalLink('https://aistudio.google.com/apikey')}>Get Gemini key
+ this.onExternalLink('https://aistudio.google.com/apikey')}>Get Gemini key - Always used for audio transcription
+ ${this._responseProvider === 'groq' ? html`
+
+ ` : ''}
+
+ ${this._responseProvider === 'openai-compatible' ? html`
+
+ ` : ''}
+
${this._renderStartButton()}
`;
}
diff --git a/src/storage.js b/src/storage.js
index c09ad52..d38918c 100644
--- a/src/storage.js
+++ b/src/storage.js
@@ -13,7 +13,10 @@ const DEFAULT_CONFIG = {
// Default credential values persisted by this module; every field starts
// empty until the user supplies it in settings.
const DEFAULT_CREDENTIALS = {
    apiKey: '',                    // Gemini key (read via getApiKey; loaded into MainView._geminiKey)
    groqApiKey: '',                // Groq key (read via getGroqApiKey)
    openaiCompatibleApiKey: '',    // bearer token for a user-chosen OpenAI-compatible endpoint
    openaiCompatibleBaseUrl: '',   // base URL of that endpoint (path normalized at request time)
    openaiCompatibleModel: ''      // model identifier sent in the chat-completion request body
};
const DEFAULT_PREFERENCES = {
@@ -27,6 +30,7 @@ const DEFAULT_PREFERENCES = {
fontSize: 'medium',
backgroundTransparency: 0.8,
googleSearchEnabled: false,
+ responseProvider: 'gemini',
ollamaHost: 'http://127.0.0.1:11434',
ollamaModel: 'llama3.1',
whisperModel: 'Xenova/whisper-small',
@@ -204,6 +208,23 @@ function setGroqApiKey(groqApiKey) {
return setCredentials({ groqApiKey });
}
/**
 * Read the OpenAI-compatible provider config from stored credentials,
 * mapped onto the shorter { apiKey, baseUrl, model } shape used by callers.
 * Each field falls back to '' when missing or falsy.
 */
function getOpenAICompatibleConfig() {
    const creds = getCredentials();
    const orEmpty = (value) => value || '';
    return {
        apiKey: orEmpty(creds.openaiCompatibleApiKey),
        baseUrl: orEmpty(creds.openaiCompatibleBaseUrl),
        model: orEmpty(creds.openaiCompatibleModel)
    };
}
+
/**
 * Store the OpenAI-compatible provider config, translating the short
 * (apiKey, baseUrl, model) arguments back to the prefixed credential keys.
 * Returns whatever setCredentials returns.
 */
function setOpenAICompatibleConfig(apiKey, baseUrl, model) {
    const patch = {
        openaiCompatibleApiKey: apiKey,
        openaiCompatibleBaseUrl: baseUrl,
        openaiCompatibleModel: model
    };
    return setCredentials(patch);
}
+
// ============ PREFERENCES ============
function getPreferences() {
@@ -500,6 +521,8 @@ module.exports = {
setApiKey,
getGroqApiKey,
setGroqApiKey,
+ getOpenAICompatibleConfig,
+ setOpenAICompatibleConfig,
// Preferences
getPreferences,
diff --git a/src/utils/gemini.js b/src/utils/gemini.js
index 572f501..a06315c 100644
--- a/src/utils/gemini.js
+++ b/src/utils/gemini.js
@@ -3,7 +3,7 @@ const { BrowserWindow, ipcMain } = require('electron');
const { spawn } = require('child_process');
const { saveDebugAudio } = require('../audioUtils');
const { getSystemPrompt } = require('./prompts');
-const { getAvailableModel, incrementLimitCount, getApiKey, getGroqApiKey, incrementCharUsage, getModelForToday } = require('../storage');
+const { getAvailableModel, incrementLimitCount, getApiKey, getGroqApiKey, getOpenAICompatibleConfig, incrementCharUsage, getModelForToday } = require('../storage');
// Lazy-loaded to avoid circular dependency (localai.js imports from gemini.js)
let _localai = null;
@@ -15,6 +15,9 @@ function getLocalAi() {
// Provider mode: 'byok' or 'local'
let currentProviderMode = 'byok';
+// Response provider: 'gemini', 'groq', or 'openai-compatible'
+let currentResponseProvider = 'gemini';
+
// Groq conversation history for context
let groqConversationHistory = [];
@@ -205,6 +208,14 @@ function hasGroqKey() {
return key && key.trim() != ''
}
+// helper to check if OpenAI-compatible API has been configured
+function hasOpenAICompatibleConfig() {
+ const config = getOpenAICompatibleConfig();
+ return config.apiKey && config.apiKey.trim() !== '' &&
+ config.baseUrl && config.baseUrl.trim() !== '' &&
+ config.model && config.model.trim() !== '';
+}
+
function trimConversationHistoryForGemma(history, maxChars=42000) {
if(!history || history.length === 0) return [];
let totalChars = 0;
@@ -344,6 +355,128 @@ async function sendToGroq(transcription) {
}
}
/**
 * Send a transcription to a user-configured OpenAI-compatible chat endpoint
 * and stream the reply to the renderer.
 *
 * Flow: validate config and input -> append the user turn to the shared
 * conversation history -> POST a streaming chat-completion request ->
 * forward each SSE delta to the renderer -> record the assistant turn.
 *
 * NOTE(review): this deliberately reuses `groqConversationHistory`, the same
 * module-level array sendToGroq uses, so switching providers mid-session
 * shares context across them — confirm that is intended.
 *
 * @param {string} transcription - user utterance; blank input is a no-op.
 * @returns {Promise<void>} resolves after streaming completes or on error;
 *     errors are reported via 'update-status', never thrown to the caller.
 */
async function sendToOpenAICompatible(transcription) {
    const config = getOpenAICompatibleConfig();

    // All three fields are required to form a request; bail quietly otherwise.
    if (!config.apiKey || !config.baseUrl || !config.model) {
        console.log('OpenAI-compatible API not fully configured');
        return;
    }

    if (!transcription || transcription.trim() === '') {
        console.log('Empty transcription, skipping OpenAI-compatible API');
        return;
    }

    console.log(`Sending to OpenAI-compatible API (${config.model}):`, transcription.substring(0, 100) + '...');

    groqConversationHistory.push({
        role: 'user',
        content: transcription.trim()
    });

    // Pre-request trim: cap context at the 20 most recent turns.
    // NOTE(review): the post-reply trim below uses 40 — confirm the asymmetry
    // is intentional and not a copy/paste slip.
    if (groqConversationHistory.length > 20) {
        groqConversationHistory = groqConversationHistory.slice(-20);
    }

    try {
        // Ensure baseUrl ends with /v1/chat/completions or contains the full endpoint
        let apiUrl = config.baseUrl.trim();
        if (!apiUrl.includes('/chat/completions')) {
            // Remove trailing slash if present
            apiUrl = apiUrl.replace(/\/$/, '');
            // Add OpenAI-compatible endpoint path
            apiUrl = `${apiUrl}/v1/chat/completions`;
        }

        console.log(`Using OpenAI-compatible endpoint: ${apiUrl}`);

        const response = await fetch(apiUrl, {
            method: 'POST',
            headers: {
                'Authorization': `Bearer ${config.apiKey}`,
                'Content-Type': 'application/json'
            },
            body: JSON.stringify({
                model: config.model,
                messages: [
                    // System prompt falls back to a generic assistant persona.
                    { role: 'system', content: currentSystemPrompt || 'You are a helpful assistant.' },
                    ...groqConversationHistory
                ],
                stream: true,
                temperature: 0.7,
                max_tokens: 2048
            })
        });

        if (!response.ok) {
            const errorText = await response.text();
            console.error('OpenAI-compatible API error:', response.status, errorText);
            sendToRenderer('update-status', `OpenAI API error: ${response.status}`);
            return;
        }

        // Read the SSE stream manually; each event line is `data: {json}`.
        const reader = response.body.getReader();
        const decoder = new TextDecoder();
        let fullText = '';
        let isFirst = true; // first delta opens a new response bubble in the UI

        while (true) {
            const { done, value } = await reader.read();
            if (done) break;

            const chunk = decoder.decode(value, { stream: true });
            const lines = chunk.split('\n').filter(line => line.trim() !== '');

            for (const line of lines) {
                if (line.startsWith('data: ')) {
                    const data = line.slice(6);
                    if (data === '[DONE]') continue;

                    try {
                        const parsed = JSON.parse(data);
                        const content = parsed.choices?.[0]?.delta?.content;

                        if (content) {
                            // Re-send the accumulated text so the renderer
                            // always has the full response so far.
                            fullText += content;
                            sendToRenderer(isFirst ? 'new-response' : 'update-response', fullText);
                            isFirst = false;
                        }
                    } catch (e) {
                        // Ignore JSON parse errors from partial chunks
                        // NOTE(review): a `data:` line split across network
                        // chunks is silently dropped here, which can lose a
                        // delta — consider buffering across reads.
                    }
                }
            }
        }

        // Clean up <think>-style reasoning tags if present (for DeepSeek-style reasoning models)
        const cleanText = stripThinkingTags(fullText);
        if (cleanText !== fullText) {
            sendToRenderer('update-response', cleanText);
        }

        if (fullText.trim()) {
            // NOTE(review): the history and saveConversationTurn record the raw
            // fullText (including any reasoning tags) while the renderer ends
            // on cleanText — confirm which one should be persisted.
            groqConversationHistory.push({
                role: 'assistant',
                content: fullText.trim()
            });

            // Post-reply trim: cap at the 40 most recent turns (see note above).
            if (groqConversationHistory.length > 40) {
                groqConversationHistory = groqConversationHistory.slice(-40);
            }

            saveConversationTurn(transcription, fullText);
        }

        console.log(`OpenAI-compatible API response completed (${config.model})`);
        sendToRenderer('update-status', 'Listening...');

    } catch (error) {
        console.error('Error calling OpenAI-compatible API:', error);
        sendToRenderer('update-status', 'OpenAI API error: ' + error.message);
    }
}
+
async function sendToGemma(transcription) {
const apiKey = getApiKey();
if (!apiKey) {
@@ -442,6 +575,14 @@ async function initializeGeminiSession(apiKey, customPrompt = '', profile = 'int
sessionParams = { apiKey, customPrompt, profile, language };
reconnectAttempts = 0;
}
+
+ // Load response provider preference
+ if (!isReconnect) {
+ const { getPreferences } = require('../storage');
+ const prefs = getPreferences();
+ currentResponseProvider = prefs.responseProvider || 'gemini';
+ console.log('🔧 Response provider set to:', currentResponseProvider);
+ }
const client = new GoogleGenAI({
vertexai: false,
@@ -488,17 +629,32 @@ async function initializeGeminiSession(apiKey, customPrompt = '', profile = 'int
// if (message.serverContent?.outputTranscription?.text) { ... }
if (message.serverContent?.generationComplete) {
- console.log('Generation complete. Current transcription:', `"${currentTranscription}"`);
+ console.log('✅ Generation complete. Current transcription:', `"${currentTranscription}"`);
if (currentTranscription.trim() !== '') {
- console.log('Sending to', hasGroqKey() ? 'Groq' : 'Gemma');
- if (hasGroqKey()) {
- sendToGroq(currentTranscription);
+ // Use explicit user choice for response provider
+ if (currentResponseProvider === 'openai-compatible') {
+ if (hasOpenAICompatibleConfig()) {
+ console.log('📤 Sending to OpenAI-compatible API (user selected)');
+ sendToOpenAICompatible(currentTranscription);
+ } else {
+ console.log('⚠️ OpenAI-compatible selected but not configured, falling back to Gemini');
+ sendToGemma(currentTranscription);
+ }
+ } else if (currentResponseProvider === 'groq') {
+ if (hasGroqKey()) {
+ console.log('📤 Sending to Groq (user selected)');
+ sendToGroq(currentTranscription);
+ } else {
+ console.log('⚠️ Groq selected but not configured, falling back to Gemini');
+ sendToGemma(currentTranscription);
+ }
} else {
+ console.log('📤 Sending to Gemini (user selected)');
sendToGemma(currentTranscription);
}
currentTranscription = '';
} else {
- console.log('Transcription is empty, not sending to LLM');
+ console.log('⚠️ Transcription is empty, not sending to LLM');
}
messageBuffer = '';
}
@@ -954,8 +1110,19 @@ function setupGeminiIpcHandlers(geminiSessionRef) {
try {
console.log('Sending text message:', text);
- if (hasGroqKey()) {
- sendToGroq(text.trim());
+ // Use explicit user choice for response provider
+ if (currentResponseProvider === 'openai-compatible') {
+ if (hasOpenAICompatibleConfig()) {
+ sendToOpenAICompatible(text.trim());
+ } else {
+ sendToGemma(text.trim());
+ }
+ } else if (currentResponseProvider === 'groq') {
+ if (hasGroqKey()) {
+ sendToGroq(text.trim());
+ } else {
+ sendToGemma(text.trim());
+ }
} else {
sendToGemma(text.trim());
}
@@ -1053,6 +1220,29 @@ function setupGeminiIpcHandlers(geminiSessionRef) {
return { success: false, error: error.message };
}
});
+
+ // OpenAI-compatible API configuration handlers
+ ipcMain.handle('set-openai-compatible-config', async (event, apiKey, baseUrl, model) => {
+ try {
+ const { setOpenAICompatibleConfig } = require('../storage');
+ setOpenAICompatibleConfig(apiKey, baseUrl, model);
+ console.log('OpenAI-compatible config saved:', { baseUrl, model: model.substring(0, 30) });
+ return { success: true };
+ } catch (error) {
+ console.error('Error setting OpenAI-compatible config:', error);
+ return { success: false, error: error.message };
+ }
+ });
+
+ ipcMain.handle('get-openai-compatible-config', async (event) => {
+ try {
+ const config = getOpenAICompatibleConfig();
+ return { success: true, config };
+ } catch (error) {
+ console.error('Error getting OpenAI-compatible config:', error);
+ return { success: false, error: error.message };
+ }
+ });
}
module.exports = {
@@ -1071,4 +1261,6 @@ module.exports = {
sendImageToGeminiHttp,
setupGeminiIpcHandlers,
formatSpeakerResults,
+ hasOpenAICompatibleConfig,
+ sendToOpenAICompatible,
};
diff --git a/src/utils/renderer.js b/src/utils/renderer.js
index 3ada501..b07a51b 100644
--- a/src/utils/renderer.js
+++ b/src/utils/renderer.js
@@ -56,6 +56,13 @@ const storage = {
async setGroqApiKey(groqApiKey) {
return ipcRenderer.invoke('storage:set-groq-api-key', groqApiKey);
},
+ async getOpenAICompatibleConfig() {
+ const result = await ipcRenderer.invoke('get-openai-compatible-config');
+ return result.success ? result.config : { apiKey: '', baseUrl: '', model: '' };
+ },
+ async setOpenAICompatibleConfig(apiKey, baseUrl, model) {
+ return ipcRenderer.invoke('set-openai-compatible-config', apiKey, baseUrl, model);
+ },
// Preferences
async getPreferences() {