From 01b8415a02d156898780e601b3ba988bb5e8c929 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=D0=98=D0=BB=D1=8C=D1=8F=20=D0=93=D0=BB=D0=B0=D0=B7=D1=83?= =?UTF-8?q?=D0=BD=D0=BE=D0=B2?= Date: Thu, 15 Jan 2026 19:03:54 +0300 Subject: [PATCH] Bump version to 0.5.2 and enhance logging for renderer and audio processing --- package.json | 2 +- src/index.js | 8 +++ src/utils/renderer.js | 132 +++++++++++++++++++++++++++++++++--------- src/utils/window.js | 20 ++++++- 4 files changed, 132 insertions(+), 30 deletions(-) diff --git a/package.json b/package.json index 3e66720..100244b 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "cheating-daddy", "productName": "cheating-daddy", - "version": "0.5.0", + "version": "0.5.2", "description": "cheating daddy", "main": "src/index.js", "scripts": { diff --git a/src/index.js b/src/index.js index 89aa83f..80a8e27 100644 --- a/src/index.js +++ b/src/index.js @@ -39,6 +39,14 @@ app.whenReady().then(async () => { // Add handler to get log path from renderer ipcMain.handle('get-log-path', () => getLogPath()); + + // Add handler for renderer logs (so they go to the log file) + ipcMain.on('renderer-log', (event, { level, message }) => { + const prefix = '[RENDERER]'; + if (level === 'error') console.error(prefix, message); + else if (level === 'warn') console.warn(prefix, message); + else console.log(prefix, message); + }); }); app.on('window-all-closed', () => { diff --git a/src/utils/renderer.js b/src/utils/renderer.js index 10d1edb..f210348 100644 --- a/src/utils/renderer.js +++ b/src/utils/renderer.js @@ -18,6 +18,23 @@ let currentImageQuality = 'medium'; // Store current image quality for manual sc const isLinux = process.platform === 'linux'; const isMacOS = process.platform === 'darwin'; +const isWindows = process.platform === 'win32'; + +// Send logs to main process for file logging +function logToMain(level, ...args) { + const message = args.map(arg => { + if (typeof arg === 'object') { + try { return JSON.stringify(arg); } catch { return String(arg); } + } + return String(arg); + }).join(' '); + ipcRenderer.send('renderer-log', { level, message }); + + // Also log to console + if (level === 'error') console.error(...args); + else if (level === 'warn') console.warn(...args); + else console.log(...args); +} // ============ STORAGE API ============ // Wrapper for IPC-based storage access @@ -278,6 +295,9 @@ async function startCapture(screenshotIntervalSeconds = 5, imageQuality = 'mediu console.log('Linux capture started - system audio:', mediaStream.getAudioTracks().length > 0, 'microphone mode:', audioMode); } else { // Windows - use display media with loopback for system audio + logToMain('info', '=== Starting Windows audio capture ==='); + cheatingDaddy.setStatus('Requesting screen & audio...'); + mediaStream = await navigator.mediaDevices.getDisplayMedia({ video: { frameRate: 1, @@ -293,10 +313,29 @@ async function startCapture(screenshotIntervalSeconds = 5, imageQuality = 'mediu }, }); - console.log('Windows capture started with loopback audio'); + const audioTracks = mediaStream.getAudioTracks(); + const videoTracks = mediaStream.getVideoTracks(); + + logToMain('info', 'Windows capture result:', { + hasVideo: videoTracks.length > 0, + hasAudio: audioTracks.length > 0, + audioTrackInfo: audioTracks.map(t => ({ + label: t.label, + enabled: t.enabled, + muted: t.muted, + readyState: t.readyState, + settings: t.getSettings() + })), + }); - // Setup audio processing for Windows loopback audio only - setupWindowsLoopbackProcessing(); + if 
(audioTracks.length === 0) { + logToMain('warn', 'WARNING: No audio tracks! User must check "Share audio" in screen picker dialog'); + cheatingDaddy.setStatus('Warning: No audio - enable "Share audio" checkbox'); + } else { + logToMain('info', 'Audio track acquired, setting up processing...'); + // Setup audio processing for Windows loopback audio only + setupWindowsLoopbackProcessing(); + } if (audioMode === 'mic_only' || audioMode === 'both') { let micStream = null; @@ -412,32 +451,73 @@ function setupLinuxSystemAudioProcessing() { function setupWindowsLoopbackProcessing() { // Setup audio processing for Windows loopback audio only - audioContext = new AudioContext({ sampleRate: SAMPLE_RATE }); - const source = audioContext.createMediaStreamSource(mediaStream); - audioProcessor = audioContext.createScriptProcessor(BUFFER_SIZE, 1, 1); - - let audioBuffer = []; - const samplesPerChunk = SAMPLE_RATE * AUDIO_CHUNK_DURATION; - - audioProcessor.onaudioprocess = async e => { - const inputData = e.inputBuffer.getChannelData(0); - audioBuffer.push(...inputData); - - // Process audio in chunks - while (audioBuffer.length >= samplesPerChunk) { - const chunk = audioBuffer.splice(0, samplesPerChunk); - const pcmData16 = convertFloat32ToInt16(chunk); - const base64Data = arrayBufferToBase64(pcmData16.buffer); - - await ipcRenderer.invoke('send-audio-content', { - data: base64Data, - mimeType: 'audio/pcm;rate=24000', + logToMain('info', 'Setting up Windows loopback audio processing...'); + + try { + audioContext = new AudioContext({ sampleRate: SAMPLE_RATE }); + + logToMain('info', 'AudioContext created:', { + state: audioContext.state, + sampleRate: audioContext.sampleRate, + }); + + // Resume AudioContext if suspended (Chrome policy) + if (audioContext.state === 'suspended') { + logToMain('warn', 'AudioContext suspended, attempting resume...'); + audioContext.resume().then(() => { + logToMain('info', 'AudioContext resumed successfully'); + }).catch(err => { + logToMain('error', 'Failed to resume AudioContext:', err.message); }); } - }; + + const source = audioContext.createMediaStreamSource(mediaStream); + audioProcessor = audioContext.createScriptProcessor(BUFFER_SIZE, 1, 1); - source.connect(audioProcessor); - audioProcessor.connect(audioContext.destination); + let audioBuffer = []; + const samplesPerChunk = SAMPLE_RATE * AUDIO_CHUNK_DURATION; + let chunkCount = 0; + let totalSamples = 0; + + audioProcessor.onaudioprocess = async e => { + const inputData = e.inputBuffer.getChannelData(0); + audioBuffer.push(...inputData); + totalSamples += inputData.length; + + // Process audio in chunks + while (audioBuffer.length >= samplesPerChunk) { + const chunk = audioBuffer.splice(0, samplesPerChunk); + const pcmData16 = convertFloat32ToInt16(chunk); + const base64Data = arrayBufferToBase64(pcmData16.buffer); + + await ipcRenderer.invoke('send-audio-content', { + data: base64Data, + mimeType: 'audio/pcm;rate=24000', + }); + + chunkCount++; + + // Log progress every 100 chunks (~10 seconds) + if (chunkCount === 1) { + logToMain('info', 'First audio chunk sent to AI'); + cheatingDaddy.setStatus('Listening...'); + } else if (chunkCount % 100 === 0) { + // Calculate max amplitude to check if we're getting real audio + const maxAmp = Math.max(...chunk.map(Math.abs)); + logToMain('info', `Audio progress: ${chunkCount} chunks, maxAmplitude: ${maxAmp.toFixed(4)}`); + } + } + }; + + source.connect(audioProcessor); + audioProcessor.connect(audioContext.destination); + + logToMain('info', 'Windows audio processing 
pipeline connected'); + + } catch (err) { + logToMain('error', 'Error setting up Windows audio:', err.message, err.stack); + cheatingDaddy.setStatus('Audio error: ' + err.message); + } } async function captureScreenshot(imageQuality = 'medium', isManual = false) { diff --git a/src/utils/window.js b/src/utils/window.js index 3c0f0a8..1c7f261 100644 --- a/src/utils/window.js +++ b/src/utils/window.js @@ -35,8 +35,9 @@ function createWindow(sendToRenderer, geminiSessionRef) { const { session, desktopCapturer } = require('electron'); // Setup display media request handler for screen capture - // On macOS, use system picker for better UX if (process.platform === 'darwin') { + // On macOS, use SystemAudioDump for audio (not browser loopback) + // So we just need to capture the screen session.defaultSession.setDisplayMediaRequestHandler( async (request, callback) => { try { @@ -52,6 +53,7 @@ function createWindow(sendToRenderer, geminiSessionRef) { } // On macOS, directly use the first screen (system already granted permission) + // Audio is handled separately by SystemAudioDump console.log('Screen capture source:', sources[0].name); callback({ video: sources[0], audio: 'loopback' }); } catch (error) { @@ -61,8 +63,19 @@ function createWindow(sendToRenderer, geminiSessionRef) { }, { useSystemPicker: false } // Disable system picker, use our source directly ); + } else if (process.platform === 'win32') { + // On Windows, use system picker so user can enable "Share audio" checkbox + // This is REQUIRED for system audio capture on Windows + session.defaultSession.setDisplayMediaRequestHandler( + (request, callback) => { + console.log('Windows: Using system picker for screen/audio selection'); + // Don't call callback - let system picker handle it + // The system picker will provide both video and audio if user checks the box + }, + { useSystemPicker: true } + ); } else { - // On other platforms, use the system picker + // On Linux, try to get system audio via loopback session.defaultSession.setDisplayMediaRequestHandler( async (request, callback) => { try { @@ -77,13 +90,14 @@ function createWindow(sendToRenderer, geminiSessionRef) { return; } + console.log('Linux: Using screen source with loopback audio'); callback({ video: sources[0], audio: 'loopback' }); } catch (error) { console.error('Error getting screen sources:', error); callback(null); } }, - { useSystemPicker: true } + { useSystemPicker: false } ); }
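
Note on the win32 branch above (outside the diff, not part of the patch): Electron's documented usage of setDisplayMediaRequestHandler keeps a handler that answers the request via callback even when { useSystemPicker: true } is passed, so the native picker is used when the OS offers it and the handler acts as a fallback otherwise. As written here, the Windows handler never calls callback, so if Electron does invoke it (for example where the native picker is unavailable), the renderer's getDisplayMedia() promise would stay pending. A minimal fallback sketch, assuming session and desktopCapturer are in scope as in the other branches of createWindow shown in this patch, and not verified across Electron versions:

    } else if (process.platform === 'win32') {
        // Sketch only: same intent as the patch (prefer the system picker so the
        // user can tick "Share audio"), but still answer the request if Electron
        // invokes this handler instead of showing the native picker.
        session.defaultSession.setDisplayMediaRequestHandler(
            async (request, callback) => {
                try {
                    const sources = await desktopCapturer.getSources({ types: ['screen'] });
                    if (sources.length === 0) {
                        console.error('Windows: no screen sources found');
                        callback(null);
                        return;
                    }
                    console.log('Windows: falling back to first screen with loopback audio');
                    callback({ video: sources[0], audio: 'loopback' });
                } catch (error) {
                    console.error('Error getting screen sources:', error);
                    callback(null);
                }
            },
            { useSystemPicker: true } // still prefer the native picker when the OS provides one
        );
    }

With this shape, the "Share audio" path should behave as the patch intends when the picker appears, while the no-picker case still resolves with loopback audio instead of hanging.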